I'm following this link,
https://colab.research.google.com/drive/11ko0DBnI1QLxVoJQR8gt9b4JDcvbCrtU#scrollTo=A_tyvKnBP6qD to build my object detector. I am using Google collab. My workspace structure is exactly as followed in this link. Everything was going fine until this block of code:
from object_detection.utils import dataset_util
%cd /content/drive/MyDrive/Gun_Detection/models
data_base_url = '/content/drive/MyDrive/Gun_Detection/data'
image_dir = data_base_url + 'images/'
def class_text_to_int(row_label):
if row_label == 'pistol':
return 1
else:
None
def split(df, group):
data = namedtuple('data', ['filename', 'object'])
gb = df.groupby(group)
return [data(filename, gb.get_group(x))
for filename, x in zip(gb.groups.keys(), gb.groups)]
def create_tf_example(group, path):
with tf.io.gfile.GFile(os.path.join(path, '{}'.format(group.filename)), 'rb') as fid:
encoded_jpg = fid.read()
encoded_jpg_io = io.BytesIO(encoded_jpg)
image = Image.open(encoded_jpg_io)
width, height = image.size
filename = group.filename.encode('utf8')
image_format = b'jpg'
xmins = []
xmaxs = []
ymins = []
ymaxs = []
classes_text = []
classes = []
for index, row in group.object.iterrows():
xmins.append(row['xmin'] / width)
xmaxs.append(row['xmax'] / width)
ymins.append(row['ymin'] / height)
ymaxs.append(row['ymax'] / height)
classes_text.append(row['class'].encode('utf8'))
classes.append(class_text_to_int(row['class']))
tf_example = tf.train.Example(features=tf.train.Features(feature={
'image/height': dataset_util.int64_feature(height),
'image/width': dataset_util.int64_feature(width),
'image/filename': dataset_util.bytes_feature(filename),
'image/source_id': dataset_util.bytes_feature(filename),
'image/encoded': dataset_util.bytes_feature(encoded_jpg),
'image/format': dataset_util.bytes_feature(image_format),
'image/object/bbox/xmin': dataset_util.float_list_feature(xmins),
'image/object/bbox/xmax': dataset_util.float_list_feature(xmaxs),
'image/object/bbox/ymin': dataset_util.float_list_feature(ymins),
'image/object/bbox/ymax': dataset_util.float_list_feature(ymaxs),
'image/object/class/text': dataset_util.bytes_list_feature(classes_text),
'image/object/class/label': dataset_util.int64_list_feature(classes),
}))
return tf_example
for csv in ['train_labels', 'test_labels']:
writer = tf.io.TFRecordWriter(data_base_url + csv + '.record')
path = os.path.join(image_dir)
examples = pd.read_csv(data_base_url + csv + '.csv')
grouped = split(examples, 'filename')
for group in grouped:
tf_example = create_tf_example(group, path)
writer.write(tf_example.SerializeToString())
writer.close()
output_path = os.path.join(os.getcwd(), data_base_url + csv + '.record')
print('Successfully created the TFRecords: {}'.format(data_base_url + csv + '.record'))
After this error, datatrain_label.record got generated in my Gun Detection folder in my drive. I am confused 😕 I can't proceed further. Please help!
N.B: I am not pro with python and am still learning. Trying hard to understand the code but I honestly don't.
This is the error message I'm getting:
<pre>/content/drive/MyDrive/Gun_Detection/models
---------------------------------------------------------------------------
FileNotFoundError Traceback (most recent call last)
<ipython-input-33-b566ecc28dcc> in <module>()
61 writer = tf.io.TFRecordWriter(data_base_url + csv + '.record')
62 path = os.path.join(image_dir)
---> 63 examples = pd.read_csv(data_base_url + csv + '.csv')
64 grouped = split(examples, 'filename')
65 for group in grouped:
4 frames
/usr/local/lib/python3.7/dist-packages/pandas/io/parsers.py in __init__(self, src, **kwds)
2008 kwds["usecols"] = self.usecols
2009
-> 2010 self._reader = parsers.TextReader(src, **kwds)
2011 self.unnamed_cols = self._reader.unnamed_cols
2012
pandas/_libs/parsers.pyx in pandas._libs.parsers.TextReader.__cinit__()
pandas/_libs/parsers.pyx in pandas._libs.parsers.TextReader._setup_parser_source()
FileNotFoundError: [Errno 2] No such file or directory: '/content/drive/MyDrive/Gun_Detection/datatrain_labels.csv'
What I have tried:
I have googled alot on this error but all in vain.