Skip to content

Commit

Permalink
everything works, start training
Browse files Browse the repository at this point in the history
  • Loading branch information
gustavz committed Jan 24, 2018
1 parent 5c29ead commit 3abf405
Show file tree
Hide file tree
Showing 5 changed files with 72 additions and 50 deletions.
63 changes: 34 additions & 29 deletions egohands_setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -37,16 +37,15 @@ def get_bbox_visualize(base_path, dir):
img_path = base_path + dir + "/" + f
image_path_array.append(img_path)

image_path_array.sort()
boxes = sio.loadmat(base_path + dir + "/polygons.mat")
print (boxes)
# there are 100 of these per folder in the egohands dataset
polygons = boxes["polygons"][0]
# first = polygons[0]
# print(len(first))
pointindex = 0

for first in polygons:
index = 0

font = cv2.FONT_HERSHEY_SIMPLEX

Expand All @@ -66,7 +65,7 @@ def get_bbox_visualize(base_path, dir):
csvholder = []
for pointlist in first:
pst = np.empty((0, 2), int)
max_x = max_y = min_x = min_y = height = width = 0
max_x = max_y = min_x = min_y = 0

findex = 0
for point in pointlist:
Expand Down Expand Up @@ -110,16 +109,14 @@ def get_bbox_visualize(base_path, dir):
cv2.imshow('Verifying annotation ', img)
save_csv(csv_path + ".csv", csvholder)
print("===== saving csv file for ", tail)
cv2.waitKey(1000) # close window when a key press is detected
cv2.waitKey(1) # Change this to 1000 to see every single frame


def create_directory(dir_path):
    """Ensure *dir_path* exists, creating intermediate directories as needed.

    Args:
        dir_path: directory path to create if missing.
    """
    # exist_ok avoids the check-then-create race of `if not exists: makedirs`.
    os.makedirs(dir_path, exist_ok=True)

# combine all individual csv files for each image into a single csv file per folder.


def generate_label_files(image_dir):
header = ['filename', 'width', 'height',
'class', 'xmin', 'ymin', 'xmax', 'ymax']
Expand Down Expand Up @@ -207,13 +204,20 @@ def rename_files(image_dir):
old = image_dir + dir + "/" + f
new = image_dir + dir + "/" + dir + "_" + f
os.rename(old, new)
print("renaming {} to {}".format(old,new))
else:
break

generate_csv_files("egohands/_LABELLED_SAMPLES/")


def extract_folder(dataset_path):
    """Extract the EgoHands zip archive into ./egohands, then rename samples.

    Skips extraction when an ``egohands`` directory already exists; always
    runs the rename step afterwards.

    Args:
        dataset_path: path to the downloaded egohands_data.zip archive.
    """
    if not os.path.exists("egohands"):
        print("> Extracting Dataset files")
        # Context manager guarantees the zip handle is closed even if
        # extractall raises (the original leaked the handle on error).
        with zipfile.ZipFile(dataset_path, 'r') as zip_ref:
            zip_ref.extractall("egohands")
        print("> Extraction complete")
    rename_files("egohands/_LABELLED_SAMPLES/")

def download_egohands_dataset(dataset_url, dataset_path):
print("\nTHIS CODE IS BASED ON VICTOR DIBIAs WORK\
\nSEE HIS REPO:\
Expand All @@ -226,24 +230,19 @@ def download_egohands_dataset(dataset_url, dataset_path):
opener = urllib.request.URLopener()
opener.retrieve(dataset_url, dataset_path)
print("> download complete")
print("> run egohands_dataset_cleaner.py again")

extract_folder(dataset_path)
else:
print("Egohands dataset already downloaded.\nGenerating CSV files")

if not os.path.exists("egohands"):
zip_ref = zipfile.ZipFile(dataset_path, 'r')
print("> Extracting Dataset files")
zip_ref.extractall("egohands")
print("> Extraction complete")
zip_ref.close()
rename_files("egohands/_LABELLED_SAMPLES/")
final_finish()
extract_folder(dataset_path)

def create_label_map():
    """Write the single-class label map file if it does not exist yet.

    Creates ``data/label_map.pbtxt`` with one item (id 1, name 'hand').
    NOTE(review): assumes the ``data/`` directory already exists — confirm
    against the callers that set up the directory layout.
    """
    label_map = "data/label_map.pbtxt"
    if not os.path.isfile(label_map):
        # Context manager closes the file on all paths (original used
        # explicit open/close, which leaks the handle if write raises).
        with open(label_map, "w") as f:
            f.write("item {\n id: 1\n name: 'hand'\n}")

def final_finish():
f = open("data/label_map.pbtxt","w")
f.write("item {\n id: 1\n name: 'hand'\n}")
f.close()
cwd = os.getcwd()
for directory in ['train','eval']:
src_dir = cwd+'/data/{}/'.format(directory)
Expand All @@ -252,15 +251,21 @@ def final_finish():
for file in sorted(os.listdir(src_dir)):
if file.endswith(".jpg"):
sh.move(src_dir+file,drc_dir+file)

sh.rmtree('egohands')
#os.remove(EGO_HANDS_FILE)
print('\n> creating the dataset complete\
\n> you can now start training\
\n> see howto_wiki for more information')

EGOHANDS_DATASET_URL = "http://vision.soic.indiana.edu/egohands_files/egohands_data.zip"
EGO_HANDS_FILE = "egohands_data.zip"


download_egohands_dataset(EGOHANDS_DATASET_URL, EGO_HANDS_FILE)


def main():
    """Drive the EgoHands dataset setup: download/extract the archive,
    write the label map, then arrange the files for training."""
    dataset_url = "http://vision.soic.indiana.edu/egohands_files/egohands_data.zip"
    archive_name = "egohands_data.zip"

    download_egohands_dataset(dataset_url, archive_name)
    create_label_map()
    final_finish()


# Script entry point: run the full dataset setup when executed directly.
if __name__ == '__main__':
    main()
6 changes: 0 additions & 6 deletions howto_wiki → howto_tf
Original file line number Diff line number Diff line change
Expand Up @@ -48,12 +48,6 @@ python object_detection/export_inference_graph.py \
--output_directory=/home/gustav/workspace/deeptraining_hands/model/frozen_model


## TRAINING YOLO
# from the darknet directory
./darknet detector train /home/gustav/workspace/deeptraining_hands/data/handsnet.data /home/gustav/workspace/deeptraining_hands/model/tiny-yolo-handsnet.cfg darknet19_448.conv.23



#### Options for the model.config file ####
# To check config options look at objectdetection/protos
in train_config:
Expand Down
14 changes: 14 additions & 0 deletions howto_yolo
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
# From darknet/cfg copy tiny-yolo.cfg and rename it to your needs
# Make following changes for Training:
# in [net]:
batch=64
subdivisions=8
# in [region]:
classes=1
# in the last [convolutional]:
filters = 30

# TRAINING
# from the darknet/ directory
./darknet detector train /home/gustav/workspace/deeptraining_hands/data/handsnet.data /home/gustav/workspace/deeptraining_hands/model/tiny-yolo-handsnet.cfg darknet19_448.conv.23

1 change: 1 addition & 0 deletions mat_to_xml.py
Original file line number Diff line number Diff line change
Expand Up @@ -160,6 +160,7 @@ def create_xml_file(hand_pos, filename,IMG_FILES_PATH, XML_FILES_PATH):
xml_file.write(pretty_xml)



def main():
# Read a .mat file and convert it to a pascal format
for directory in ['train','eval']:
Expand Down
38 changes: 23 additions & 15 deletions oxfordhands_setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@
def create_directory(dir_path):
    """Ensure *dir_path* exists, creating intermediate directories as needed.

    Args:
        dir_path: directory path to create if missing.
    """
    # exist_ok avoids the check-then-create race of `if not exists: makedirs`.
    os.makedirs(dir_path, exist_ok=True)

def download_dataset(dataset_name, dataset_url, tarfile_path):
if not os.path.exists(tarfile_path):
print(
Expand All @@ -30,8 +30,8 @@ def download_dataset(dataset_name, dataset_url, tarfile_path):
tar.extractall()
print("> Extraction complete")
tar.close()


def rename_double(path,name):
if os.path.isfile(path+name):
newname = 'x'+ name
Expand All @@ -40,7 +40,7 @@ def rename_double(path,name):
rename_double(path,newname)
else:
return name

def check_equal(src_dir, drc_dir):
src = len([name for name in os.listdir(src_dir) if os.path.isfile(name)])
drc = len([name for name in os.listdir(drc_dir) if os.path.isfile(name)])
Expand All @@ -50,29 +50,36 @@ def check_equal(src_dir, drc_dir):
else:
print("> unequal directory sizes, manual check necessary!")
return False


def create_label_map():
    """Write the single-class label map file if it does not exist yet.

    Creates ``data/label_map.pbtxt`` with one item (id 1, name 'hand').
    NOTE(review): assumes the ``data/`` directory already exists — confirm
    against the callers that set up the directory layout.
    """
    label_map = "data/label_map.pbtxt"
    if not os.path.isfile(label_map):
        # Context manager closes the file on all paths (original used
        # explicit open/close, which leaks the handle if write raises).
        with open(label_map, "w") as f:
            f.write("item {\n id: 1\n name: 'hand'\n}")

def cleanup_structure(data_path, dataset_path, tarfile_path):
check = []
create_directory(data_path)
print('> merge training and vildation set\
\n and copy all files to data/ directory')

for directory in ['test','validation','training']:
for typ in ['images','annotations']:
src_dir = dataset_path + '/{}_dataset/{}_data/{}/'.format(directory,directory,typ)

if directory is 'test':
if typ is 'annotations':
drc_dir = data_path+'eval/{}/mat/'.format(typ)
else:
drc_dir = data_path+'eval/{}/'.format(typ)
else:
else:
if typ is 'annotations':
drc_dir = data_path+'train/{}/mat/'.format(typ)
else:
drc_dir = data_path+'train/{}/'.format(typ)
create_directory(drc_dir)

create_directory(drc_dir)
for file in os.listdir(src_dir):
if file.endswith(".jpg") or file.endswith(".mat"):
newfile = rename_double(drc_dir,file)
Expand All @@ -85,20 +92,21 @@ def cleanup_structure(data_path, dataset_path, tarfile_path):
print('> Dataset successuflly set up!')
else:
print("> check manually for possible errors in created /data directory!")





def main():
    """Drive the Oxford hands dataset setup: download the archive,
    restructure it under data/, and write the label map."""
    base_dir = os.getcwd()
    name = 'hand_dataset'
    url = 'http://www.robots.ox.ac.uk/~vgg/data/hands/downloads/hand_dataset.tar.gz'
    archive = base_dir + '/hand_dataset.tar.gz'
    extracted = base_dir + '/' + name
    target = base_dir + '/data/'

    download_dataset(name, url, archive)
    cleanup_structure(target, extracted, archive)
    create_label_map()



if __name__ == '__main__':
main()
main()

0 comments on commit 3abf405

Please sign in to comment.