Commit 383a8654 authored by mihaivanea

Generator works. Need to process the images to feed them as training input to fit().

parent 77e46397
@@ -34,6 +34,7 @@ def generate_arrays_fddb(path):
    for lb in labels:
        img = lb[0]
        data = imread("../fddb/" + img + ".jpg")
        print(len(data))
        yield data, convert_to_cartesian(lb[2])
#generator = generate_arrays_fddb(path)
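As the commit message notes, the images still need processing before they can be fed to fit(). One way that step could look (a sketch, not part of this commit: preprocess_for_inception is a hypothetical helper, and 299x299 assumes InceptionV3's default input size):

import numpy as np
from skimage.transform import resize
from keras.applications.inception_v3 import preprocess_input

def preprocess_for_inception(img):
    # Hypothetical helper: img is the raw array returned by imread().
    # Some FDDB images are greyscale, so replicate the single channel
    # across the 3 channels InceptionV3 expects.
    if img.ndim == 2:
        img = np.stack([img, img, img], axis=-1)
    # Resize to the assumed 299x299 input size, keeping the 0-255 range
    # so preprocess_input can rescale the pixels to [-1, 1] itself.
    img = resize(img, (299, 299), preserve_range=True)
    return preprocess_input(img.astype("float32"))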
@@ -4,7 +4,7 @@ from keras.preprocessing import image
from keras.models import Model
from keras.layers import Dense, GlobalAveragePooling2D
from keras import backend as K
from inceptionv3_fine_tune import generate_arrays_fddb
from generate_arrays_fddb import generate_arrays_fddb
# create the base pre-trained model
base_model = InceptionV3(weights='imagenet', include_top=False)
@@ -31,10 +31,16 @@ model.compile(optimizer='rmsprop', loss='categorical_crossentropy')
fddb_path = "../fddb/FDDB-folds/FDDB-fold-01-ellipseList.txt"
# train the model on the new data for a few epochs
model.fit_generator(
    generator=generate_arrays_fddb(fddb_path),
    steps_per_epoch=1000, epochs=10
)
fail = 0
try:
    model.fit_generator(
        generator=generate_arrays_fddb(fddb_path),
        steps_per_epoch=1000, epochs=10
    )
except Exception as e:
    fail += 1
    print(e)
print("FAIL:" + str(fail))
# at this point, the top layers are well trained and we can start fine-tuning
# convolutional layers from inception V3. We will freeze the bottom N layers
@@ -42,9 +48,9 @@ model.fit_generator(
# let's visualize layer names and layer indices to see how many layers
# we should freeze:
print(base_model.input.name)
for i, layer in enumerate(base_model.layers):
    print(i, layer.name)
print(base_model.output)
#print(base_model.input.name)
#for i, layer in enumerate(base_model.layers):
# print(i, layer.name)
#print(base_model.output)
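For the fine-tuning stage described in the comments (freeze the bottom N layers and retrain the rest), the stock Keras InceptionV3 example freezes the first 249 layers and recompiles with a low-learning-rate SGD; the cut-off and optimizer settings below are assumptions taken from that example, not from this repository:

from keras.optimizers import SGD

# Freeze everything below the chosen cut-off, unfreeze the top
# inception blocks, and recompile so the trainable flags take effect.
for layer in model.layers[:249]:
    layer.trainable = False
for layer in model.layers[249:]:
    layer.trainable = True

model.compile(optimizer=SGD(lr=0.0001, momentum=0.9),
              loss='categorical_crossentropy')

# ...followed by another round of fit_generator on the FDDB data.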