Commit 77e46397 authored by mihaivanea

The first hybrid model is ready for training

parent aab57840
@@ -13,7 +13,7 @@ from sys import stdout, argv
 # generate arrays only for a minibatch for now
 path = "../fddb/FDDB-folds/FDDB-fold-01-ellipseList.txt"
-def conver_to_cartesian(ellip_array):
+def convert_to_cartesian(ellip_array):
     result = []
     for ellip in ellip_array:
         ellip = [e for e in ellip.rstrip('\n').split(' ')]
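
For reference, FDDB stores each face as an ellipse line of the form "major_axis_radius minor_axis_radius angle center_x center_y 1". The body of convert_to_cartesian falls outside this hunk, so the helper below is only a hypothetical sketch of such a conversion, mapping one ellipse line to an axis-aligned bounding box:

import math

# Hypothetical sketch (not the committed implementation): convert one FDDB
# ellipse annotation into a Cartesian bounding box (x_min, y_min, x_max, y_max).
def ellipse_to_bbox(ellipse_line):
    major, minor, angle, cx, cy = [float(v) for v in ellipse_line.split()[:5]]
    # Half-extents of a rotated ellipse along the image axes.
    dx = math.sqrt((major * math.cos(angle)) ** 2 + (minor * math.sin(angle)) ** 2)
    dy = math.sqrt((major * math.sin(angle)) ** 2 + (minor * math.cos(angle)) ** 2)
    return (cx - dx, cy - dy, cx + dx, cy + dy)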
@@ -34,12 +34,8 @@ def generate_arrays_fddb(path):
     for lb in labels:
         img = lb[0]
         data = imread("../fddb/" + img + ".jpg")
-        yield data, conver_to_cartesian(lb[2])
-generator = generate_arrays_fddb(path)
-for g in generator:
-    print(g)
-    print("HERE")
-for g in generator:
-    print(g)
-    print("HERE")
+        yield data, convert_to_cartesian(lb[2])
+#generator = generate_arrays_fddb(path)
+#for g in generator:
+#    print(g)
@@ -4,6 +4,7 @@ from keras.preprocessing import image
 from keras.models import Model
 from keras.layers import Dense, GlobalAveragePooling2D
 from keras import backend as K
+from inceptionv3_fine_tune import generate_arrays_fddb
 
 # create the base pre-trained model
 base_model = InceptionV3(weights='imagenet', include_top=False)
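
The layers built between this hunk and the next are not shown. In the standard Keras transfer-learning recipe that this script appears to follow, a new head is attached to the frozen base roughly as below; the layer sizes and num_classes are illustrative, not taken from the commit:

# Illustrative head in the style of the Keras fine-tuning example; the
# actual layers added in this file are outside the visible hunks.
num_classes = 2  # assumed, e.g. face vs. background
x = base_model.output
x = GlobalAveragePooling2D()(x)
x = Dense(1024, activation='relu')(x)
predictions = Dense(num_classes, activation='softmax')(x)
model = Model(inputs=base_model.input, outputs=predictions)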
@@ -27,8 +28,13 @@ for layer in base_model.layers:
 # compile the model (should be done *after* setting layers to non-trainable)
 model.compile(optimizer='rmsprop', loss='categorical_crossentropy')
 
+fddb_path = "../fddb/FDDB-folds/FDDB-fold-01-ellipseList.txt"
+
 # train the model on the new data for a few epochs
-model.fit_generator(steps_per_epoch=1000, epochs=10)
+model.fit_generator(
+    generator=generate_arrays_fddb(fddb_path),
+    steps_per_epoch=1000, epochs=10
+)
 
 # at this point, the top layers are well trained and we can start fine-tuning
 # convolutional layers from inception V3. We will freeze the bottom N layers
...
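
One caveat worth noting: Keras's fit_generator expects the generator to yield (inputs, targets) tuples of numpy arrays with a leading batch axis, and to keep yielding for steps_per_epoch * epochs steps. generate_arrays_fddb as shown yields single unbatched samples, so a thin wrapper along these lines may be needed (batched_fddb, the resize call, and the 299x299 InceptionV3 input size are assumptions, not part of the commit):

import numpy as np
from skimage.transform import resize  # assumed resize utility

# Hypothetical wrapper: batch single FDDB samples for fit_generator.
# Assumes a fixed number of target coordinates per image.
def batched_fddb(path, target_size=(299, 299)):
    while True:  # fit_generator draws batches indefinitely
        for data, boxes in generate_arrays_fddb(path):
            x = np.expand_dims(resize(data, target_size), axis=0)
            y = np.expand_dims(np.asarray(boxes, dtype=np.float32), axis=0)
            yield x, y

Since the targets here are coordinates rather than class labels, a regression loss such as 'mse' would ordinarily be a better fit than 'categorical_crossentropy'.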