Commit 0ce025e7 authored by mihaivanea
Browse files

Need to add regression to DenseNet.

parent 6a5d0897
......@@ -4,7 +4,7 @@ from keras.preprocessing import image
from keras.models import Model
from keras.layers import Dense, GlobalAveragePooling2D
from keras import backend as K
from generate_arrays_fddb import generate_arrays_fddb
from generate_arrays_fddb import load_arrays_fddb
# create the base pre-trained model
base_model = InceptionV3(weights='imagenet', include_top=False)
......@@ -28,17 +28,31 @@ for layer in base_model.layers:
# compile the model (should be done *after* setting layers to non-trainable)
model.compile(optimizer='rmsprop', loss='categorical_crossentropy')
fddb_path = "../fddb/FDDB-folds/FDDB-fold-01-ellipseList.txt"
fddb_path_train = "../fddb/FDDB-folds/FDDB-fold-01-ellipseList.txt"
fddb_path_valid = "../fddb/FDDB-folds/FDDB-fold-02-ellipseList.txt"
# loading the training and validation data from FDDB.
x_train, y_train = load_arrays_fddb(fddb_path_train)
x_valid, y_valid = load_arrays_fddb(fddb_path_valid)
## train the model on the new data for a few epochs
steps_per_epoch=1000, epochs=10
x_train, y_train,
validation_data=(x_valid, y_valid)
except Exception as e:
# Make predictions
predictions_valid = model.predict(x_valid, batch_size=32, verbose=1)
# Cross-entropy loss score
performance = log_loss(y_valid, predictions_valid)
# at this point, the top layers are well trained and we can start fine-tuning
# convolutional layers from inception V3. We will freeze the bottom N layers
# and train the remaining top layers.
Supports Markdown
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment