Commit 0e6c74cb authored by mihaivanea

Started working on the ResNet model. Need to find out the original learning rate.

parent 0ce025e7
@@ -57,9 +57,11 @@ def load_arrays_fddb(path):
         if len(data.shape) == 3:
             x.append(data)
             result = convert_to_cartesian(lb[2])
-            y.append(result)
+            y.append([result])
     x = np.array(x)
     y = np.array(y)
+    print(x.shape)
+    print(y.shape)
     return x, y
     #generator = generate_arrays_fddb(path)
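
A quick illustration of what the y.append([result]) change does to the shape of the target array, assuming convert_to_cartesian returns a flat list of four box coordinates (the values below are made up):

import numpy as np

result = [10.0, 20.0, 30.0, 40.0]  # hypothetical convert_to_cartesian output
y_old, y_new = [], []
y_old.append(result)       # behaviour before this commit
y_new.append([result])     # behaviour after this commit
print(np.array(y_old).shape)   # (1, 4)
print(np.array(y_new).shape)   # (1, 1, 4)
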
#!/vol/bitbucket/mv1315/urop/venv/bin/python3.5
from keras.applications.resnet50 import ResNet50
from keras.preprocessing import image
from keras.models import Model
from keras.layers import Dense, GlobalAveragePooling2D
from keras import backend as K
from generate_arrays_fddb import load_arrays_fddb
from keras.wrappers.scikit_learn import KerasRegressor
from keras.optimizers import SGD
from keras.optimizers import RMSprop
from keras.losses import categorical_crossentropy, mean_squared_error
# Instantiate a ResNet50 model with pre-trained ImageNet weights.
base_model = ResNet50(
    include_top=True,
    weights="imagenet",
    classes=1000)
# Get the output tensor of ResNet.
x = base_model.output
## Alternative, currently commented out: add a pooling layer
## after the last convolutional layer to reduce the spatial size
## of the representation.
## NB: This also reduces the number of parameters and
## the amount of computation required.
#x = base_model.output
#x = GlobalAveragePooling2D()(x)
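# Illustrative sketch (not the variant used here): for the pooled
# alternative above to work, the ImageNet head must be dropped, since
# GlobalAveragePooling2D expects 4D convolutional features:
#   base_model = ResNet50(include_top=False, weights="imagenet")
#   x = GlobalAveragePooling2D()(base_model.output)  # -> (None, 2048)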
# Bifurcate the output into a classifier branch and a regression branch.
# Classifier branch: a fully connected layer with softmax
# over the 1000 ImageNet classes.
classifier_branch = Dense(
    1000,
    activation="softmax")(x)
# Regression branch: 4 bounding-box coordinates, to be trained on FDDB.
regression_branch = Dense(
    4,
    kernel_initializer="normal",
    activation="relu")(x)
# The full model to be trained.
model = Model(
    inputs=[base_model.input],
    outputs=[classifier_branch, regression_branch])
# Freeze the convolutional layers of ResNet and
# train only the two newly added top layers.
for layer in base_model.layers:
    layer.trainable = False
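# Optional sanity check (illustrative addition): after freezing, the only
# trainable weights left should be the kernel and bias of each new head.
assert len(model.trainable_weights) == 4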
# Compile the model.
# The learning rate has to be 10 times smaller than the one
# originally used.
# TODO: Find out the learning rate used on ResNet.
# An optimiser intended for the classifier branch.
rms = RMSprop(lr=0.001, rho=0.9, epsilon=1e-08, decay=0.0)
# An optimiser intended for the regression branch.
sgd = SGD(lr=0.001, momentum=0.0, decay=0.0, nesterov=False)
# NB: Keras accepts a single optimiser per model, so only one of the
# two can be passed to compile; losses, however, can be set per output.
model.compile(
    optimizer=rms,
    loss=[categorical_crossentropy, mean_squared_error])
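
A minimal sketch of how the compiled two-headed model might be fitted, assuming load_arrays_fddb returns image arrays x and box targets y of shape (n, 1, 4); the data path and the zero-filled class targets y_cls are hypothetical placeholders:

import numpy as np

x, y = load_arrays_fddb("fddb_arrays")   # hypothetical path to the arrays
y_cls = np.zeros((len(x), 1000))         # placeholder ImageNet-class targets
y_box = y.reshape((len(y), 4))           # flatten (n, 1, 4) -> (n, 4)
model.fit(x, [y_cls, y_box], epochs=5, batch_size=32)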