Commit d426a026 authored by mihaivanea

The data set is correctly organised. Might need to experiment with lower batch size or change the model.
parent e959b342
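Since the commit message floats lowering the batch size (or swapping the model), here is a minimal sketch of that experiment against the layer stack in the diff below; the batch_size value, optimizer, loss, epochs, and dummy arrays are illustrative assumptions, not part of this commit.

import numpy as np
from keras.models import Model
from keras.layers import Input, Dense, Flatten

# Same Input -> Dense -> Dense -> Flatten -> Dense(4) layout as in the diff.
inputs = Input(shape=(224, 224, 3))
x = Dense(32, activation="relu")(inputs)
x = Dense(10, activation="relu")(x)
x = Flatten()(x)
predictions = Dense(4, activation="relu")(x)

model = Model(inputs=inputs, outputs=predictions)
# Optimizer and loss are placeholders; the real settings live in the
# model.compile(...) hunk of the diff and are not reproduced here.
model.compile(optimizer="adam", loss="mean_squared_error")

# Dummy arrays standing in for the FDDB data, with the shapes used in the diff.
x_train = np.zeros((8, 224, 224, 3), dtype="float32")
y_train = np.zeros((8, 4), dtype="float32")

# batch_size is the knob the commit message suggests lowering.
model.fit(x_train, y_train, batch_size=4, epochs=1)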
@@ -6,9 +6,9 @@ from generate_arrays_fddb import load_arrays_fddb
inputs = Input(shape=(224, 224,3,))
x = Dense(100, activation="relu")(inputs)
x = Dense(32, activation="relu")(x)
x = Dense(32, activation="relu")(inputs)
x = Dense(10, activation="relu")(x)
#x = Dense(4, activation="relu")(x)
x = Flatten()(x)
predictions = Dense(4, activation="relu")(x)
@@ -21,14 +21,14 @@ model.compile(
fddb_path_train = "../fddb/FDDB-folds/FDDB-fold-01-ellipseList.txt"
x_train = np.empty([224,224,3])
y_train = np.empty([4])
x_train = np.empty((0 ,224,224,3))
y_train = np.empty((0 , 4))
for i in range(1, 10):
x_t, y_t = load_arrays_fddb(fddb_path_train[:30] + str(i) + \
fddb_path_train[31:])
np.concatenate((x_train, x_t), axis=0)
np.concatenate((y_train, y_t), axis=0)
for i in range(1, 11):
x_t, y_t = load_arrays_fddb(fddb_path_train[:29] + \
"{}".format(str(i).zfill(2)) + fddb_path_train[31:])
x_train = np.concatenate((x_train, x_t), axis=0)
y_train = np.concatenate((y_train, y_t), axis=0)
model.fit(
x_train, y_train,
@@ -6,12 +6,12 @@ benchmark = imp.load_source("benchmark",
"/vol/bitbucket/mv1315/urop/benchmark.py")
import numpy as np
from skimage.io import imread
from skimage.io import imread, imsave
from os import system
from sys import stdout, argv
from PIL import Image
# generate arrays only for a minibatch for now"
# generate arrays only for a minibatch for now
path = "../fddb/FDDB-folds/FDDB-fold-01-ellipseList.txt"
def convert_to_cartesian(ellip_array):
@@ -22,25 +22,27 @@ def convert_to_cartesian(ellip_array):
minor_r = float(ellip[1])
c_x = float(ellip[3])
c_y = float(ellip[4])
x1 = round(c_x - minor_r, 4)
y1 = round(c_y - major_r, 4)
x2 = round(c_x + minor_r, 4)
y2 = round(c_y + major_r, 4)
x1 = c_x - minor_r
y1 = c_y - major_r
x2 = c_x + minor_r
y2 = c_y + major_r
cartesian_output.append([x1, y1, x2, y2])
break
return cartesian_output
def generate_arrays_fddb(path):
no_labels, labels = (l for l in benchmark.tokenise(path))
for lb in labels:
img = lb[0]
img_path = "../fddb/" + img + ".jpg"
img = Image.open(img_path)
img = img.resize((224, 224), Image.ANTIALIAS)
img.load()
data = np.asarray(img, dtype="float32")
if len(data.shape) == 3:
yield data, convert_to_cartesian(lb[2])
def scale_coordonates(coordonates, original_w, original_h):
scaled_coordonates = []
for c in coordonates:
scaled_x1 = round(float(c[0] / (original_w / 224)), 3)
scaled_y1 = round(float(c[1] / (original_h / 224)), 3)
scaled_x2 = round(float(c[2] / (original_w / 224)), 3)
scaled_y2 = round(float(c[3] / (original_h / 224)), 3)
box = [scaled_x1, scaled_y1, scaled_x2, scaled_y2]
for i in range(len(box)):
if box[i] < 0:
box[i] = 0.
scaled_coordonates.append(box)
return scaled_coordonates
def load_arrays_fddb(path):
no_labels, labels = [l for l in benchmark.tokenise(path)]
@@ -49,19 +51,40 @@ def load_arrays_fddb(path):
for lb in labels:
img = lb[0]
img_path = "../fddb/" + img + ".jpg"
#save_name = img.split('/')[len(img.split('/')) - 1]
img = Image.open(img_path)
width, height = img.size
img = img.resize((224, 224), Image.ANTIALIAS)
img.load()
data = np.asarray(img, dtype="float32")
#print(save_name)
#img.save("./" + save_name + ".jpg")
if len(data.shape) == 3:
x.append(data)
result = convert_to_cartesian(lb[2])
for r in result:
y.append(r)
labels = convert_to_cartesian(lb[2])
scaled_labels = scale_coordonates(labels, width, height)
#in_img = imread("./" + save_name + ".jpg")
#save_name += "_labeled"
#out_img = facenet.add_labels(in_img, scaled_labels, [])
#imsave("./" + save_name + ".jpg", out_img)
for l in scaled_labels:
y.append(l)
x = np.array(x)
y = np.array(y)
return x, y
def generate_arrays_fddb(path):
no_labels, labels = (l for l in benchmark.tokenise(path))
for lb in labels:
img = lb[0]
img_path = "../fddb/" + img + ".jpg"
img = Image.open(img_path)
img = img.resize((224, 224), Image.ANTIALIAS)
img.load()
data = np.asarray(img, dtype="float32")
if len(data.shape) == 3:
yield data, convert_to_cartesian(lb[2])
#generator = generate_arrays_fddb(path)
#
#for g in generator:
@@ -72,4 +95,4 @@ def load_arrays_fddb(path):
#for g in generator:
# print(g)
#print("DONE")
#load_arrays_fddb(path)
load_arrays_fddb(path)