# FaceRecognition/classifier.py
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import math
import os
import pickle

import numpy as np
import tensorflow.compat.v1 as tf
from sklearn.svm import SVC

import facenet


class training:
    """Trains an SVM classifier on FaceNet embeddings extracted from a directory of face images."""

    def __init__(self, datadir, modeldir, classifier_filename):
        self.datadir = datadir                          # directory of aligned face images, one subfolder per class
        self.modeldir = modeldir                        # path to the pretrained FaceNet model
        self.classifier_filename = classifier_filename  # where the trained SVM classifier is pickled

    def main_train(self):
        with tf.Graph().as_default():
            with tf.Session() as sess:
                # Load the dataset: each subdirectory of datadir is one class.
                img_data = facenet.get_dataset(self.datadir)
                path, label = facenet.get_image_paths_and_labels(img_data)
                print('Classes: %d' % len(img_data))
                print('Images: %d' % len(path))

                # Load the pretrained FaceNet model and look up its input/output tensors.
                facenet.load_model(self.modeldir)
                images_placeholder = tf.get_default_graph().get_tensor_by_name("input:0")
                embeddings = tf.get_default_graph().get_tensor_by_name("embeddings:0")
                phase_train_placeholder = tf.get_default_graph().get_tensor_by_name("phase_train:0")
                embedding_size = embeddings.get_shape()[1]

                # Run the images through FaceNet in batches and collect the embeddings.
                print('Extracting features of images for model')
                batch_size = 256
                image_size = 160  # FaceNet expects 160x160 aligned face crops
                nrof_images = len(path)
                nrof_batches_per_epoch = int(math.ceil(1.0 * nrof_images / batch_size))
                emb_array = np.zeros((nrof_images, embedding_size))
                for i in range(nrof_batches_per_epoch):
                    start_index = i * batch_size
                    end_index = min((i + 1) * batch_size, nrof_images)
                    paths_batch = path[start_index:end_index]
                    images = facenet.load_data(paths_batch, False, False, image_size)
                    feed_dict = {images_placeholder: images, phase_train_placeholder: False}
                    emb_array[start_index:end_index, :] = sess.run(embeddings, feed_dict=feed_dict)

                classifier_file_name = os.path.expanduser(self.classifier_filename)

                # Train a linear SVM on the embeddings.
                print('Training Started')
                model = SVC(kernel='linear', probability=True)
                model.fit(emb_array, label)
                class_names = [cls.name.replace('_', ' ') for cls in img_data]

                # Save the trained classifier together with the class names.
                with open(classifier_file_name, 'wb') as outfile:
                    pickle.dump((model, class_names), outfile)
                return classifier_file_name
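

# Usage sketch (not part of the original script): shows how the `training` class
# above is typically driven end to end. All three paths below are hypothetical
# placeholders; substitute your own directory of aligned face images, pretrained
# FaceNet model location, and desired output file for the pickled classifier.
if __name__ == '__main__':
    trainer = training(
        datadir='./aligned_faces',                  # hypothetical: aligned face crops, one folder per person
        modeldir='./model/20180402-114759',         # hypothetical: pretrained FaceNet model directory or .pb file
        classifier_filename='./classifier/classifier.pkl',  # hypothetical: where to write the pickled SVM
    )
    saved_path = trainer.main_train()
    print('Saved classifier to %s' % saved_path)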