-
Notifications
You must be signed in to change notification settings - Fork 99
Expand file tree
/
Copy pathMake_classifier_git.py
More file actions
66 lines (54 loc) · 2.56 KB
/
Make_classifier_git.py
File metadata and controls
66 lines (54 loc) · 2.56 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
import numpy as np
import argparse
import facenet
import detect_face
import os
import sys
import math
import pickle
from sklearn.svm import SVC
# Train an SVM face classifier on FaceNet embeddings.
#
# Pipeline: load an aligned-face dataset -> load the frozen pre-trained
# FaceNet model (20170512-110547.pb) -> run a forward pass to compute one
# embedding per image -> fit a linear SVM on (embedding, label) pairs ->
# pickle (model, class_names) to disk.
#
# NOTE(review): uses the TensorFlow 1.x graph/session API
# (tf.Session, tf.get_default_graph); will not run unmodified on TF 2.x.
with tf.Graph().as_default():
    with tf.Session() as sess:
        # --- Load the aligned face dataset ---------------------------------
        # One subdirectory per identity; labels are integer class indices.
        datadir = '/..Path to align face data../'
        dataset = facenet.get_dataset(datadir)
        paths, labels = facenet.get_image_paths_and_labels(dataset)
        print('Number of classes: %d' % len(dataset))
        print('Number of images: %d' % len(paths))

        # --- Load the frozen FaceNet embedding model -----------------------
        print('Loading feature extraction model')
        modeldir = '/..Path to Pre-trained model../20170512-110547/20170512-110547.pb'
        facenet.load_model(modeldir)

        # Fetch the graph's I/O tensors by name.
        images_placeholder = tf.get_default_graph().get_tensor_by_name("input:0")
        embeddings = tf.get_default_graph().get_tensor_by_name("embeddings:0")
        phase_train_placeholder = tf.get_default_graph().get_tensor_by_name("phase_train:0")
        embedding_size = embeddings.get_shape()[1]

        # --- Forward pass: compute embeddings in batches -------------------
        # Batching bounds peak memory; the last batch may be smaller.
        print('Calculating features for images')
        batch_size = 1000
        image_size = 160  # input resolution expected by this model — TODO confirm
        nrof_images = len(paths)
        nrof_batches_per_epoch = int(math.ceil(1.0 * nrof_images / batch_size))
        emb_array = np.zeros((nrof_images, embedding_size))
        for i in range(nrof_batches_per_epoch):
            start_index = i * batch_size
            end_index = min((i + 1) * batch_size, nrof_images)  # clamp final batch
            paths_batch = paths[start_index:end_index]
            images = facenet.load_data(paths_batch, False, False, image_size)
            # phase_train=False: run the network in inference mode.
            feed_dict = {images_placeholder: images, phase_train_placeholder: False}
            emb_array[start_index:end_index, :] = sess.run(embeddings, feed_dict=feed_dict)

        # --- Train a linear SVM on the embeddings --------------------------
        # probability=True enables predict_proba for downstream confidence
        # thresholds (slower to fit, but required by typical recognizers).
        classifier_filename = '/..Path to save classifier../my_classifier.pkl'
        classifier_filename_exp = os.path.expanduser(classifier_filename)
        print('Training classifier')
        model = SVC(kernel='linear', probability=True)
        model.fit(emb_array, labels)

        # Human-readable class names, index-aligned with the SVM's labels.
        class_names = [cls.name.replace('_', ' ') for cls in dataset]

        # --- Persist (model, class_names) as a single pickle ---------------
        with open(classifier_filename_exp, 'wb') as outfile:
            pickle.dump((model, class_names), outfile)
        print('Saved classifier model to file "%s"' % classifier_filename_exp)
        print('Goodluck')