-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathidentifier.py
More file actions
155 lines (126 loc) · 3.97 KB
/
identifier.py
File metadata and controls
155 lines (126 loc) · 3.97 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
from imutils import paths
import face_recognition
import pickle
import cv2
import os
import mysql.connector
from imgarray import save_array_img, load_array_img
###SQL Database
##connection = mysql.connector.connect(
##
## host="localhost",
## user="root",
## password="nuclesnoor9",
## database = "smart_camera")
##
##cursor = connection.cursor()
##
##def db_creation():
##
## gallary = "CREATE TABLE IF NOT EXISTS capture (\
## pic_id int not null primary key AUTO_INCREMENT,\
## photo longblob not null,\
## time_stamp TIMESTAMP DEFAULT CURRENT_TIMESTAMP)"
## face_ids = "CREATE TABLE IF NOT EXISTS face_ids( \
## name varchar(200) not null,face_id int not null primary key AUTO_INCREMENT,face_enc longblob not null, \
## time_stamp TIMESTAMP DEFAULT CURRENT_TIMESTAMP);"
##
##
##
##
##
##
## faces = "CREATE TABLE IF NOT EXISTS face_id(\
## name varchar(200) not null,\
## pic_id int primary key,\
## foreign key (pic_id) references capture(pic_id))"
##
## cursor.execute(gallary)
## connection.commit()
## cursor.execute(faces)
## connection.commit()
## cursor.execute(face_ids)
## connection.commit()
##
##
##db_creation()
##def save_array_to_binary(numpy_array):
##
## save_array_img(numpy_array, 'image.png', img_format='png')
## with open('image.png', 'rb') as file:
## binaryData = file.read()
## return binaryData
##
##def faceDB(faces_data):
##
## a = faces_data["encodings"]
## b = faces_data["names"]
##
##
## # store as blob in DB
## for enc,name in zip(a, b):
##
##
## encc = save_array_to_binary(enc)
## sqlStatment = "INSERT INTO face_ids (name,face_enc) VALUES (%s,%s)"
## cursor.execute(sqlStatment, (name,encc))
## connection.commit()
def is_image_file(file_name):
    """Return True if *file_name* ends with a known image extension.

    The check is case-sensitive, which is why both '.jpg'/'.JPG' and
    '.png'/'.PNG' are listed explicitly.
    """
    # BUG FIX: the original tuple read "'.rast' '.xbm'" (no comma), so
    # implicit string concatenation fused them into the single bogus
    # extension '.rast.xbm' and neither .rast nor .xbm files matched.
    image_file_extensions = ('.rgb', '.gif', '.pbm', '.pgm', '.ppm', '.tiff',
                             '.rast', '.xbm',
                             '.jpeg', '.jpg', '.JPG', '.bmp', '.png', '.PNG',
                             '.webp', '.exr')
    # str.endswith accepts a tuple of suffixes directly.
    return file_name.endswith(image_file_extensions)
# Anchor all paths to the directory this script lives in, so the script
# works regardless of the current working directory.
BASE_DIR = os.path.dirname(os.path.abspath(__file__))

# Known-person photos are expected under <script dir>/faces/<person name>/.
image_dir = os.path.join(BASE_DIR, 'faces')

# Parallel accumulators: known_encodings[i] is the face embedding for the
# person named known_names[i].
known_encodings, known_names = [], []
## # testing
##
##test1 = False
##
# Load the list of image paths already encoded on previous runs so they
# can be skipped.  BUG FIX: the original crashed with FileNotFoundError
# on the very first run, before 'scanned-faces' exists; a missing file
# simply means nothing has been scanned yet.
try:
    with open('scanned-faces', 'rt') as f:
        # Strip the trailing newline from each recorded path.
        scanned = [line.rstrip('\n') for line in f]
except FileNotFoundError:
    scanned = []
print(scanned)
def get_faces_data(mode):
    """Walk image_dir, encode every new face image, and pickle the results.

    mode: file mode for the pickle output file ('wb' to overwrite,
          'ab' to append), passed straight to open().

    Side effects:
      - extends the module-level known_encodings / known_names lists;
      - appends each processed path to the 'scanned-faces' log so later
        runs skip it (the module-level `scanned` list is the read side);
      - writes 'faces-data.pickle' when at least one encoding exists.
    """
    for root, dirs, files in os.walk(image_dir):
        for file in files:
            path = os.path.join(root, file)
            print(path)
            if is_image_file(file) and path not in scanned:
                # The person's name is the directory the photo sits in.
                name = os.path.basename(root)
                # Load, then convert BGR (OpenCV ordering) to RGB (dlib ordering).
                image = cv2.imread(path)
                if image is None:
                    # BUG FIX: cv2.imread returns None for unreadable or
                    # corrupt files; the original crashed in cvtColor here.
                    continue
                rgb = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
                # Locate faces, then compute one 128-d embedding per face found.
                boxes = face_recognition.face_locations(rgb, model='hog')
                encodings = face_recognition.face_encodings(rgb, boxes)
                for encoding in encodings:
                    known_encodings.append(encoding)
                    known_names.append(name)
                # Record this path so later runs skip it.
                with open('scanned-faces', 'at') as f:
                    f.write(path + '\n')
    # NOTE(review): the scraped source lost indentation, so the original
    # nesting of this save step is ambiguous; pickling once after the full
    # walk is the reading that matches the `mode` parameter's intent.
    faces_data = {"encodings": known_encodings, "names": known_names}
    # Use pickle to save the data into a file for later use.
    if len(faces_data["encodings"]) != 0:
        with open('faces-data.pickle', mode) as f:
            pickle.dump(faces_data, f)
## #save encodings along with their names in dictionary faces_data
## faces_data = {"encodings": known_encodings, "names": known_names}
##
##
##
##
##
## #use pickle to save data into a file for later use
## if len(faces_data["encodings"]) != 0:
## with open('faces-data.pickle', mode) as f:
## pickle.dump(faces_data, f)
## faceDB(faces_data)
#get_faces_data("wb")
#get_faces_data("wb")