import cv2
import sys
-sys.path.append('../')
+from EmoPy.src.fermodel import FERModel
+from pkg_resources import resource_filename

-#Choose the type of Face Expression Model
-from src.fermodel import FERModel
+fontFace = cv2.FONT_HERSHEY_SIMPLEX
+fontScale = 1
+thickness = 2

-#Frame Number
-FRAME_NUM = 0
-
-#Choose the type of face detector cascade you want to use
-cascPath = "~/EmoPy/venv/lib/python3.5/site-packages/cv2/data/haarcascade_frontalface_default.xml"
-faceCascade = cv2.CascadeClassifier(cascPath)
#Specify the camera which you want to use. The default argument is '0'
video_capture = cv2.VideoCapture(0)
+#Capture a smaller image for speed purposes
+video_capture.set(cv2.CAP_PROP_FRAME_WIDTH, 640)
+video_capture.set(cv2.CAP_PROP_FRAME_HEIGHT, 360)
+video_capture.set(cv2.CAP_PROP_FPS, 15)
+
+#Other target emotions can be chosen from the emotion subset defined in fermodel.py in the src directory
+#(see `_check_emotion_set_is_supported`); the model is built once, before the capture loop.
+target_emotions = ['calm', 'anger', 'happiness']
+model = FERModel(target_emotions, verbose=True)
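The three CAP_PROP_* calls added above are requests rather than guarantees: cv2.VideoCapture.set() returns False when a property is not supported by the backend, and even a True return does not promise the camera applied the exact value, so get() is the way to see what is actually in effect. A minimal sketch of a more defensive version of the same setup (not part of the commit; the loop and error message are illustrative only):

import cv2

video_capture = cv2.VideoCapture(0)
if not video_capture.isOpened():
    raise RuntimeError("Could not open camera 0")

#Request a smaller capture size and report what the backend actually applied
for prop, requested in [(cv2.CAP_PROP_FRAME_WIDTH, 640),
                        (cv2.CAP_PROP_FRAME_HEIGHT, 360),
                        (cv2.CAP_PROP_FPS, 15)]:
    supported = video_capture.set(prop, requested)
    print(prop, "requested:", requested, "supported:", supported, "actual:", video_capture.get(prop))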
while True:
    #Capture frame-by-frame
    ret, frame = video_capture.read()
    #Save the captured frame on disk
-    file = '~/EmoPy/models/examples/image_data/image.jpg'
+    file = 'image_data/image.jpg'  #see the imwrite note after the script
    cv2.imwrite(file, frame)
-    #Can choose other target emotions from the emotion subset defined in fermodel.py in src directory. The function
-    # defined as `def _check_emotion_set_is_supported(self):`
-    target_emotions = ['calm', 'anger', 'happiness']
-    model = FERModel(target_emotions, verbose=True)
+
    frameString = model.predict(file)
-    #Display frame number and emotion
-    cv2.putText(frame, 'Frame:' + str(FRAME_NUM), (10, 40), cv2.FONT_HERSHEY_COMPLEX_SMALL, 3, (0, 0, 255), 2, cv2.LINE_AA)
-    cv2.putText(frame, frameString, (10,450), cv2.FONT_HERSHEY_COMPLEX_SMALL, 3, (0,255,0), 2, cv2.LINE_AA)
+
+    #Display the predicted emotion on a filled banner (see the text-sizing note after the loop)
+    retval, baseline = cv2.getTextSize(frameString, fontFace, fontScale, thickness)
+    cv2.rectangle(frame, (0, 0), (20 + retval[0], 50), (0, 0, 0), -1)
+    cv2.putText(frame, frameString, (10, 35), fontFace, fontScale, (255, 255, 255), thickness, cv2.LINE_AA)
    cv2.imshow('Video', frame)
    cv2.waitKey(1)
-    FRAME_NUM += 1
+
    #Press Esc to exit the window
    if cv2.waitKey(1) & 0xFF == 27:
        break
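A note on the text sizing inside the loop: cv2.getTextSize returns ((text_width, text_height), baseline), so retval[0] above is the pixel width of the predicted label, and the rectangle with thickness -1 is a filled banner wide enough to sit behind it. A minimal sketch of the same idea as a reusable helper (the function name and padding values are illustrative, not part of the commit):

import cv2

def draw_label(frame, text, fontFace=cv2.FONT_HERSHEY_SIMPLEX, fontScale=1, thickness=2):
    #getTextSize returns the text's (width, height) plus the baseline offset below the text
    (text_width, text_height), baseline = cv2.getTextSize(text, fontFace, fontScale, thickness)
    #Filled black banner (thickness=-1) sized to the text plus some padding
    cv2.rectangle(frame, (0, 0), (text_width + 20, text_height + baseline + 20), (0, 0, 0), -1)
    #putText's origin is the bottom-left corner of the text
    cv2.putText(frame, text, (10, text_height + 10), fontFace, fontScale,
                (255, 255, 255), thickness, cv2.LINE_AA)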
#Closes all windows
cv2.destroyAllWindows()
-
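One caveat about the per-frame write: the new path 'image_data/image.jpg' is relative to the current working directory, and cv2.imwrite reports failure through its return value (it will typically return False rather than raise if the folder is missing), in which case model.predict would be handed a file that was never written. A minimal sketch of a guarded write (the helper name is illustrative, not part of the commit):

import os
import cv2

def save_frame(frame, path='image_data/image.jpg'):
    #Make sure the relative output folder exists before writing
    os.makedirs(os.path.dirname(path), exist_ok=True)
    #imwrite signals failure through its boolean return value
    if not cv2.imwrite(path, frame):
        raise IOError("Could not write frame to " + path)
    return path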