Skip to content

Commit a20c37d

Browse files
committed
Add eye status predict
Former-commit-id: 5a928a4
1 parent cf41574 commit a20c37d

File tree

10 files changed

+227
-4
lines changed

10 files changed

+227
-4
lines changed

CMakeLists.txt

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -9,7 +9,7 @@ set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -O3")
99
# Current version
1010
set(INSPIRE_FACE_VERSION_MAJOR 1)
1111
set(INSPIRE_FACE_VERSION_MINOR 1)
12-
set(INSPIRE_FACE_VERSION_PATCH 2)
12+
set(INSPIRE_FACE_VERSION_PATCH 3)
1313

1414
# Converts the version number to a string
1515
string(CONCAT INSPIRE_FACE_VERSION_MAJOR_STR ${INSPIRE_FACE_VERSION_MAJOR})

cpp/inspireface/common/face_info/face_object.h

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -312,6 +312,10 @@ class INSPIRE_API FaceObject {
312312
face_id_ = id;
313313
}
314314

315+
std::vector<float> left_eye_status_;
316+
317+
std::vector<float> right_eye_status_;
318+
315319
private:
316320
TRACK_STATE tracking_state_;
317321
// std::shared_ptr<FaceAction> face_action_;

cpp/inspireface/face_context.cpp

Lines changed: 22 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -174,6 +174,28 @@ int32_t FaceContext::FacesProcess(CameraStream &image, const std::vector<HyperFa
174174
// Get eyes status
175175
m_react_left_eye_results_cache_[i] = m_face_pipeline_->eyesStatusCache[0];
176176
m_react_right_eye_results_cache_[i] = m_face_pipeline_->eyesStatusCache[1];
177+
// Special handling: if it is a tracking state, it needs to be filtered
178+
if (face.trackState > 0)
179+
{
180+
auto idx = face.inGroupIndex;
181+
if (idx < m_face_track_->trackingFace.size()) {
182+
auto& target = m_face_track_->trackingFace[idx];
183+
if (target.GetTrackingId() == face.trackId) {
184+
auto new_eye_left = EmaFilter(m_face_pipeline_->eyesStatusCache[0], target.left_eye_status_, 8, 0.2f);
185+
auto new_eye_right = EmaFilter(m_face_pipeline_->eyesStatusCache[1], target.right_eye_status_, 8, 0.2f);
186+
if (face.trackState > 1) {
187+
// The filtered value can be obtained only in the tracking state
188+
m_react_left_eye_results_cache_[i] = new_eye_left;
189+
m_react_right_eye_results_cache_[i] = new_eye_right;
190+
}
191+
192+
} else {
193+
INSPIRE_LOGD("Serialized objects cannot connect to trace objects in memory, and there may be some problems");
194+
}
195+
} else {
196+
INSPIRE_LOGW("The index of the trace object does not match the trace list in memory, and there may be some problems");
197+
}
198+
}
177199
}
178200

179201
}

cpp/inspireface/information.h

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -7,6 +7,6 @@
77

88
#define INSPIRE_FACE_VERSION_MAJOR_STR "1"
99
#define INSPIRE_FACE_VERSION_MINOR_STR "1"
10-
#define INSPIRE_FACE_VERSION_PATCH_STR "2"
10+
#define INSPIRE_FACE_VERSION_PATCH_STR "3"
1111

1212
#endif //HYPERFACEREPO_INFORMATION_H

cpp/inspireface/middleware/utils.h

Lines changed: 19 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -675,6 +675,25 @@ inline cv::Mat ComputeCropMatrix(const cv::Rect2f &rect, int width, int height)
675675
}
676676

677677

678+
// Exponential Moving Average (EMA) filter function
679+
inline float EmaFilter(float currentProb, std::vector<float>& history, int max, float alpha = 0.2f) {
680+
// Add current probability to history
681+
history.push_back(currentProb);
682+
683+
// Trim history if it exceeds max size
684+
if (history.size() > max) {
685+
history.erase(history.begin(), history.begin() + (history.size() - max));
686+
}
687+
688+
// Compute EMA
689+
float ema = history[0]; // Initial value
690+
for (size_t i = 1; i < history.size(); ++i) {
691+
ema = alpha * history[i] + (1 - alpha) * ema;
692+
}
693+
694+
return ema;
695+
}
696+
678697
} // namespace inspire
679698

680699
#endif

cpp/inspireface/pipeline_module/face_pipeline.cpp

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -156,6 +156,7 @@ int32_t FacePipeline::Process(CameraStream &image, const HyperFaceData &face, Fa
156156
auto eyeStatus = (*m_blink_predict_)(pre_crop);
157157
eyesStatusCache[i] = eyeStatus;
158158
}
159+
159160
break;
160161
}
161162
case PROCESS_AGE: {

cpp/inspireface/version.txt

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1 +1 @@
1-
InspireFace Version: 1.1.2
1+
InspireFace Version: 1.1.3

cpp/sample/cpp/sample_face_track_video.cpp

Lines changed: 9 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -129,6 +129,15 @@ int main(int argc, char* argv[]) {
129129
std::cout << "HFMultipleFacePipelineProcessOptional error: " << ret << std::endl;
130130
return ret;
131131
}
132+
HFFaceIntereactionResult result;
133+
ret = HFGetFaceIntereactionResult(session, &result);
134+
if (ret != HSUCCEED)
135+
{
136+
std::cout << "HFGetFaceIntereactionResult error: " << ret << std::endl;
137+
return ret;
138+
}
139+
std::cout << "Left eye status: " << result.leftEyeStatusConfidence[0] << std::endl;
140+
std::cout << "Righ eye status: " << result.rightEyeStatusConfidence[0] << std::endl;
132141

133142
}
134143

cpp/test/unit/api/test_face_pipeline.cpp

Lines changed: 113 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -184,8 +184,120 @@ TEST_CASE("test_FacePipeline", "[face_pipeline]") {
184184

185185
ret = HFReleaseInspireFaceSession(session);
186186
REQUIRE(ret == HSUCCEED);
187+
}
188+
189+
}
190+
191+
TEST_CASE("test_FaceReaction", "[face_reaction]") {
192+
DRAW_SPLIT_LINE
193+
TEST_PRINT_OUTPUT(true);
194+
195+
HResult ret;
196+
HFSessionCustomParameter parameter = {0};
197+
parameter.enable_interaction_liveness = 1;
198+
parameter.enable_liveness = 1;
199+
HFDetectMode detMode = HF_DETECT_MODE_ALWAYS_DETECT;
200+
HFSession session;
201+
ret = HFCreateInspireFaceSession(parameter, detMode, 3, -1, -1, &session);
202+
REQUIRE(ret == HSUCCEED);
203+
204+
SECTION("open eyes") {
205+
// Get a face picture
206+
HFImageStream imgHandle;
207+
auto img = cv::imread(GET_DATA("data/reaction/open_eyes.png"));
208+
ret = CVImageToImageStream(img, imgHandle);
209+
REQUIRE(ret == HSUCCEED);
210+
211+
// Extract basic face information from photos
212+
HFMultipleFaceData multipleFaceData = {0};
213+
ret = HFExecuteFaceTrack(session, imgHandle, &multipleFaceData);
214+
REQUIRE(ret == HSUCCEED);
215+
REQUIRE(multipleFaceData.detectedNum > 0);
216+
217+
// Predict eyes status
218+
ret = HFMultipleFacePipelineProcess(session, imgHandle, &multipleFaceData, parameter);
219+
REQUIRE(ret == HSUCCEED);
220+
221+
// Get results
222+
HFFaceIntereactionResult result;
223+
ret = HFGetFaceIntereactionResult(session, &result);
224+
REQUIRE(multipleFaceData.detectedNum == result.num);
225+
REQUIRE(ret == HSUCCEED);
226+
227+
// Check
228+
CHECK(result.leftEyeStatusConfidence[0] > 0.5f);
229+
CHECK(result.rightEyeStatusConfidence[0] > 0.5f);
230+
231+
ret = HFReleaseImageStream(imgHandle);
232+
REQUIRE(ret == HSUCCEED);
233+
234+
}
235+
236+
SECTION("close eyes") {
237+
// Get a face picture
238+
HFImageStream imgHandle;
239+
auto img = cv::imread(GET_DATA("data/reaction/close_eyes.jpeg"));
240+
ret = CVImageToImageStream(img, imgHandle);
241+
REQUIRE(ret == HSUCCEED);
242+
243+
// Extract basic face information from photos
244+
HFMultipleFaceData multipleFaceData = {0};
245+
ret = HFExecuteFaceTrack(session, imgHandle, &multipleFaceData);
246+
REQUIRE(ret == HSUCCEED);
247+
REQUIRE(multipleFaceData.detectedNum > 0);
248+
249+
// Predict eyes status
250+
ret = HFMultipleFacePipelineProcess(session, imgHandle, &multipleFaceData, parameter);
251+
REQUIRE(ret == HSUCCEED);
252+
253+
// Get results
254+
HFFaceIntereactionResult result;
255+
ret = HFGetFaceIntereactionResult(session, &result);
256+
REQUIRE(multipleFaceData.detectedNum == result.num);
257+
REQUIRE(ret == HSUCCEED);
258+
259+
// Check
260+
CHECK(result.leftEyeStatusConfidence[0] < 0.5f);
261+
CHECK(result.rightEyeStatusConfidence[0] < 0.5f);
262+
263+
ret = HFReleaseImageStream(imgHandle);
264+
REQUIRE(ret == HSUCCEED);
265+
266+
}
267+
268+
SECTION("Close one eye and open the other") {
269+
// Get a face picture
270+
HFImageStream imgHandle;
271+
auto img = cv::imread(GET_DATA("data/reaction/close_open_eyes.jpeg"));
272+
ret = CVImageToImageStream(img, imgHandle);
273+
REQUIRE(ret == HSUCCEED);
187274

275+
// Extract basic face information from photos
276+
HFMultipleFaceData multipleFaceData = {0};
277+
ret = HFExecuteFaceTrack(session, imgHandle, &multipleFaceData);
278+
REQUIRE(ret == HSUCCEED);
279+
REQUIRE(multipleFaceData.detectedNum > 0);
280+
281+
// Predict eyes status
282+
ret = HFMultipleFacePipelineProcess(session, imgHandle, &multipleFaceData, parameter);
283+
REQUIRE(ret == HSUCCEED);
284+
285+
// Get results
286+
HFFaceIntereactionResult result;
287+
ret = HFGetFaceIntereactionResult(session, &result);
288+
REQUIRE(multipleFaceData.detectedNum == result.num);
289+
REQUIRE(ret == HSUCCEED);
290+
291+
// Check
292+
CHECK(result.leftEyeStatusConfidence[0] < 0.5f);
293+
CHECK(result.rightEyeStatusConfidence[0] > 0.5f);
294+
295+
ret = HFReleaseImageStream(imgHandle);
296+
REQUIRE(ret == HSUCCEED);
188297

189298
}
190299

191-
}
300+
ret = HFReleaseInspireFaceSession(session);
301+
REQUIRE(ret == HSUCCEED);
302+
303+
}

cpp/test/unit/api/test_face_track.cpp

Lines changed: 56 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -483,4 +483,60 @@ TEST_CASE("test_MultipleLevelFaceDetect", "[face_detect]") {
483483
}
484484

485485

486+
}
487+
488+
TEST_CASE("test_FaceShowLandmark", "[face_landmark]") {
489+
DRAW_SPLIT_LINE
490+
TEST_PRINT_OUTPUT(true);
491+
492+
std::vector<std::string> images_path = {
493+
GET_DATA("data/reaction/close_open_eyes.jpeg"),
494+
GET_DATA("data/reaction/open_eyes.png"),
495+
GET_DATA("data/reaction/close_eyes.jpeg"),
496+
};
497+
498+
HResult ret;
499+
HFSessionCustomParameter parameter = {0};
500+
HFDetectMode detMode = HF_DETECT_MODE_ALWAYS_DETECT;
501+
HFSession session;
502+
HInt32 detectPixelLevel = 160;
503+
ret = HFCreateInspireFaceSession(parameter, detMode, 20, detectPixelLevel, -1, &session);
504+
REQUIRE(ret == HSUCCEED);
505+
HFSessionSetTrackPreviewSize(session, detectPixelLevel);
506+
HFSessionSetFilterMinimumFacePixelSize(session, 0);
507+
508+
for (size_t i = 0; i < images_path.size(); i++)
509+
{
510+
HFImageStream imgHandle;
511+
auto image = cv::imread(images_path[i]);
512+
ret = CVImageToImageStream(image, imgHandle);
513+
REQUIRE(ret == HSUCCEED);
514+
515+
// Extract basic face information from photos
516+
HFMultipleFaceData multipleFaceData = {0};
517+
ret = HFExecuteFaceTrack(session, imgHandle, &multipleFaceData);
518+
REQUIRE(ret == HSUCCEED);
519+
520+
REQUIRE(multipleFaceData.detectedNum > 0);
521+
522+
523+
HInt32 numOfLmk;
524+
HFGetNumOfFaceDenseLandmark(&numOfLmk);
525+
HPoint2f denseLandmarkPoints[numOfLmk];
526+
ret = HFGetFaceDenseLandmarkFromFaceToken(multipleFaceData.tokens[0], denseLandmarkPoints, numOfLmk);
527+
REQUIRE(ret == HSUCCEED);
528+
for (size_t i = 0; i < numOfLmk; i++) {
529+
cv::Point2f p(denseLandmarkPoints[i].x, denseLandmarkPoints[i].y);
530+
cv::circle(image, p, 0, (0, 0, 255), 2);
531+
}
532+
533+
cv::imwrite("lml_" + std::to_string(i) + ".jpg", image);
534+
535+
ret = HFReleaseImageStream(imgHandle);
536+
REQUIRE(ret == HSUCCEED);
537+
538+
}
539+
ret = HFReleaseInspireFaceSession(session);
540+
REQUIRE(ret == HSUCCEED);
541+
486542
}

0 commit comments

Comments
 (0)