@@ -2,7 +2,7 @@ import type {
   LivenessResponseStream,
   SessionInformation as ServerSessionInformation,
 } from '@aws-sdk/client-rekognitionstreaming';
-import { nanoid } from 'nanoid';
+import { v4 as uuidv4 } from 'uuid';
 import { createMachine, assign, actions, spawn } from 'xstate';

 import {
@@ -138,7 +138,7 @@ export const livenessMachine = createMachine<LivenessContext, LivenessEvent>(
     initial: 'initCamera',
     predictableActionArguments: true,
     context: {
-      challengeId: nanoid(),
+      challengeId: uuidv4(),
       errorMessage: undefined,
       maxFailedAttempts: 0, // Set to 0 for now as we are not allowing front end based retries for streaming
       failedAttempts: 0,
@@ -196,10 +196,7 @@ export const livenessMachine = createMachine<LivenessContext, LivenessEvent>(
         target: 'error',
         actions: 'updateErrorStateForConnectionTimeout',
       },
-      RUNTIME_ERROR: {
-        target: 'error',
-        actions: 'updateErrorStateForRuntime',
-      },
+      RUNTIME_ERROR: { target: 'error', actions: 'updateErrorStateForRuntime' },
       MOBILE_LANDSCAPE_WARNING: {
         target: 'mobileLandscapeWarning',
         actions: 'updateErrorStateForServer',
@@ -223,9 +220,7 @@ export const livenessMachine = createMachine<LivenessContext, LivenessEvent>(
            target: 'waitForDOMAndCameraDetails',
            actions: 'updateVideoMediaStream',
          },
-         onError: {
-           target: '#livenessMachine.permissionDenied',
-         },
+         onError: { target: '#livenessMachine.permissionDenied' },
        },
      },
      waitForDOMAndCameraDetails: {},
@@ -262,14 +257,9 @@ export const livenessMachine = createMachine<LivenessContext, LivenessEvent>(
      start: {
        entry: ['initializeFaceDetector', () => {}],
        always: [
-         {
-           target: 'detectFaceBeforeStart',
-           cond: 'shouldSkipStartScreen',
-         },
+         { target: 'detectFaceBeforeStart', cond: 'shouldSkipStartScreen' },
        ],
-       on: {
-         BEGIN: 'detectFaceBeforeStart',
-       },
+       on: { BEGIN: 'detectFaceBeforeStart' },
      },
      detectFaceBeforeStart: {
        invoke: {
@@ -333,10 +323,7 @@ export const livenessMachine = createMachine<LivenessContext, LivenessEvent>(
          },
          checkFaceDetected: {
            after: {
-             0: {
-               target: 'cancelOvalDrawingTimeout',
-               cond: 'hasSingleFace',
-             },
+             0: { target: 'cancelOvalDrawingTimeout', cond: 'hasSingleFace' },
              100: { target: 'ovalDrawing' },
            },
          },
@@ -345,11 +332,7 @@ export const livenessMachine = createMachine<LivenessContext, LivenessEvent>(
              'cancelOvalDrawingTimeout',
              'sendTimeoutAfterRecordingDelay',
            ],
-           after: {
-             0: {
-               target: 'checkRecordingStarted',
-             },
-           },
+           after: { 0: { target: 'checkRecordingStarted' } },
          },
          checkRecordingStarted: {
            entry: () => {},
@@ -401,10 +384,7 @@ export const livenessMachine = createMachine<LivenessContext, LivenessEvent>(
                target: 'delayBeforeFlash',
                cond: 'isFaceMovementAndLightChallenge',
              },
-             {
-               target: 'success',
-               cond: 'isFaceMovementChallenge',
-             },
+             { target: 'success', cond: 'isFaceMovementChallenge' },
            ],
          },
          delayBeforeFlash: {
@@ -415,21 +395,15 @@ export const livenessMachine = createMachine<LivenessContext, LivenessEvent>(
            invoke: {
              src: 'flashColors',
              onDone: [
-               {
-                 target: 'success',
-                 cond: 'hasFreshnessColorShown',
-               },
+               { target: 'success', cond: 'hasFreshnessColorShown' },
                {
                  target: 'flashFreshnessColors',
                  actions: 'updateFreshnessDetails',
                },
              ],
            },
          },
-         success: {
-           entry: 'stopRecording',
-           type: 'final',
-         },
+         success: { entry: 'stopRecording', type: 'final' },
        },
        onDone: 'uploading',
      },
@@ -469,10 +443,7 @@ export const livenessMachine = createMachine<LivenessContext, LivenessEvent>(
      retryableTimeout: {
        entry: 'updateFailedAttempts',
        always: [
-         {
-           target: 'timeout',
-           cond: 'shouldTimeoutOnFailedAttempts',
-         },
+         { target: 'timeout', cond: 'shouldTimeoutOnFailedAttempts' },
          { target: 'start' },
        ],
      },
@@ -723,35 +694,23 @@ export const livenessMachine = createMachine<LivenessContext, LivenessEvent>(
     sendTimeoutAfterOvalDrawingDelay: actions.send(
       {
         type: 'RUNTIME_ERROR',
-        data: {
-          message: 'Client failed to draw oval.',
-        },
+        data: { message: 'Client failed to draw oval.' },
       },
-      {
-        delay: 5000,
-        id: 'ovalDrawingTimeout',
-      }
+      { delay: 5000, id: 'ovalDrawingTimeout' }
     ),
     cancelOvalDrawingTimeout: actions.cancel('ovalDrawingTimeout'),
     sendTimeoutAfterRecordingDelay: actions.send(
       {
         type: 'RUNTIME_ERROR',
-        data: {
-          message: 'Client failed to start recording.',
-        },
+        data: { message: 'Client failed to start recording.' },
       },
-      {
-        delay: 5000,
-        id: 'recordingTimeout',
-      }
+      { delay: 5000, id: 'recordingTimeout' }
     ),
     cancelRecordingTimeout: actions.cancel('recordingTimeout'),
     sendTimeoutAfterOvalMatchDelay: actions.send(
       {
         type: 'TIMEOUT',
-        data: {
-          message: 'Client timed out waiting for face to match oval.',
-        },
+        data: { message: 'Client timed out waiting for face to match oval.' },
       },
       {
         delay: (context) => {
@@ -852,15 +811,13 @@ export const livenessMachine = createMachine<LivenessContext, LivenessEvent>(
       videoEl!.pause();
     },
     resetContext: assign({
-      challengeId: nanoid(),
+      challengeId: uuidv4(),
       maxFailedAttempts: 0, // Set to 0 for now as we are not allowing front end based retries for streaming
       failedAttempts: 0,
       componentProps: (context) => context.componentProps,
       parsedSessionInformation: (_) => undefined,
       videoAssociatedParams: (_) => {
-        return {
-          videoConstraints: STATIC_VIDEO_CONSTRAINTS,
-        };
+        return { videoConstraints: STATIC_VIDEO_CONSTRAINTS };
       },
       ovalAssociatedParams: (_) => undefined,
       errorState: (_) => undefined,
@@ -939,9 +896,7 @@ export const livenessMachine = createMachine<LivenessContext, LivenessEvent>(

      // Get initial stream to enumerate devices with non-empty labels
      const initialStream = await navigator.mediaDevices.getUserMedia({
-       video: {
-         ...videoConstraints,
-       },
+       video: { ...videoConstraints },
        audio: false,
      });
      const devices = await navigator.mediaDevices.enumerateDevices();
@@ -979,10 +934,7 @@ export const livenessMachine = createMachine<LivenessContext, LivenessEvent>(
      let realVideoDeviceStream = initialStream;
      if (!isInitialStreamFromRealDevice) {
        realVideoDeviceStream = await navigator.mediaDevices.getUserMedia({
-         video: {
-           ...videoConstraints,
-           deviceId: { exact: deviceId },
-         },
+         video: { ...videoConstraints, deviceId: { exact: deviceId } },
          audio: false,
        });
      }
@@ -1168,12 +1120,7 @@ export const livenessMachine = createMachine<LivenessContext, LivenessEvent>(
        videoEl: videoEl!,
      });

-     return {
-       faceMatchState,
-       ovalDetails,
-       scaleFactor,
-       initialFace,
-     };
+     return { faceMatchState, ovalDetails, scaleFactor, initialFace };
    },
    async detectFaceAndMatchOval(context) {
      const { parsedSessionInformation } = context;
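The substantive change in this diff is swapping nanoid for the uuid package when generating the challenge ID (in the initial context and in resetContext); the remaining hunks only collapse multi-line object literals onto single lines. A minimal standalone sketch of the new ID generation, assuming the uuid package and its type definitions are installed as dependencies:

// Hypothetical snippet illustrating the replacement shown in the hunks above.
import { v4 as uuidv4 } from 'uuid';

// uuidv4() returns a random RFC 4122 version 4 UUID string,
// e.g. '110ec58a-a0f2-4ac4-8393-c866d813b8d1' (illustrative value only).
const challengeId: string = uuidv4();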