@@ -18,6 +18,52 @@
 
 from shared.sfn_middleware import PersonalizeResource
 
+RESOURCE = "batchInferenceJob"
+STATUS = "batchInferenceJob.status"
+CONFIG = {
+    "jobName": {
+        "source": "event",
+        "path": "serviceConfig.jobName",
+    },
+    "solutionVersionArn": {
+        "source": "event",
+        "path": "serviceConfig.solutionVersionArn",
+    },
+    "filterArn": {
+        "source": "event",
+        "path": "serviceConfig.filterArn",
+        "default": "omit",
+    },
+    "numResults": {
+        "source": "event",
+        "path": "serviceConfig.numResults",
+        "default": "omit",
+    },
+    "jobInput": {
+        "source": "event",
+        "path": "serviceConfig.jobInput",
+    },
+    "jobOutput": {"source": "event", "path": "serviceConfig.jobOutput"},
+    "roleArn": {"source": "environment", "path": "ROLE_ARN"},
+    "batchInferenceJobConfig": {
+        "source": "event",
+        "path": "serviceConfig.batchInferenceJobConfig",
+        "default": "omit",
+    },
+    "maxAge": {
+        "source": "event",
+        "path": "workflowConfig.maxAge",
+        "default": "omit",
+        "as": "seconds",
+    },
+    "timeStarted": {
+        "source": "event",
+        "path": "workflowConfig.timeStarted",
+        "default": "omit",
+        "as": "iso8601",
+    },
+}
+
 logger = Logger()
 tracer = Tracer()
 metrics = Metrics()
@@ -26,51 +72,9 @@
 @metrics.log_metrics
 @tracer.capture_lambda_handler
 @PersonalizeResource(
-    resource="batchInferenceJob",
-    status="batchInferenceJob.status",
-    config={
-        "jobName": {
-            "source": "event",
-            "path": "serviceConfig.jobName",
-        },
-        "solutionVersionArn": {
-            "source": "event",
-            "path": "serviceConfig.solutionVersionArn",
-        },
-        "filterArn": {
-            "source": "event",
-            "path": "serviceConfig.filterArn",
-            "default": "omit",
-        },
-        "numResults": {
-            "source": "event",
-            "path": "serviceConfig.numResults",
-            "default": "omit",
-        },
-        "jobInput": {
-            "source": "event",
-            "path": "serviceConfig.jobInput",
-        },
-        "jobOutput": {"source": "event", "path": "serviceConfig.jobOutput"},
-        "roleArn": {"source": "environment", "path": "ROLE_ARN"},
-        "batchInferenceJobConfig": {
-            "source": "event",
-            "path": "serviceConfig.batchInferenceJobConfig",
-            "default": "omit",
-        },
-        "maxAge": {
-            "source": "event",
-            "path": "workflowConfig.maxAge",
-            "default": "omit",
-            "as": "seconds",
-        },
-        "timeStarted": {
-            "source": "event",
-            "path": "workflowConfig.timeStarted",
-            "default": "omit",
-            "as": "iso8601",
-        },
-    },
+    resource=RESOURCE,
+    status=STATUS,
+    config=CONFIG,
 )
 def lambda_handler(event: Dict[str, Any], context: LambdaContext) -> Dict:
     """Create a batch inference job in Amazon Personalize based on the configuration in `event`
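
For context on what this mapping consumes, below is a hypothetical `event` shape implied by the dotted `path` entries in `CONFIG`. The job name, ARN, and S3 paths are illustrative placeholders, and the `jobInput`/`jobOutput` structures follow the Personalize CreateBatchInferenceJob API; note that `roleArn` is not part of the event at all, since `CONFIG` sources it from the `ROLE_ARN` environment variable.

event = {
    "serviceConfig": {
        "jobName": "my-batch-inference-job",  # placeholder value
        "solutionVersionArn": "arn:aws:personalize:us-east-1:111111111111:solution/my-solution/aaaaaaaa",
        "numResults": 25,  # optional: "default": "omit" drops it when absent
        "jobInput": {"s3DataSource": {"path": "s3://my-bucket/batch/input.json"}},
        "jobOutput": {"s3DataDestination": {"path": "s3://my-bucket/batch/output/"}},
    },
    "workflowConfig": {
        "timeStarted": "2021-10-01T00:00:00Z",  # read as ISO 8601 per "as": "iso8601"
    },
}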
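
And a minimal sketch of how a `source`/`path`/`default` mapping like `CONFIG` can be resolved into keyword arguments. This is an assumption for illustration, not the `shared.sfn_middleware` implementation; the `resolve_config` name and the exact coercion behavior for the `as` hints are hypothetical.

import os
from typing import Any, Dict


def resolve_config(config: Dict[str, Dict[str, str]], event: Dict[str, Any]) -> Dict[str, Any]:
    """Resolve a {name: {source, path, ...}} mapping into call kwargs (sketch)."""
    kwargs: Dict[str, Any] = {}
    for name, spec in config.items():
        if spec["source"] == "environment":
            value: Any = os.environ.get(spec["path"])
        else:  # "event": walk the dotted path into the payload
            value = event
            for key in spec["path"].split("."):
                value = value.get(key) if isinstance(value, dict) else None
        if value is None:
            if spec.get("default") == "omit":
                continue  # optional parameter: leave it out of the API call
            raise KeyError(f"missing required parameter: {name}")
        # an "as" hint ("seconds", "iso8601") would apply type coercion here
        kwargs[name] = value
    return kwargs

Whatever the middleware's exact resolution logic, this commit only hoists the mapping to module-level constants so the decorator call stays readable; the resolved parameters are unchanged.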