import json
import make_vcsp_2018
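
# Overview (added note): this function is wired to an S3 "object created"
# trigger. Each upload regenerates the content library's JSON metadata via
# make_vcsp_2018.make_vcsp_s3 (called below), skipping the .json files the
# script itself writes so the trigger does not loop.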


def lambda_handler(event, context):
    buf = 'Lambda Content Library Handler. '
    try:
        buf = buf + "Event triggered by file: " + event["Records"][0]["s3"]["object"]["key"]
    except Exception:
        # No S3 record in the event: report that and return early.
        print("No event key found.")
        buf = buf + " No S3 event key found."
        return {
            'statusCode': 200,
            'body': buf
        }

    # If we don't filter out .json files, this script keeps firing in a loop:
    # the script itself writes JSON metadata files back to the bucket, and each
    # of those writes would trigger the function again. A suffix filter in the
    # S3 trigger configuration could also solve this (see the sketch below),
    # but only one suffix is allowed per trigger, so you would need a trigger
    # for every possible filetype that might get uploaded to the bucket.
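    # For reference, a minimal sketch of that alternative: a suffix rule in the
    # bucket's notification configuration (the ARN and ".ova" suffix are
    # placeholders, not values from this project):
    #   {"LambdaFunctionConfigurations": [{
    #       "LambdaFunctionArn": "arn:aws:lambda:REGION:ACCOUNT:function:NAME",
    #       "Events": ["s3:ObjectCreated:*"],
    #       "Filter": {"Key": {"FilterRules": [{"Name": "suffix",
    #                                           "Value": ".ova"}]}}}]}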
    filename = event["Records"][0]["s3"]["object"]["key"].lower()
    if filename.endswith(".json"):
        filter_status = "filtered"
    else:
        # Example usage: make_vcsp_2018.make_vcsp_s3('my-library', 'library-bucket/lib1', False, 'us-east-2')
        # Arguments:
        #   my-library          - name of the content library
        #   library-bucket/lib1 - S3 bucket name and folder name
        #   False               - do not skip SSL validation
        #   us-east-2           - default region
        # The region is passed directly to the boto library so no environment
        # variables need to be configured in Lambda.
        make_vcsp_2018.make_vcsp_s3('REPLACE-ME', 'REPLACE-ME', False, 'REPLACE-ME')
        filter_status = "unfiltered"

    return {
        'statusCode': 200,
        'body': buf,
        'filterStatus': filter_status
    }
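

# A minimal local smoke test, not part of the deployed handler: the sample
# event below is hypothetical but follows the standard S3 put-event record
# shape this handler reads. A .json key exercises the filtered path without
# touching S3 or calling make_vcsp_s3.
if __name__ == '__main__':
    sample_event = {
        "Records": [
            {"s3": {"object": {"key": "lib1/items.json"}}}
        ]
    }
    print(lambda_handler(sample_event, None))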