@@ -27,53 +27,57 @@ def _is_duplicate_file(self, file_key: str) -> bool:
         return self._batch_audit_repository.is_duplicate_file(file_key)
 
     def apply_filter(self, batch_file_created_event: BatchFileCreatedEvent) -> None:
-        filename = batch_file_created_event["filename"]
-        message_id = batch_file_created_event["message_id"]
-        supplier = batch_file_created_event["supplier"]
-        vaccine_type = batch_file_created_event["vaccine_type"]
+        try:
+            filename = batch_file_created_event["filename"]
+            message_id = batch_file_created_event["message_id"]
+            supplier = batch_file_created_event["supplier"]
+            vaccine_type = batch_file_created_event["vaccine_type"]
 
-        # debug
-        print("SAW ----------- apply_filter DEBUG -------------")
-        print(f"filename: {filename}")
-        print(f"message_id: {message_id}")
-        print(f"supplier: {supplier}")
-        print(f"vaccine_type: {vaccine_type}")
+            # debug
+            print("SAW ----------- apply_filter DEBUG -------------")
+            print(f"filename: {filename}")
+            print(f"message_id: {message_id}")
+            print(f"supplier: {supplier}")
+            print(f"vaccine_type: {vaccine_type}")
 
-        print("apply_filter...checking for duplicate file...")
-        if self._is_duplicate_file(filename):
-            print("apply_filter...duplicate file found")
-            # Mark as processed and return without error so next event will be picked up from queue
-            logger.error("A duplicate file has already been processed. Filename: %s", filename)
-            self._batch_audit_repository.update_status(
-                message_id,
-                f"{FileStatus.NOT_PROCESSED} - {FileNotProcessedReason.DUPLICATE}"
-            )
-            self._batch_file_repo.upload_failure_ack(batch_file_created_event)
-            self._batch_file_repo.move_source_file_to_archive(filename)
-            return
+            print("apply_filter...checking for duplicate file...")
+            if self._is_duplicate_file(filename):
+                print("apply_filter...duplicate file found")
+                # Mark as processed and return without error so next event will be picked up from queue
+                logger.error("A duplicate file has already been processed. Filename: %s", filename)
+                self._batch_audit_repository.update_status(
+                    message_id,
+                    f"{FileStatus.NOT_PROCESSED} - {FileNotProcessedReason.DUPLICATE}"
+                )
+                self._batch_file_repo.upload_failure_ack(batch_file_created_event)
+                self._batch_file_repo.move_source_file_to_archive(filename)
+                return
 
-        print("apply_filter...check for event already processing for supplier and vacc type...")
-        if self._batch_audit_repository.is_event_processing_or_failed_for_supplier_and_vacc_type(
-            supplier,
-            vaccine_type
-        ):
-            print("apply_filter...event already processing for supplier and vacc type found")
-            # Raise error so event is returned to queue and retried again later
-            logger.info("Batch event already processing for supplier and vacc type. Filename: %s", filename)
-            raise EventAlreadyProcessingForSupplierAndVaccTypeError(f"Batch event already processing for supplier: "
-                                                                    f"{supplier} and vacc type: {vaccine_type}")
+            print("apply_filter...check for event already processing for supplier and vacc type...")
+            if self._batch_audit_repository.is_event_processing_or_failed_for_supplier_and_vacc_type(
+                supplier,
+                vaccine_type
+            ):
+                print("apply_filter...event already processing for supplier and vacc type found")
+                # Raise error so event is returned to queue and retried again later
+                logger.info("Batch event already processing for supplier and vacc type. Filename: %s", filename)
+                raise EventAlreadyProcessingForSupplierAndVaccTypeError(f"Batch event already processing for supplier: "
+                                                                        f"{supplier} and vacc type: {vaccine_type}")
 
-        print("apply_filter...forwarding file for processing...")
-        self._queue_client.send_message(
-            QueueUrl=QUEUE_URL,
-            MessageBody=json.dumps(batch_file_created_event),
-            MessageGroupId=f"{supplier}_{vaccine_type}"
-        )
-        print("apply_filter...updating status to processing...")
-        self._batch_audit_repository.update_status(message_id, FileStatus.PROCESSING)
+            print("apply_filter...forwarding file for processing...")
+            self._queue_client.send_message(
+                QueueUrl=QUEUE_URL,
+                MessageBody=json.dumps(batch_file_created_event),
+                MessageGroupId=f"{supplier}_{vaccine_type}"
+            )
+            print("apply_filter...updating status to processing...")
+            self._batch_audit_repository.update_status(message_id, FileStatus.PROCESSING)
 
-        print("apply_filter...sending log to firehose...")
-        successful_log_message = f"File forwarded for processing by ECS. Filename: {filename}"
-        logger.info(successful_log_message)
-        send_log_to_firehose({**batch_file_created_event, "message": successful_log_message})
-        print("apply_filter...done")
+            print("apply_filter...sending log to firehose...")
+            successful_log_message = f"File forwarded for processing by ECS. Filename: {filename}"
+            logger.info(successful_log_message)
+            send_log_to_firehose({**batch_file_created_event, "message": successful_log_message})
+            print("apply_filter...done")
+        except Exception as ex:
+            logger.error("Error in batch processor filter service: %s", str(ex))
+            raise ex
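
For context on the forwarding step in the diff above: `MessageGroupId` is only accepted by SQS FIFO queues, so `QUEUE_URL` presumably points at a `.fifo` queue, and grouping by `f"{supplier}_{vaccine_type}"` serialises events for the same supplier/vaccine-type pair. Below is a minimal sketch of that call pattern, not code from this PR; the queue URL and event values are hypothetical, and it assumes the queue has content-based deduplication enabled (otherwise a `MessageDeduplicationId` would also be required).

```python
# Illustrative sketch only -- mirrors the apply_filter forwarding step above.
import json
import boto3

sqs = boto3.client("sqs")
QUEUE_URL = "https://sqs.eu-west-2.amazonaws.com/123456789012/batch-files.fifo"  # hypothetical FIFO queue

batch_file_created_event = {
    "filename": "example.csv",
    "message_id": "abc-123",
    "supplier": "SUPPLIER_A",
    "vaccine_type": "FLU",
}

sqs.send_message(
    QueueUrl=QUEUE_URL,
    MessageBody=json.dumps(batch_file_created_event),
    # One message group per supplier/vaccine type, so events for the same pair
    # are delivered in order and handled one at a time.
    MessageGroupId=f"{batch_file_created_event['supplier']}_{batch_file_created_event['vaccine_type']}",
)
```

Because `apply_filter` raises `EventAlreadyProcessingForSupplierAndVaccTypeError` (and, with the new `except` block, re-raises any other failure), the surrounding consumer presumably lets the exception propagate so the source message is not acknowledged and is retried later, as the in-code comment describes.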