55import json
66from io import StringIO
77from contextlib import ExitStack
8- from moto import mock_s3
8+ from moto import mock_s3 , mock_dynamodb
99from boto3 import client as boto3_client
1010
1111from tests .utils .values_for_ack_backend_tests import (
1919from tests .utils .utils_for_ack_backend_tests import generate_event
2020
2121with patch .dict ("os.environ" , MOCK_ENVIRONMENT_DICT ):
22+ from clients import REGION_NAME
2223 from ack_processor import lambda_handler
2324
2425s3_client = boto3_client ("s3" )
26+ dynamodb_client = boto3_client ("dynamodb" , region_name = REGION_NAME )
2527
2628
27- @patch .dict ("os.environ" , MOCK_ENVIRONMENT_DICT )
2829@mock_s3
30+ @mock_dynamodb
31+ @patch .dict ("os.environ" , MOCK_ENVIRONMENT_DICT )
2932class TestLoggingDecorators (unittest .TestCase ):
3033 """Tests for the ack lambda logging decorators"""
3134
3235 def setUp (self ):
33- GenericSetUp (s3_client )
36+ GenericSetUp (s3_client , None , dynamodb_client )
3437
        # MOCK SOURCE FILE WITH 100 ROWS TO SIMULATE THE SCENARIO WHERE THE ACK FILE IS NOT FULL.
3639 # TODO: Test all other scenarios.
@@ -59,8 +62,8 @@ def run(self, result=None):
5962 # Any uses of the logger in other files will confound the tests and should be patched here.
6063 patch ("update_ack_file.logger" ),
6164 # Time is incremented by 1.0 for each call to time.time for ease of testing.
62- # Range is set to a large number (100 ) due to many calls being made to time.time for some tests.
63- patch ("logging_decorators.time.time" , side_effect = [0.0 + i for i in range (100 )]),
65+ # Range is set to a large number (300 ) due to many calls being made to time.time for some tests.
66+ patch ("logging_decorators.time.time" , side_effect = [0.0 + i for i in range (300 )]),
6467 ]
6568
6669 # Set up the ExitStack. Note that patches need to be explicitly started so that they will be applied even when
@@ -85,11 +88,16 @@ def extract_all_call_args_for_logger_error(self, mock_logger):
8588 """Extracts all arguments for logger.error."""
8689 return [args [0 ] for args , _ in mock_logger .error .call_args_list ]
8790
88- def expected_lambda_handler_logs (self , success : bool , number_of_rows , diagnostics = None ):
91+ def expected_lambda_handler_logs (self , success : bool , number_of_rows , ingestion_complete = False , diagnostics = None ):
8992 """Returns the expected logs for the lambda handler function."""
9093 # Mocking of timings is such that the time taken is 2 seconds for each row,
91- # plus 2 seconds for the handler if it succeeds (i.e. it calls update_ack_file) or 1 second if it doesn't
92- time_taken = f"{ number_of_rows * 2 + 2 } .0s" if success else f"{ number_of_rows * 2 + 1 } .0s"
94+ # plus 2 seconds for the handler if it succeeds (i.e. it calls update_ack_file) or 1 second if it doesn't;
95+ # plus an extra second if ingestion is complete
96+ if success :
97+ time_taken = f"{ number_of_rows * 2 + 3 } .0s" if ingestion_complete else f"{ number_of_rows * 2 + 2 } .0s"
98+ else :
99+ time_taken = f"{ number_of_rows * 2 + 1 } .0s"
100+
93101 base_log = (
94102 ValidValues .lambda_handler_success_expected_log
95103 if success
@@ -139,7 +147,7 @@ def test_splunk_logging_missing_data(self):
139147 expected_first_logger_info_data = {** InvalidValues .Logging_with_no_values }
140148
141149 expected_first_logger_error_data = self .expected_lambda_handler_logs (
142- success = False , number_of_rows = 1 , diagnostics = "'NoneType' object has no attribute 'replace'"
150+ success = False , number_of_rows = 1 , ingestion_complete = False , diagnostics = "'NoneType' object has no attribute 'replace'"
143151 )
144152
145153 first_logger_info_call_args = json .loads (self .extract_all_call_args_for_logger_info (mock_logger )[0 ])
@@ -305,6 +313,90 @@ def test_splunk_logging_multiple_with_diagnostics(
305313 ]
306314 )
307315
316+ def test_splunk_update_ack_file_not_logged (self ):
317+ self .maxDiff = None
318+ """Tests that update_ack_file is not logged if we have sent acks for less than the whole file"""
319+ # send 98 messages
320+ messages = []
321+ for i in range (1 , 99 ):
322+ message_value = "test" + str (i )
323+ messages .append ({"row_id" : message_value })
324+
325+ with (
326+ patch ("logging_decorators.send_log_to_firehose" ) as mock_send_log_to_firehose ,
327+ patch ("logging_decorators.logger" ) as mock_logger ,
328+ patch ("update_ack_file.handle_ingestion_complete" ) as mock_handle_ingestion_complete , # noqa: E999
329+ ):
330+ result = lambda_handler (generate_event (messages ), context = {})
331+
332+ self .assertEqual (result , EXPECTED_ACK_LAMBDA_RESPONSE_FOR_SUCCESS )
333+
334+ expected_secondlast_logger_info_data = {
335+ ** ValidValues .mock_message_expected_log_value ,
336+ "message_id" : "test98" ,
337+ }
338+ expected_last_logger_info_data = self .expected_lambda_handler_logs (success = True , number_of_rows = 98 )
339+
340+ all_logger_info_call_args = self .extract_all_call_args_for_logger_info (mock_logger )
341+ secondlast_logger_info_call_args = json .loads (all_logger_info_call_args [97 ])
342+ last_logger_info_call_args = json .loads (all_logger_info_call_args [98 ])
343+
344+ self .assertEqual (secondlast_logger_info_call_args , expected_secondlast_logger_info_data )
345+ self .assertEqual (last_logger_info_call_args , expected_last_logger_info_data )
346+
347+ mock_send_log_to_firehose .assert_has_calls (
348+ [
349+ call (secondlast_logger_info_call_args ),
350+ call (last_logger_info_call_args ),
351+ ]
352+ )
353+ mock_handle_ingestion_complete .assert_not_called ()
354+
355+ def test_splunk_update_ack_file_logged (self ):
356+ """Tests that update_ack_file is logged if we have sent acks for the whole file"""
357+ # send 99 messages
358+ messages = []
359+ for i in range (1 , 100 ):
360+ message_value = "test" + str (i )
361+ messages .append ({"row_id" : message_value })
362+
363+ with (
364+ patch ("logging_decorators.send_log_to_firehose" ) as mock_send_log_to_firehose ,
365+ patch ("logging_decorators.logger" ) as mock_logger ,
366+ patch ("update_ack_file.handle_ingestion_complete" ) as mock_handle_ingestion_complete , # noqa: E999
367+ ):
368+ result = lambda_handler (generate_event (messages ), context = {})
369+
370+ self .assertEqual (result , EXPECTED_ACK_LAMBDA_RESPONSE_FOR_SUCCESS )
371+
372+ expected_thirdlast_logger_info_data = {
373+ ** ValidValues .mock_message_expected_log_value ,
374+ "message_id" : "test99" ,
375+ }
376+ expected_secondlast_logger_info_data = {
377+ ** ValidValues .upload_ack_file_expected_log ,
378+ "message_id" : "test1" ,
379+ "time_taken" : "1.0s"
380+ }
381+ expected_last_logger_info_data = self .expected_lambda_handler_logs (success = True , number_of_rows = 99 , ingestion_complete = True )
382+
383+ all_logger_info_call_args = self .extract_all_call_args_for_logger_info (mock_logger )
384+ thirdlast_logger_info_call_args = json .loads (all_logger_info_call_args [98 ])
385+ secondlast_logger_info_call_args = json .loads (all_logger_info_call_args [99 ])
386+ last_logger_info_call_args = json .loads (all_logger_info_call_args [100 ])
387+ self .assertEqual (thirdlast_logger_info_call_args , expected_thirdlast_logger_info_data )
388+ self .assertEqual (secondlast_logger_info_call_args , expected_secondlast_logger_info_data )
389+ self .assertEqual (last_logger_info_call_args , expected_last_logger_info_data )
390+
391+ mock_send_log_to_firehose .assert_has_calls (
392+ [
393+ call (thirdlast_logger_info_call_args ),
394+ call (secondlast_logger_info_call_args ),
395+ call (last_logger_info_call_args ),
396+ ]
397+ )
398+ mock_handle_ingestion_complete .assert_called ()
399+
308400
if __name__ == "__main__":
    # Allow running this test module directly (outside a test runner).
    unittest.main()