Commit 5fe7d91 (parent: bb4cbe2)

handler decorator tests
File tree: 5 files changed (+122, -75 lines)


redis_sync/src/record_processor.py

Lines changed: 0 additions & 1 deletion
@@ -16,7 +16,6 @@ def process_record(record: S3EventRecord) -> dict:
     file_key = record.get_object_key()
 
     base_log_data = {
-        "bucket_name": bucket_name,
         "file_key": file_key
     }
 

redis_sync/src/redis_sync.py

Lines changed: 6 additions & 2 deletions
@@ -26,16 +26,20 @@ def handler(event, _):
             return {"status": "success", "message": "No records found in event"}
         else:
             error_count = 0
+            file_keys = []
             for record in s3_event.get_s3_records():
                 record_result = process_record(record)
+                file_keys.append(record_result["file_key"])
                 if record_result["status"] == "error":
                     error_count += 1
             if error_count > 0:
                 logger.error("Processed %d records with %d errors", record_count, error_count)
-                return {"status": "error", "message": f"Processed {record_count} records with {error_count} errors"}
+                return {"status": "error", "message": f"Processed {record_count} records with {error_count} errors",
+                        "file_keys": file_keys}
             else:
                 logger.info("Successfully processed all %d records", record_count)
-                return {"status": "success", "message": f"Successfully processed {record_count} records"}
+                return {"status": "success", "message": f"Successfully processed {record_count} records",
+                        "file_keys": file_keys}
     else:
         logger.info("No records found in event")
         return {"status": "success", "message": "No records found in event"}

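With this change the handler accumulates each record's file_key and attaches the list to both the success and error responses, alongside status and message. A minimal, self-contained sketch of that accumulation pattern follows; the stubbed process_record and the sample records are illustrative placeholders, not the repository's code.

# Sketch of the accumulation pattern used by handler(); process_record is
# stubbed here and the sample records are placeholders, not repository data.
def process_record(record: dict) -> dict:
    # Pretend every record succeeds and report its object key.
    return {"status": "success", "file_key": record["key"]}


def summarise(records: list) -> dict:
    error_count = 0
    file_keys = []
    for record in records:
        result = process_record(record)
        file_keys.append(result["file_key"])
        if result["status"] == "error":
            error_count += 1
    status = "error" if error_count > 0 else "success"
    return {"status": status, "file_keys": file_keys}


print(summarise([{"key": "a.json"}, {"key": "b.json"}]))
# {'status': 'success', 'file_keys': ['a.json', 'b.json']}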
redis_sync/tests/test_handler.py

Lines changed: 15 additions & 6 deletions
@@ -46,9 +46,12 @@ def test_handler_success(self):
             mock_event = {'Records': [self.s3_vaccine]}
             self.mock_get_s3_records.return_value = [self.s3_vaccine]
             with patch("redis_sync.process_record") as mock_record_processor:
-                mock_record_processor.return_value = {'status': 'success', 'message': 'Processed successfully'}
+                mock_record_processor.return_value = {'status': 'success', 'message': 'Processed successfully',
+                                                      'file_key': 'test-key'}
                 result = redis_sync.handler(mock_event, None)
-                self.assertEqual(result, {'status': 'success', 'message': 'Successfully processed 1 records'})
+                self.assertEqual(result["status"], "success")
+                self.assertEqual(result["message"], "Successfully processed 1 records")
+                self.assertEqual(result["file_keys"], ['test-key'])
 
     def test_handler_failure(self):
         with patch("log_decorator.logging_decorator", lambda prefix=None: (lambda f: f)):
@@ -57,7 +60,7 @@ def test_handler_failure(self):
             mock_event = {'Records': [self.s3_vaccine]}
             with patch("redis_sync.process_record") as mock_record_processor:
                 self.mock_get_s3_records.return_value = [self.s3_vaccine]
-                mock_record_processor.side_effect = Exception("Processing error")
+                mock_record_processor.side_effect = Exception("Processing error 1")
 
                 result = redis_sync.handler(mock_event, None)
 
@@ -77,7 +80,7 @@ def test_handler_exception(self):
             mock_event = {'Records': [self.s3_vaccine]}
             self.mock_get_s3_records.return_value = [self.s3_vaccine]
             with patch("redis_sync.process_record") as mock_record_processor:
-                mock_record_processor.side_effect = Exception("Processing error")
+                mock_record_processor.side_effect = Exception("Processing error 2")
                 result = redis_sync.handler(mock_event, None)
                 self.assertEqual(result, {'status': 'error', 'message': 'Error processing S3 event'})
 
@@ -99,9 +102,15 @@ def test_handler_multi_record(self):
            # ]
             self.mock_get_s3_records.return_value = [self.s3_vaccine, self.s3_supplier]
             with patch("redis_sync.process_record") as mock_record_processor:
-                mock_record_processor.return_value = {'status': 'success', 'message': 'Processed successfully'}
+                mock_record_processor.side_effect = [{'status': 'success', 'message': 'Processed successfully',
+                                                      'file_key': 'test-key1'},
+                                                     {'status': 'success', 'message': 'Processed successfully',
+                                                      'file_key': 'test-key2'}]
                 result = redis_sync.handler(mock_event, None)
-                self.assertEqual(result, {'status': 'success', 'message': 'Successfully processed 2 records'})
+                self.assertEqual(result['status'], 'success')
+                self.assertEqual(result['message'], 'Successfully processed 2 records')
+                self.assertEqual(result['file_keys'][0], 'test-key1')
+                self.assertEqual(result['file_keys'][1], 'test-key2')
 
     def test_handler_read_event(self):
         with patch("log_decorator.logging_decorator", lambda prefix=None: (lambda f: f)):

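The reworked multi-record test relies on unittest.mock's side_effect accepting a list, so each successive call to the patched process_record returns the next dict and each record contributes its own file_key. A standalone illustration of that behaviour:

# side_effect with a list: each call to the mock returns the next item, which
# is how the multi-record test feeds a distinct file_key per record.
from unittest.mock import MagicMock

processor = MagicMock()
processor.side_effect = [
    {"status": "success", "file_key": "test-key1"},
    {"status": "success", "file_key": "test-key2"},
]

print(processor("record-1")["file_key"])  # test-key1
print(processor("record-2")["file_key"])  # test-key2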
redis_sync/tests/test_handler_with_decorator.py

Lines changed: 101 additions & 64 deletions
@@ -8,6 +8,11 @@
 
 
 class TestHandlerDecorator(unittest.TestCase):
+    """ Unit tests for the handler decorator in redis_sync.py
+    these will check what is sent to firehose and the logging
+    Note: test_handler.py will check the actual business logic of the handler
+    the decorator is used to log the function execution and send logs to firehose
+    """
     s3_vaccine = {
         's3': {
             'bucket': {'name': 'test-bucket1'},
@@ -39,87 +44,91 @@ def setUp(self):
     def tearDown(self):
         patch.stopall()
 
-    def test_handler_decorator_success(self):  # , mock_firehose_client):
+    def test_handler_decorator_success(self):
         mock_event = {'Records': [self.s3_vaccine]}
         self.mock_get_s3_records.return_value = [self.s3_vaccine]
+        bucket_name = self.s3_vaccine['s3']['bucket']['name']
+        file_key = self.s3_vaccine['s3']['object']['key']
+        self.mock_record_processor.return_value = {'status': 'success',
+                                                   'message': 'Successfully processed 1 records',
+                                                   'bucket_name': bucket_name,
+                                                   'file_key': file_key}
 
-        result = handler(mock_event, None)
+        handler(mock_event, None)
 
-        self.assertEqual(result, {'status': 'success', 'message': 'Successfully processed 1 records'})
-
-        self.mock_firehose_client.put_record.assert_called()
-
-        # Get the actual call arguments
+        # Get put_record arguments
         args, kwargs = self.mock_firehose_client.put_record.call_args
         record = kwargs.get("Record") or args[1]
         data_bytes = record["Data"]
         log_json = data_bytes.decode("utf-8")
         log_dict = json.loads(log_json)
 
-        # Now check for the expected content
+        # check expected content
         event = log_dict["event"]
         self.assertIn("function_name", event)
         self.assertEqual(event["function_name"], "redis_sync_handler")
         self.assertEqual(event["status"], "success")
         self.assertEqual(event["message"], "Successfully processed 1 records")
+        self.assertEqual(event["file_keys"], [file_key])
 
-    def test_handler_failure1(self):
+    def test_handler_decorator_failure(self):
         mock_event = {'Records': [self.s3_vaccine]}
 
         self.mock_get_s3_records.return_value = [self.s3_vaccine]
-        self.mock_record_processor.side_effect = Exception("Processing error")
-
-        result = handler(mock_event, None)
-
-        self.assertEqual(result, {'status': 'error', 'message': 'Error processing S3 event'})
-        self.mock_firehose_client.put_record.assert_called()
-
-        # Get the actual call arguments
-        args, kwargs = self.mock_firehose_client.put_record.call_args
-        record = kwargs.get("Record") or args[1]
-        data_bytes = record["Data"]
-        log_json = data_bytes.decode("utf-8")
-        log_dict = json.loads(log_json)
-        # Now check for the expected content
-        event = log_dict["event"]
-        self.assertIn("function_name", event)
-        self.assertEqual(event["function_name"], "redis_sync_handler")
-        self.assertEqual(event["status"], "error")
-        self.assertEqual(event["message"], "Error processing S3 event")
+        bucket_name = self.s3_vaccine['s3']['bucket']['name']
+        file_key = self.s3_vaccine['s3']['object']['key']
+        with patch("redis_sync.process_record") as mock_record_processor:
+            mock_record_processor.return_value = {'status': 'error',
+                                                  'message': 'my-error',
+                                                  'bucket_name': bucket_name,
+                                                  'file_key': file_key}
+
+            handler(mock_event, None)
+
+            # Get put_record arguments
+            args, kwargs = self.mock_firehose_client.put_record.call_args
+            record = kwargs.get("Record") or args[1]
+            data_bytes = record["Data"]
+            log_json = data_bytes.decode("utf-8")
+            log_dict = json.loads(log_json)
+            # check expected content
+            event = log_dict["event"]
+            self.assertIn("function_name", event)
+            self.assertEqual(event["function_name"], "redis_sync_handler")
+            self.assertEqual(event["status"], "error")
+            self.assertEqual(event["message"], 'Processed 1 records with 1 errors')
+            self.assertEqual(event["file_keys"], [file_key])
 
     def test_handler_decorator_no_records1(self):
         mock_event = {'Records': []}
 
         self.mock_get_s3_records.return_value = []
         self.mock_record_processor.return_value = {'status': 'success', 'message': 'No records found in event'}
 
-        result = handler(mock_event, None)
+        handler(mock_event, None)
 
-        self.assertEqual(result, {'status': 'success', 'message': 'No records found in event'})
-        self.mock_firehose_client.put_record.assert_called()
-
-        # Get the actual call arguments
+        # Get put_record arguments
        args, kwargs = self.mock_firehose_client.put_record.call_args
         record = kwargs.get("Record") or args[1]
         data_bytes = record["Data"]
         log_json = data_bytes.decode("utf-8")
         log_dict = json.loads(log_json)
-        # Now check for the expected content
+        # check expected content
         event = log_dict["event"]
         self.assertIn("function_name", event)
         self.assertEqual(event["function_name"], "redis_sync_handler")
         self.assertEqual(event["status"], "success")
+        # filename is empty since no records were processed
+        self.assertEqual(event["message"], "No records found in event")
+        self.assertNotIn("file_key", event)
 
-    def test_handler_exception1(self):
+    def test_handler_decorator_exception(self):
         mock_event = {'Records': [self.s3_vaccine]}
-        self.mock_get_s3_records.return_value = [self.s3_vaccine]
-        self.mock_record_processor.side_effect = Exception("Processing error")
+        self.mock_get_s3_records.side_effect = Exception("Test exception")
 
-        result = handler(mock_event, None)
+        handler(mock_event, None)
 
-        self.assertEqual(result, {'status': 'error', 'message': 'Error processing S3 event'})
-        self.mock_firehose_client.put_record.assert_called()
-        # Get the actual call arguments
+        # check put_record arguments
         args, kwargs = self.mock_firehose_client.put_record.call_args
         record = kwargs.get("Record") or args[1]
         data_bytes = record["Data"]
@@ -134,43 +143,71 @@ def test_handler_exception1(self):
     def test_handler_with_empty_event(self):
         self.mock_get_s3_records.return_value = []
 
-        result = handler({}, None)
-
-        self.assertEqual(result, {'status': 'success', 'message': 'No records found in event'})
-
-    def test_handler_multi_record(self):
-        mock_event = {'Records': [self.s3_vaccine, self.s3_supplier]}
-
-        self.mock_get_s3_records.return_value = [
-            S3EventRecord(self.s3_vaccine),
-            S3EventRecord(self.s3_supplier)
-        ]
-        self.mock_record_processor.return_value = {'status': 'success', 'message': 'Processed successfully'}
+        handler({}, None)
 
-        result = handler(mock_event, None)
-
-        self.assertEqual(result, {'status': 'success', 'message': 'Successfully processed 2 records'})
-        self.mock_firehose_client.put_record.assert_called()
-        # Get the actual call arguments
+        # get put_record arguments
         args, kwargs = self.mock_firehose_client.put_record.call_args
         record = kwargs.get("Record") or args[1]
         data_bytes = record["Data"]
         log_json = data_bytes.decode("utf-8")
         log_dict = json.loads(log_json)
-        # Now check for the expected content
+        # check expected content
         event = log_dict["event"]
         self.assertIn("function_name", event)
         self.assertEqual(event["function_name"], "redis_sync_handler")
         self.assertEqual(event["status"], "success")
+        self.assertEqual(event["message"], "No records found in event")
+
+    def test_handler_multi_record(self):
+        mock_event = {'Records': [self.s3_vaccine, self.s3_supplier]}
+
+        with patch.object(self.mock_get_s3_records,
+                          "return_value",
+                          [S3EventRecord(self.s3_vaccine), S3EventRecord(self.s3_supplier)]):
+            # repatch mock_record_processor
+            with patch("redis_sync.process_record") as mock_record_processor:
+                # Mock the return value for each record
+                mock_record_processor.side_effect = [
+                    {'status': 'success', 'message': 'Processed successfully',
+                     'file_key': RedisCacheKey.DISEASE_MAPPING_FILE_KEY},
+                    {'status': 'success', 'message': 'Processed successfully',
+                     'file_key': RedisCacheKey.PERMISSIONS_CONFIG_FILE_KEY}
+                ]
+
+                handler(mock_event, None)
+
+                # Get put_record arguments
+                args, kwargs = self.mock_firehose_client.put_record.call_args
+                record = kwargs.get("Record") or args[1]
+                data_bytes = record["Data"]
+                log_json = data_bytes.decode("utf-8")
+                log_dict = json.loads(log_json)
+                # check expected content
+                event = log_dict["event"]
+                self.assertIn("function_name", event)
+                self.assertEqual(event["function_name"], "redis_sync_handler")
+                self.assertEqual(event["status"], "success")
 
     # test to check that event_read is called when "read" key is passed in the event
     def test_handler_read_event(self):
         mock_event = {'read': 'myhash'}
-        mock_read_event_response = {'field1': 'value1'}
+        return_key = 'field1'
+        return_value = 'value1'
+        mock_read_event_response = {return_key: return_value}
 
         with patch('redis_sync.read_event') as mock_read_event:
             mock_read_event.return_value = mock_read_event_response
-            result = handler(mock_event, None)
-
-            mock_read_event.assert_called_once()
-            self.assertEqual(result, mock_read_event_response)
+            handler(mock_event, None)
+
+            # get put_record arguments
+            args, kwargs = self.mock_firehose_client.put_record.call_args
+            record = kwargs.get("Record") or args[1]
+            data_bytes = record["Data"]
+            log_json = data_bytes.decode("utf-8")
+            log_dict = json.loads(log_json)
+            # check expected content
+            event = log_dict["event"]
+            self.assertIn("function_name", event)
+            self.assertEqual(event["function_name"], "redis_sync_handler")
            actual_return_value = event.get(return_key)
+            self.assertEqual(actual_return_value, return_value)

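These decorator tests no longer assert on the handler's return value; they decode the Record passed to firehose_client.put_record and inspect log_dict["event"], which implies the decorator merges the handler's returned dict (status, message, file_keys) into the event payload it ships to Firehose. The real logging_decorator is not part of this commit, so the following is only a rough sketch under that assumption; the stream name and client wiring are placeholders inferred from what the tests assert.

# Rough sketch only: the repository's logging_decorator is not shown in this
# diff. STREAM_NAME and the boto3 wiring are assumptions; the payload layout
# ({"event": {...}} JSON-encoded into Record["Data"]) is inferred from the tests.
import functools
import json

import boto3

STREAM_NAME = "example-log-stream"  # placeholder, not the real stream name
firehose_client = boto3.client("firehose")


def logging_decorator(prefix=None):
    def decorator(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            # The tests expect event["function_name"] == "redis_sync_handler".
            event = {"function_name": f"{prefix}_handler" if prefix else func.__name__}
            result = func(*args, **kwargs)
            if isinstance(result, dict):
                event.update(result)  # merges status, message, file_keys, ...
            firehose_client.put_record(
                DeliveryStreamName=STREAM_NAME,
                Record={"Data": json.dumps({"event": event}).encode("utf-8")},
            )
            return result
        return wrapper
    return decorator

The tests only depend on that payload shape: a JSON object under "event" containing the function name plus whatever the wrapped handler returned.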
redis_sync/tests/test_record_processor.py

Lines changed: 0 additions & 2 deletions
@@ -40,7 +40,6 @@ def test_record_processor_success(self):
         result = process_record(S3EventRecord(self.s3_vaccine))
 
         self.assertEqual(result["status"], "success")
-        self.assertEqual(result["bucket_name"], "test-bucket1")
         self.assertEqual(result["file_key"], RedisCacheKey.DISEASE_MAPPING_FILE_KEY)
 
     def test_record_processor_failure(self):
@@ -50,7 +49,6 @@ def test_record_processor_failure(self):
         result = process_record(S3EventRecord(self.s3_vaccine))
 
         self.assertEqual(result["status"], "error")
-        self.assertEqual(result["bucket_name"], "test-bucket1")
         self.assertEqual(result["file_key"], RedisCacheKey.DISEASE_MAPPING_FILE_KEY)
 
     def test_record_processor_exception(self):
