|
1 | 1 | ''' unit tests for redis_sync.py ''' |
2 | 2 | import unittest |
| 3 | +import importlib |
3 | 4 | from unittest.mock import patch |
4 | | -from redis_sync import handler |
5 | | -from s3_event import S3EventRecord |
6 | 5 | from constants import RedisCacheKey |
| 6 | +import redis_sync |
7 | 7 |
|
8 | 8 |
|
9 | 9 | class TestHandler(unittest.TestCase): |
@@ -40,74 +40,84 @@ def tearDown(self): |
40 | 40 | self.logger_exception_patcher.stop() |
41 | 41 |
|
42 | 42 | def test_handler_success(self): |
43 | | - mock_event = {'Records': [self.s3_vaccine]} |
44 | | - self.mock_get_s3_records.return_value = [self.s3_vaccine] |
45 | | - |
46 | | - result = handler(mock_event, None) |
47 | | - |
48 | | - self.assertTrue(result) |
49 | | - self.mock_logger_info.assert_called_with("Successfully processed all %d records", 1) |
| 43 | + with patch("log_decorator.logging_decorator", lambda prefix=None: (lambda f: f)): |
| 44 | + importlib.reload(redis_sync) |
| 45 | + mock_event = {'Records': [self.s3_vaccine]} |
| 46 | + self.mock_get_s3_records.return_value = [self.s3_vaccine] |
| 47 | + with patch("redis_sync.process_record") as mock_record_processor: |
| 48 | + mock_record_processor.return_value = {'status': 'success', 'message': 'Processed successfully', |
| 49 | + 'file_key': 'test-key'} |
| 50 | + result = redis_sync.handler(mock_event, None) |
| 51 | + self.assertEqual(result["status"], "success") |
| 52 | + self.assertEqual(result["message"], "Successfully processed 1 records") |
| 53 | + self.assertEqual(result["file_keys"], ['test-key']) |
50 | 54 |
|
51 | 55 | def test_handler_failure(self): |
52 | | - mock_event = {'Records': [self.s3_vaccine]} |
| 56 | + with patch("log_decorator.logging_decorator", lambda prefix=None: (lambda f: f)): |
| 57 | + importlib.reload(redis_sync) |
53 | 58 |
|
54 | | - self.mock_get_s3_records.return_value = [self.s3_vaccine] |
55 | | - self.mock_record_processor.side_effect = Exception("Processing error") |
| 59 | + mock_event = {'Records': [self.s3_vaccine]} |
| 60 | + with patch("redis_sync.process_record") as mock_record_processor: |
| 61 | + self.mock_get_s3_records.return_value = [self.s3_vaccine] |
| 62 | + mock_record_processor.side_effect = Exception("Processing error 1") |
56 | 63 |
|
57 | | - result = handler(mock_event, None) |
| 64 | + result = redis_sync.handler(mock_event, None) |
58 | 65 |
|
59 | | - self.assertEqual(result, {'status': 'error', 'message': 'Error processing S3 event'}) |
60 | | - self.mock_logger_info.assert_called_with("Processing S3 event with %d records", 1) |
| 66 | + self.assertEqual(result, {'status': 'error', 'message': 'Error processing S3 event'}) |
61 | 67 |
|
62 | 68 | def test_handler_no_records(self): |
63 | | - mock_event = {'Records': []} |
64 | | - |
65 | | - self.mock_get_s3_records.return_value = [] |
66 | | - |
67 | | - result = handler(mock_event, None) |
68 | | - |
69 | | - self.assertTrue(result) |
70 | | - self.mock_logger_info.assert_called_with("Successfully processed all %d records", 0) |
| 69 | + with patch("log_decorator.logging_decorator", lambda prefix=None: (lambda f: f)): |
| 70 | + importlib.reload(redis_sync) |
| 71 | + mock_event = {'Records': []} |
| 72 | + self.mock_get_s3_records.return_value = [] |
| 73 | + result = redis_sync.handler(mock_event, None) |
| 74 | + self.assertEqual(result, {'status': 'success', 'message': 'No records found in event'}) |
71 | 75 |
|
72 | 76 | def test_handler_exception(self): |
73 | | - mock_event = {'Records': [self.s3_vaccine]} |
74 | | - self.mock_get_s3_records.return_value = [self.s3_vaccine] |
75 | | - self.mock_record_processor.side_effect = Exception("Processing error") |
76 | | - |
77 | | - result = handler(mock_event, None) |
78 | | - |
79 | | - self.assertEqual(result, {'status': 'error', 'message': 'Error processing S3 event'}) |
80 | | - self.mock_logger_info.assert_called_with("Processing S3 event with %d records", 1) |
| 77 | + with patch("log_decorator.logging_decorator", lambda prefix=None: (lambda f: f)): |
| 78 | + importlib.reload(redis_sync) |
| 79 | + mock_event = {'Records': [self.s3_vaccine]} |
| 80 | + self.mock_get_s3_records.return_value = [self.s3_vaccine] |
| 81 | + with patch("redis_sync.process_record") as mock_record_processor: |
| 82 | + mock_record_processor.side_effect = Exception("Processing error 2") |
| 83 | + result = redis_sync.handler(mock_event, None) |
| 84 | + self.assertEqual(result, {'status': 'error', 'message': 'Error processing S3 event'}) |
81 | 85 |
|
82 | 86 | def test_handler_with_empty_event(self): |
83 | | - self.mock_get_s3_records.return_value = [] |
84 | | - |
85 | | - result = handler({}, None) |
86 | | - |
87 | | - self.assertEqual(result, {'status': 'success', 'message': 'No records found in event'}) |
| 87 | + with patch("log_decorator.logging_decorator", lambda prefix=None: (lambda f: f)): |
| 88 | + importlib.reload(redis_sync) |
| 89 | + self.mock_get_s3_records.return_value = [] |
| 90 | + result = redis_sync.handler({}, None) |
| 91 | + self.assertEqual(result, {'status': 'success', 'message': 'No records found in event'}) |
88 | 92 |
|
89 | 93 | def test_handler_multi_record(self): |
90 | | - mock_event = {'Records': [self.s3_vaccine, self.s3_supplier]} |
91 | | - |
92 | | - self.mock_get_s3_records.return_value = [ |
93 | | - S3EventRecord(self.s3_vaccine), |
94 | | - S3EventRecord(self.s3_supplier) |
95 | | - ] |
96 | | - self.mock_record_processor.return_value = True |
97 | | - |
98 | | - result = handler(mock_event, None) |
| 94 | + with patch("log_decorator.logging_decorator", lambda prefix=None: (lambda f: f)): |
| 95 | + importlib.reload(redis_sync) |
| 96 | + mock_event = {'Records': [self.s3_vaccine, self.s3_supplier]} |
| 97 | +            # NOTE: `from s3_event import S3EventRecord` was removed above; restore that import if wrapped records are needed here |
| 98 | + # self.mock_get_s3_records.return_value = [ |
| 99 | + # S3EventRecord(self.s3_vaccine), |
| 100 | + # S3EventRecord(self.s3_supplier) |
| 101 | + # ] |
| 102 | + self.mock_get_s3_records.return_value = [self.s3_vaccine, self.s3_supplier] |
| 103 | + with patch("redis_sync.process_record") as mock_record_processor: |
| 104 | + mock_record_processor.side_effect = [{'status': 'success', 'message': 'Processed successfully', |
| 105 | + 'file_key': 'test-key1'}, |
| 106 | + {'status': 'success', 'message': 'Processed successfully', |
| 107 | + 'file_key': 'test-key2'}] |
| 108 | + result = redis_sync.handler(mock_event, None) |
| 109 | + self.assertEqual(result['status'], 'success') |
| 110 | + self.assertEqual(result['message'], 'Successfully processed 2 records') |
| 111 | + self.assertEqual(result['file_keys'][0], 'test-key1') |
| 112 | + self.assertEqual(result['file_keys'][1], 'test-key2') |
99 | 113 |
|
100 | | - self.assertTrue(result) |
101 | | - self.mock_logger_info.assert_called_with("Processing S3 event with %d records", 2) |
102 | | - |
103 | | - # test to check that event_read is called when "read" key is passed in the event |
104 | 114 | def test_handler_read_event(self): |
105 | | - mock_event = {'read': 'myhash'} |
106 | | - mock_read_event_response = {'field1': 'value1'} |
107 | | - |
108 | | - with patch('redis_sync.read_event') as mock_read_event: |
109 | | - mock_read_event.return_value = mock_read_event_response |
110 | | - result = handler(mock_event, None) |
111 | | - |
112 | | - mock_read_event.assert_called_once() |
113 | | - self.assertEqual(result, mock_read_event_response) |
| 115 | + with patch("log_decorator.logging_decorator", lambda prefix=None: (lambda f: f)): |
| 116 | + importlib.reload(redis_sync) |
| 117 | + mock_event = {'read': 'myhash'} |
| 118 | + mock_read_event_response = {'field1': 'value1'} |
| 119 | + with patch('redis_sync.read_event') as mock_read_event: |
| 120 | + mock_read_event.return_value = mock_read_event_response |
| 121 | + result = redis_sync.handler(mock_event, None) |
| 122 | + mock_read_event.assert_called_once() |
| 123 | + self.assertEqual(result, mock_read_event_response) |