Skip to content

Commit 4841d4b

Browse files
committed
Implement ServiceReturn
1 parent ea949b9 commit 4841d4b

File tree

5 files changed

+119
-26
lines changed

5 files changed

+119
-26
lines changed
Lines changed: 13 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -1,14 +1,15 @@
11
from redis_cacher import RedisCacher
22
from common.clients import logger
33
from common.s3_event import S3EventRecord
4+
from common.service_return import ServiceReturn
45
'''
56
Record Processor
67
This module processes individual S3 records from an event.
78
It is used to upload data to Redis ElastiCache.
89
'''
910

1011

11-
def process_record(record: S3EventRecord) -> dict:
12+
def process_record(record: S3EventRecord) -> ServiceReturn:
1213
try:
1314
logger.info("Processing S3 r bucket: %s, key: %s",
1415
record.get_bucket_name(), record.get_object_key())
@@ -20,17 +21,21 @@ def process_record(record: S3EventRecord) -> dict:
2021
}
2122

2223
try:
23-
result = RedisCacher.upload(bucket_name, file_key)
24-
result.update(base_log_data)
25-
return result
24+
service_result = RedisCacher.upload(bucket_name, file_key)
25+
if service_result.is_success:
26+
result = service_result.value
27+
result.update(base_log_data)
28+
return ServiceReturn(value=result)
29+
else:
30+
return ServiceReturn(
31+
status=500,
32+
message=f"Failed to upload to cache for filename '{file_key}': {service_result.message}")
2633

2734
except Exception as error: # pylint: disable=broad-except
2835
logger.exception("Error uploading to cache for filename '%s'", file_key)
2936
error_data = {"status": "error", "message": str(error)}
3037
error_data.update(base_log_data)
31-
return error_data
38+
return ServiceReturn(value=error_data)
3239

3340
except Exception: # pylint: disable=broad-except
34-
msg = "Error processing record"
35-
logger.exception(msg)
36-
return {"status": "error", "message": msg}
41+
return ServiceReturn(value={"status": "error", "message": "Error processing record"})

lambdas/redis_sync/src/redis_cacher.py

Lines changed: 13 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -5,22 +5,28 @@
55
from common.clients import logger
66
from common.redis_client import get_redis_client
77
from common.s3_reader import S3Reader
8+
from common.service_return import ServiceReturn
89

910

1011
class RedisCacher:
1112
"""Class to handle interactions with ElastiCache (Redis) for configuration files."""
1213

1314
@staticmethod
14-
def upload(bucket_name: str, file_key: str) -> dict:
15+
def upload(bucket_name: str, file_key: str) -> ServiceReturn:
1516
try:
1617
logger.info("Upload from s3 to Redis cache. file '%s'. bucket '%s'", file_key, bucket_name)
1718

1819
# get from s3
19-
config_file_content = S3Reader.read(bucket_name, file_key)
20-
if isinstance(config_file_content, str):
21-
config_file_content = json.loads(config_file_content)
20+
result = S3Reader.read(bucket_name, file_key)
21+
if result.is_success:
22+
config_file_content = result.value
23+
if isinstance(config_file_content, str):
24+
config_file_content = json.loads(config_file_content)
2225

23-
logger.info("Config file content for '%s': %s", file_key, config_file_content)
26+
logger.info("Config file content for '%s': %s", file_key, config_file_content)
27+
else:
28+
logger.error("Failed to read S3 file '%s': %s", file_key, result.message)
29+
return ServiceReturn(status=500, message=result.message)
2430

2531
# Transform
2632
redis_mappings = transform_map(config_file_content, file_key)
@@ -40,8 +46,8 @@ def upload(bucket_name: str, file_key: str) -> dict:
4046
redis_client.hdel(key, *fields_to_delete)
4147
logger.info("Deleted mapping fields for %s: %s", key, fields_to_delete)
4248

43-
return {"status": "success", "message": f"File {file_key} uploaded to Redis cache."}
49+
return ServiceReturn(value={"status": "success", "message": f"File {file_key} uploaded to Redis cache."})
4450
except Exception:
4551
msg = f"Error uploading file '{file_key}' to Redis cache"
4652
logger.exception(msg)
47-
return {"status": "error", "message": msg}
53+
return ServiceReturn(status=500, message=msg)

lambdas/redis_sync/src/redis_sync.py

Lines changed: 16 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -4,6 +4,7 @@
44
from common.log_decorator import logging_decorator
55
from common.redis_client import get_redis_client
66
from common.s3_event import S3Event
7+
from common.service_return import ServiceReturn
78
'''
89
Event Processor
910
The Business Logic for the Redis Sync Lambda Function.
@@ -15,18 +16,20 @@ def _process_all_records(s3_records: list) -> dict:
1516
error_count = 0
1617
file_keys = []
1718
for record in s3_records:
18-
record_result = process_record(record)
19-
file_keys.append(record_result["file_key"])
20-
if record_result["status"] == "error":
19+
service_result = process_record(record)
20+
file_keys.append(service_result.value.get("file_key"))
21+
if service_result.status == 500:
2122
error_count += 1
2223
if error_count > 0:
2324
logger.error("Processed %d records with %d errors", record_count, error_count)
24-
return {"status": "error", "message": f"Processed {record_count} records with {error_count} errors",
25-
"file_keys": file_keys}
25+
return ServiceReturn(value={"status": "error", "message": f"Processed {record_count} records with {error_count} errors",
26+
"file_keys": file_keys})
2627
else:
2728
logger.info("Successfully processed all %d records", record_count)
28-
return {"status": "success", "message": f"Successfully processed {record_count} records",
29-
"file_keys": file_keys}
29+
return ServiceReturn(
30+
value={"status": "success",
31+
"message": f"Successfully processed {record_count} records",
32+
"file_keys": file_keys})
3033

3134

3235
@logging_decorator(prefix="redis_sync", stream_name=STREAM_NAME)
@@ -44,7 +47,12 @@ def handler(event, _):
4447
logger.info(no_records)
4548
return {"status": "success", "message": no_records}
4649
else:
47-
return _process_all_records(s3_records)
50+
service_result = _process_all_records(s3_records)
51+
if service_result.is_success:
52+
return service_result.value
53+
else:
54+
return {"status": "error", "message": service_result.value.get("message"),
55+
"file_keys": service_result.value.get("file_keys", [])}
4856
else:
4957
logger.info(no_records)
5058
return {"status": "success", "message": no_records}

lambdas/shared/src/common/s3_reader.py

Lines changed: 4 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,5 @@
11
from common.clients import s3_client, logger
2+
from service_return import ServiceReturn
23

34

45
class S3Reader:
@@ -11,11 +12,11 @@ class S3Reader:
1112
"""
1213

1314
@staticmethod
14-
def read(bucket_name, file_key):
15+
def read(bucket_name, file_key) -> ServiceReturn:
1516
try:
1617
s3_file = s3_client.get_object(Bucket=bucket_name, Key=file_key)
17-
return s3_file["Body"].read().decode("utf-8")
18+
return ServiceReturn(value=s3_file["Body"].read().decode("utf-8"))
1819

1920
except Exception as error: # pylint: disable=broad-except
2021
logger.exception("Error reading S3 file '%s' from bucket '%s'", file_key, bucket_name)
21-
raise error
22+
return ServiceReturn(status=500, message="Error reading S3 file", exception=error)
Lines changed: 73 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,73 @@
1+
import inspect
from typing import Any, Optional

from responses import logger
3+
4+
5+
class ServiceReturn:
    """Lightweight result wrapper for service-layer calls.

    Carries an HTTP-style status code, a human-readable message, an
    optional payload (``value``) and the optional ``exception`` that
    caused a failure. Instances are truthy on success, so callers can
    write ``if result: ...`` or check ``result.is_success``.
    """

    def __init__(
        self,
        status: int = 200,
        message: str = "OK",
        value: Any = None,
        exception: Optional[Exception] = None
    ):
        """
        Args:
            status: HTTP-style status code; 200 means success.
            message: Human-readable description of the outcome.
            value: Optional payload produced by the call.
            exception: Optional exception that caused the failure.
        """
        self.status = status
        self.message = message
        self.value = value
        self.exception = exception
        # Captured at construction so failures can be traced back to the caller.
        self.call_location = self._get_call_location()

        # Log every non-success result once, at creation time.
        if self.status != 200 and (self.exception or self.message):
            msg = {
                "status": self.status,
                "Message": self.message,
                "Exception": self.exception,
                "Location": self.call_location
            }
            logger.warning(f"ServiceReturn : {msg}")

    def _get_call_location(self):
        """Return ``function (file:line)`` of the frame that created this object."""
        # Get the name of the function 2 frames up
        frame = inspect.currentframe()
        outer_frames = inspect.getouterframes(frame)
        if len(outer_frames) >= 3:
            caller_frame = outer_frames[2]
            return f"{caller_frame.function} ({caller_frame.filename}:{caller_frame.lineno})"
        return "Unknown"

    def to_string(self):
        """Return a one-line description including exception type and origin."""
        return f"{self.message} ({type(self.exception).__name__}: {self.exception}) @ {self.call_location}"

    def __bool__(self):
        # BUGFIX: success must also require a non-error status. Previously
        # ``ServiceReturn(status=500, message=...)`` with no exception
        # evaluated as success, so callers checking ``is_success`` treated
        # explicit 500 returns as OK.
        return self.exception is None and self.status < 400

    @property
    def is_success(self) -> bool:
        """True when the call succeeded (no exception and a non-error status)."""
        return bool(self)
46+
47+
48+
# an example function that uses ServiceReturn
49+
def my_divide(a: int, b: int) -> ServiceReturn:
    """Example usage of ServiceReturn: divide *a* by *b*.

    Returns a successful ServiceReturn carrying the quotient, or a
    status-500 ServiceReturn wrapping the raised exception.
    """
    try:
        quotient = a / b
    except Exception as err:
        return ServiceReturn(
            status=500,
            message="Division failed",
            exception=err
        )
    return ServiceReturn(value=quotient)
59+
60+
61+
# Example usage -- guarded so that importing this module does not execute
# the demo. Previously this ran at import time and, because it divides by
# zero on purpose, logged a spurious ServiceReturn warning on every import.
if __name__ == "__main__":
    # Test
    result = my_divide(10, 0)

    if result:  # truthiness mirrors result.is_success
        print(f"Result: {result.value}")
    else:
        print(f"Error: {result.to_string()}")

    # Optional explicit check
    if result.is_success:
        print("Division succeeded.")

0 commit comments

Comments
 (0)