Skip to content

Commit 7b2be5e

Browse files
committed
common folder patching
1 parent c826a45 commit 7b2be5e

File tree

10 files changed

+35
-24
lines changed

10 files changed

+35
-24
lines changed

Makefile

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -38,7 +38,7 @@ build-proxy:
3838
scripts/build_proxy.sh
3939

4040
#Files to loop over in release
41-
_dist_include="pytest.ini poetry.lock poetry.toml pyproject.toml Makefile build/. e2e e2e_batch specification sandbox terraform scripts backend delta_backend ack_backend filenameprocessor recordprocessor mesh_processor redis_sync id_sync"
41+
_dist_include="pytest.ini poetry.lock poetry.toml pyproject.toml Makefile build/. e2e e2e_batch specification sandbox terraform scripts backend delta_backend ack_backend filenameprocessor recordprocessor mesh_processor redis_sync lambdas/id_sync"
4242

4343

4444
#Create /dist/ sub-directory and copy files into directory

lambdas/id_sync/Dockerfile

Lines changed: 2 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -9,17 +9,12 @@ RUN mkdir -p /home/appuser && \
99
# Install Poetry as root
1010
COPY poetry.lock pyproject.toml README.md ./
1111
RUN poetry config virtualenvs.create false && poetry install --no-interaction --no-ansi --no-root --only main
12-
# -----------------------------
13-
FROM base AS test
14-
COPY src src
15-
COPY tests tests
16-
RUN poetry install --no-interaction --no-ansi --no-root && \
17-
pytest --disable-warnings tests
1812

1913
# -----------------------------
2014
FROM base AS build
2115
COPY src .
16+
COPY ../shared/src/common ./common
2217
RUN chmod 644 $(find . -type f) && chmod 755 $(find . -type d)
2318
# Build as non-root user
2419
USER 1001:1001
25-
CMD ["id_sync.handler"]
20+
CMD ["id_sync.handler"]

lambdas/id_sync/Makefile

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11

22
test:
3-
@PYTHONPATH=src:tests python -m unittest
3+
@PYTHONPATH=src:tests:../shared/src python -m unittest
44

55
coverage-run:
66
coverage run -m unittest discover -v

lambdas/id_sync/pyproject.toml

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -13,7 +13,8 @@ description = ""
1313
authors = ["s.wates <[email protected]>"]
1414
readme = "README.md"
1515
packages = [
16-
{include = "src"}
16+
{include = "src"},
17+
{include = "common", from = "../shared/src"}
1718
]
1819

1920
[tool.poetry.dependencies]

lambdas/id_sync/src/clients.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -13,3 +13,4 @@
1313
REDIS_PORT = os.getenv("REDIS_PORT", 6379)
1414
s3_client = boto3_client("s3", region_name=REGION_NAME)
1515
firehose_client = boto3_client("firehose", region_name=REGION_NAME)
16+

lambdas/id_sync/src/id_sync.py

Lines changed: 4 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,6 @@
1-
from clients import logger
2-
from log_decorator import logging_decorator
1+
from common.clients import logger
2+
from clients import STREAM_NAME
3+
from common.log_decorator import logging_decorator
34
from record_processor import process_record
45

56
'''
@@ -8,7 +9,7 @@
89
This module processes S3 events and iterates through each record to process them individually.'''
910

1011

11-
@logging_decorator(prefix="id_sync")
12+
@logging_decorator(prefix="id_sync", stream_name=STREAM_NAME)
1213
def handler(event, _):
1314

1415
try:

lambdas/id_sync/tests/test_handler.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -16,7 +16,7 @@ def setUp(self):
1616
self.record_processor_patcher = patch("id_sync.process_record")
1717
self.mock_record_processor = self.record_processor_patcher.start()
1818
# patch log_decorator to pass through
19-
self.mock_log_decorator = patch("log_decorator.logging_decorator", lambda prefix=None: (lambda f: f)).start()
19+
self.mock_log_decorator = patch("common.log_decorator.logging_decorator", lambda prefix=None, stream_name=None: (lambda f: f)).start()
2020

2121
def tearDown(self):
2222
patch.stopall()

lambdas/shared/src/common/__init__.py

Whitespace-only changes.
Lines changed: 11 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,11 @@
1+
import os
2+
import logging
3+
from boto3 import client as boto3_client
4+
5+
logging.basicConfig(level="INFO")
6+
logger = logging.getLogger()
7+
logger.setLevel("INFO")
8+
9+
REGION_NAME = os.getenv("AWS_REGION", "eu-west-2")
10+
s3_client = boto3_client("s3", region_name=REGION_NAME)
11+
firehose_client = boto3_client("firehose", region_name=REGION_NAME)

lambdas/id_sync/src/log_decorator.py renamed to lambdas/shared/src/common/log_decorator.py

Lines changed: 12 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -8,31 +8,31 @@
88
import time
99
from datetime import datetime
1010
from functools import wraps
11-
from clients import firehose_client, logger, STREAM_NAME
11+
from clients import logger, firehose_client
1212

1313

14-
def send_log_to_firehose(log_data: dict) -> None:
14+
def send_log_to_firehose(stream_name, log_data: dict) -> None:
1515
"""Sends the log_message to Firehose"""
1616
try:
1717
record = {"Data": json.dumps({"event": log_data}).encode("utf-8")}
18-
response = firehose_client.put_record(DeliveryStreamName=STREAM_NAME, Record=record)
18+
response = firehose_client.put_record(DeliveryStreamName=stream_name, Record=record)
1919
logger.info("Log sent to Firehose: %s", response)
2020
except Exception as error: # pylint:disable = broad-exception-caught
2121
logger.exception("Error sending log to Firehose: %s", error)
2222

2323

24-
def generate_and_send_logs(
25-
start_time, base_log_data: dict, additional_log_data: dict, is_error_log: bool = False
26-
) -> None:
24+
def generate_and_send_logs(stream_name,
25+
start_time, base_log_data: dict, additional_log_data: dict, is_error_log: bool = False
26+
) -> None:
2727
"""Generates log data which includes the base_log_data, additional_log_data, and time taken (calculated using the
2828
current time and given start_time) and sends them to Cloudwatch and Firehose."""
2929
log_data = {**base_log_data, "time_taken": f"{round(time.time() - start_time, 5)}s", **additional_log_data}
3030
log_function = logger.error if is_error_log else logger.info
3131
log_function(json.dumps(log_data))
32-
send_log_to_firehose(log_data)
32+
send_log_to_firehose(stream_name, log_data)
3333

3434

35-
def logging_decorator(prefix="id_sync"):
35+
def logging_decorator(prefix: str, stream_name):
3636
def decorator(func):
3737
@wraps(func)
3838
def wrapper(*args, **kwargs):
@@ -43,11 +43,13 @@ def wrapper(*args, **kwargs):
4343
start_time = time.time()
4444
try:
4545
result = func(*args, **kwargs)
46-
generate_and_send_logs(start_time, base_log_data, additional_log_data=result)
46+
generate_and_send_logs(stream_name,
47+
start_time, base_log_data, additional_log_data=result)
4748
return result
4849
except Exception as e:
4950
additional_log_data = {"statusCode": 500, "error": str(e)}
50-
generate_and_send_logs(start_time, base_log_data, additional_log_data, is_error_log=True)
51+
generate_and_send_logs(stream_name,
52+
start_time, base_log_data, additional_log_data, is_error_log=True)
5153
raise
5254
return wrapper
5355
return decorator

0 commit comments

Comments
 (0)