
Commit fca8811

Add CodeRabbit recommendations
1 parent fe82566 commit fca8811

12 files changed (+312 / -185 lines)


.github/workflows/test.yml

Lines changed: 0 additions & 3 deletions
@@ -87,9 +87,6 @@ jobs:
       run: |
         pip install -r requirements.txt

-    - name: Set PYTHONPATH environment variable
-      run: echo "PYTHONPATH=${GITHUB_WORKSPACE}/release_notes_generator/release_notes_generator" >> $GITHUB_ENV
-
     - name: Check code coverage with Pytest
       run: pytest --cov=. -v tests/ --cov-fail-under=80

DEVELOPER.md

Lines changed: 8 additions & 8 deletions
@@ -14,7 +14,7 @@ Clone the repository and navigate to the project directory:

 ```shell
 git clone https://github.com/AbsaOSS/EventGate.git
-cd event-gate
+cd EventGate
 ```

 ## Set Up Python Environment
@@ -75,23 +75,23 @@ All done! ✨ 🍰 ✨

 ## Run mypy Tool Locally

-This project uses the [my[py]](https://mypy.readthedocs.io/en/stable/) tool, a static type checker for Python.
+This project uses the [mypy](https://mypy.readthedocs.io/en/stable/) tool, a static type checker for Python.

 > Type checkers help ensure that you correctly use variables and functions in your code.
 > With mypy, add type hints (PEP 484) to your Python programs,
 > and mypy will warn you when you use those types incorrectly.
-my[py] configuration is in `pyproject.toml` file.
+mypy configuration is in `pyproject.toml` file.

-Follow these steps to format your code with my[py] locally:
+Follow these steps to type-check your code with mypy locally:

-### Run my[py]
+### Run mypy

-Run my[py] on all files in the project.
+Run mypy on all files in the project.
 ```shell
 mypy .
 ```

-To run my[py] check on a specific file, follow the pattern `mypy <path_to_file>/<name_of_file>.py --check-untyped-defs`.
+To run mypy on a specific file, follow the pattern `mypy <path_to_file>/<name_of_file>.py --check-untyped-defs`.

 Example:
 ```shell
@@ -124,4 +124,4 @@ See the coverage report on the path:
 open htmlcov/index.html
 ```

-##
+##
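For readers new to the tool, a minimal illustration (hypothetical, not from this repository or commit) of the kind of misuse mypy reports:

```python
# Hypothetical example; not part of this commit.
def add(a: int, b: int) -> int:
    return a + b


add("1", 2)  # mypy: Argument 1 to "add" has incompatible type "str"; expected "int"
```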

pyproject.toml

Lines changed: 1 addition & 1 deletion
@@ -1,7 +1,7 @@
 [tool.black]
 line-length = 120
 target-version = ['py311']
-force-exclude = '''test'''
+# force-exclude = '''test'''

 [tool.coverage.run]
 omit = ["tests/*"]

requirements.txt

Lines changed: 1 addition & 1 deletion
@@ -11,5 +11,5 @@ jsonschema==4.25.1
 PyJWT==2.10.1
 requests==2.32.5
 boto3==1.40.25
-confluent_kafka
+confluent-kafka==2.11.1
 psycopg2-binary==2.9.10

src/event_gate_lambda.py

Lines changed: 31 additions & 12 deletions
@@ -50,9 +50,7 @@
 logger.debug("Initialized LOGGER")
 logger.debug(f"Using CONF_DIR={_CONF_DIR}")
 if _INVALID_CONF_ENV:
-    logger.warning(
-        f"CONF_DIR env var set to non-existent path: {_INVALID_CONF_ENV}; fell back to {_CONF_DIR}"
-    )
+    logger.warning(f"CONF_DIR env var set to non-existent path: {_INVALID_CONF_ENV}; fell back to {_CONF_DIR}")

 # Resolve project root (parent directory of this file's directory)
 _PROJECT_ROOT = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
@@ -210,17 +208,38 @@ def post_topic_message(topic_name: str, topic_message: Dict[str, Any], token_enc


 def extract_token(event_headers: Dict[str, str]) -> str:
-    """Extract bearer token from headers.
+    """Extract bearer token from headers (case-insensitive).

-    Supports lowercase custom 'bearer' header or standard 'Authorization: Bearer <token>'.
-    Returns empty string if not present (caller handles auth error response).
+    Supports:
+    - Custom 'bearer' header (any casing) whose value is the raw token
+    - Standard 'Authorization: Bearer <token>' header (case-insensitive scheme & key)
+    Returns empty string if token not found or malformed.
     """
-    if "bearer" in event_headers:
-        return event_headers["bearer"]
-    auth_header = event_headers.get("Authorization", "")
-    if auth_header.startswith("Bearer "):
-        return auth_header[len("Bearer ") :]
-    return ""
+    if not event_headers:
+        return ""
+
+    # Normalize keys to lowercase for case-insensitive lookup
+    lowered = {str(k).lower(): v for k, v in event_headers.items()}
+
+    # Direct bearer header (raw token)
+    if "bearer" in lowered and isinstance(lowered["bearer"], str):
+        token_candidate = lowered["bearer"].strip()
+        if token_candidate:
+            return token_candidate
+
+    # Authorization header with Bearer scheme
+    auth_val = lowered.get("authorization", "")
+    if not isinstance(auth_val, str):  # defensive
+        return ""
+    auth_val = auth_val.strip()
+    if not auth_val:
+        return ""
+
+    # Case-insensitive match for 'Bearer ' prefix
+    if not auth_val.lower().startswith("bearer "):
+        return ""
+    token_part = auth_val[7:].strip()  # len('Bearer ')==7
+    return token_part


 def lambda_handler(event: Dict[str, Any], context: Any):  # pylint: disable=unused-argument,too-many-return-statements
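For context, a sketch of the behaviour the reworked extract_token aims for, written as assertions; the import path follows the 'src' package layout the tests rely on and is an assumption, as are the header values:

```python
# Hypothetical usage sketch, not part of this commit; header names and values are illustrative.
from src.event_gate_lambda import extract_token  # assumes the module imports cleanly

# Custom 'bearer' header carrying the raw token, any casing of the key
assert extract_token({"Bearer": "abc123"}) == "abc123"

# Standard Authorization header, case-insensitive scheme
assert extract_token({"authorization": "bearer abc123"}) == "abc123"

# Missing or malformed headers fall back to an empty string
assert extract_token({}) == ""
assert extract_token({"Authorization": "Basic abc123"}) == ""
```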

src/writer_eventbridge.py

Lines changed: 16 additions & 15 deletions
@@ -5,7 +5,7 @@

 import json
 import logging
-from typing import Any, Dict, Optional, Tuple
+from typing import Any, Dict, Optional, Tuple, List

 import boto3
 from botocore.exceptions import BotoCoreError, ClientError
@@ -26,6 +26,12 @@ def init(logger: logging.Logger, config: Dict[str, Any]) -> None:
     STATE["logger"].debug("Initialized EVENTBRIDGE writer")


+def _format_failed_entries(entries: List[Dict[str, Any]]) -> str:
+    failed = [e for e in entries if "ErrorCode" in e or "ErrorMessage" in e]
+    # Keep message concise but informative
+    return json.dumps(failed) if failed else "[]"
+
+
 def write(topic_name: str, message: Dict[str, Any]) -> Tuple[bool, Optional[str]]:
     """Publish a message to EventBridge.

@@ -58,21 +64,16 @@ def write(topic_name: str, message: Dict[str, Any]) -> Tuple[bool, Optional[str]
                 }
             ]
         )
-        if response.get("FailedEntryCount", 0) > 0:
-            msg = str(response)
+        failed_count = response.get("FailedEntryCount", 0)
+        if failed_count > 0:
+            entries = response.get("Entries", [])
+            failed_repr = _format_failed_entries(entries)
+            msg = f"{failed_count} EventBridge entries failed: {failed_repr}"
             logger.error(msg)
             return False, msg
-    except (BotoCoreError, ClientError) as err:
-        err_msg = f"The EventBridge writer failed: {err}"  # specific AWS error
-        logger.error(err_msg)
-        return False, err_msg
-    except Exception as err:  # pragma: no cover - unexpected failure path
-        err_msg = (
-            f"The EventBridge writer failed with unknown error: {err}"
-            if not isinstance(err, (BotoCoreError, ClientError))
-            else str(err)
-        )
-        logger.error(err_msg)
-        return False, err_msg
+    except (BotoCoreError, ClientError) as err:  # explicit AWS client-related errors
+        logger.exception("EventBridge put_events call failed")
+        return False, str(err)

+    # Let any unexpected exception propagate for upstream handler (avoids broad except BLE001 / TRY400)
     return True, None
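To illustrate the new failure path, a sketch of how _format_failed_entries condenses a PutEvents-style response; the helper is restated so the snippet is self-contained, and the entry values are made up:

```python
# Hypothetical sketch, not part of this commit; entry values are illustrative only.
import json
from typing import Any, Dict, List


def _format_failed_entries(entries: List[Dict[str, Any]]) -> str:
    # Same filtering as the helper added above: keep only entries reporting an error.
    failed = [e for e in entries if "ErrorCode" in e or "ErrorMessage" in e]
    return json.dumps(failed) if failed else "[]"


entries = [
    {"EventId": "example-event-id"},  # delivered entry, no error fields
    {"ErrorCode": "ThrottlingException", "ErrorMessage": "Rate exceeded"},  # failed entry
]
print(_format_failed_entries(entries))
# [{"ErrorCode": "ThrottlingException", "ErrorMessage": "Rate exceeded"}]
```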

src/writer_kafka.py

Lines changed: 2 additions & 3 deletions
@@ -15,6 +15,7 @@
 _logger: logging.Logger = logging.getLogger(__name__)
 kafka_producer: Optional[Producer] = None

+
 def init(logger: logging.Logger, config: Dict[str, Any]) -> None:
     """Initialize Kafka producer.

@@ -67,9 +68,7 @@ def write(topic_name: str, message: Dict[str, Any]) -> Tuple[bool, Optional[str]
         topic_name,
         key="",
         value=json.dumps(message).encode("utf-8"),
-        callback=lambda err, msg: (
-            errors.append(str(err)) if err is not None else None
-        ),
+        callback=lambda err, msg: (errors.append(str(err)) if err is not None else None),
     )
     kafka_producer.flush()
     if errors:
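As a note on the collapsed callback above: confluent-kafka invokes a producer delivery callback as callback(err, msg) once delivery succeeds or fails, so appending str(err) only when err is not None collects delivery failures for write() to report after flush(). A small standalone sketch (not part of this commit):

```python
# Hypothetical sketch; the error string is illustrative, no broker is involved.
errors: list = []
callback = lambda err, msg: (errors.append(str(err)) if err is not None else None)

callback(None, "delivery report for a successful message")  # success: nothing recorded
callback("KafkaError: Unknown topic or partition", None)    # failure: error recorded
assert errors == ["KafkaError: Unknown topic or partition"]
```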

src/writer_postgres.py

Lines changed: 7 additions & 29 deletions
@@ -35,12 +35,8 @@ def init(logger: logging.Logger) -> None:
     secret_region = os.environ.get("POSTGRES_SECRET_REGION", "")

     if secret_name and secret_region:
-        aws_secrets = boto3.Session().client(
-            service_name="secretsmanager", region_name=secret_region
-        )
-        postgres_secret = aws_secrets.get_secret_value(SecretId=secret_name)[
-            "SecretString"
-        ]
+        aws_secrets = boto3.Session().client(service_name="secretsmanager", region_name=secret_region)
+        postgres_secret = aws_secrets.get_secret_value(SecretId=secret_name)["SecretString"]
         POSTGRES = json.loads(postgres_secret)
     else:
         POSTGRES = {"database": ""}
@@ -100,16 +96,8 @@ def postgres_edla_write(cursor, table: str, message: Dict[str, Any]) -> None:
             message["operation"],
             message.get("location"),
             message["format"],
-            (
-                json.dumps(message.get("format_options"))
-                if "format_options" in message
-                else None
-            ),
-            (
-                json.dumps(message.get("additional_info"))
-                if "additional_info" in message
-                else None
-            ),
+            (json.dumps(message.get("format_options")) if "format_options" in message else None),
+            (json.dumps(message.get("additional_info")) if "additional_info" in message else None),
         ),
     )

@@ -190,11 +178,7 @@ def postgres_run_write(cursor, table_runs: str, table_jobs: str, message: Dict[s
             job["timestamp_start"],
             job["timestamp_end"],
             job.get("message"),
-            (
-                json.dumps(job.get("additional_info"))
-                if "additional_info" in job
-                else None
-            ),
+            (json.dumps(job.get("additional_info")) if "additional_info" in job else None),
         ),
     )

@@ -234,11 +218,7 @@ def postgres_test_write(cursor, table: str, message: Dict[str, Any]) -> None:
             message["source_app"],
             message["environment"],
             message["timestamp"],
-            (
-                json.dumps(message.get("additional_info"))
-                if "additional_info" in message
-                else None
-            ),
+            (json.dumps(message.get("additional_info")) if "additional_info" in message else None),
         ),
     )

@@ -271,9 +251,7 @@ def write(topic_name: str, message: Dict[str, Any]) -> Tuple[bool, Optional[str]
     if topic_name == "public.cps.za.dlchange":
         postgres_edla_write(cursor, "public_cps_za_dlchange", message)
     elif topic_name == "public.cps.za.runs":
-        postgres_run_write(
-            cursor, "public_cps_za_runs", "public_cps_za_runs_jobs", message
-        )
+        postgres_run_write(cursor, "public_cps_za_runs", "public_cps_za_runs_jobs", message)
     elif topic_name == "public.cps.za.test":
         postgres_test_write(cursor, "public_cps_za_test", message)
     else:
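The single-line conditionals introduced above keep the existing behaviour for optional JSON columns: when the key is absent, None is bound so the column ends up as SQL NULL; when present, the value is serialized with json.dumps. A sketch (not part of this commit) of that expression in isolation:

```python
# Hypothetical sketch; the message payload is illustrative only.
import json

message = {"operation": "CREATE", "format": "parquet"}
value = json.dumps(message.get("format_options")) if "format_options" in message else None
assert value is None  # absent key -> bound as NULL

message["format_options"] = {"header": True}
value = json.dumps(message.get("format_options")) if "format_options" in message else None
assert value == '{"header": true}'  # present key -> serialized JSON
```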

tests/conftest.py

Lines changed: 2 additions & 1 deletion
@@ -14,7 +14,8 @@
 # limitations under the License.
 #
 import os, sys
+
 # Ensure project root is on sys.path so 'src' package is importable during tests
-PROJECT_ROOT = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
+PROJECT_ROOT = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
 if PROJECT_ROOT not in sys.path:
     sys.path.insert(0, PROJECT_ROOT)

tests/test_conf_validation.py

Lines changed: 7 additions & 4 deletions
@@ -1,4 +1,5 @@
 import os
+
 #
 # Copyright 2025 ABSA Group Limited
 #
@@ -28,25 +29,26 @@
     "event_bus_arn",
 }

+
 def load_json(path):
     with open(path, "r") as f:
         return json.load(f)

+
 @pytest.fixture(scope="module")
 def config_files():
-    files = [
-        f for f in glob(os.path.join(CONF_DIR, "config*.json"))
-        if os.path.basename(f) not in {"access.json"}
-    ]
+    files = [f for f in glob(os.path.join(CONF_DIR, "config*.json")) if os.path.basename(f) not in {"access.json"}]
     assert files, "No config files found matching pattern config*.json"
     return files

+
 @pytest.mark.parametrize("key", sorted(REQUIRED_CONFIG_KEYS))
 def test_config_files_have_required_keys(config_files, key):
     for path in config_files:
         data = load_json(path)
         assert key in data, f"Config {path} missing key: {key}"

+
 def test_access_json_structure():
     path = os.path.join(CONF_DIR, "access.json")
     data = load_json(path)
@@ -56,6 +58,7 @@ def test_access_json_structure():
         assert isinstance(users, list), f"Topic {topic} value must be a list of users"
         assert all(isinstance(u, str) for u in users), f"All users for topic {topic} must be strings"

+
 @pytest.mark.parametrize("topic_file", glob(os.path.join(CONF_DIR, "topic_*.json")))
 def test_topic_json_schemas_basic(topic_file):
     assert topic_file, "No topic_*.json files found"
