Skip to content

Commit 159e280

Browse files
committed
refactor: standardize config, vault client, and test structure across all repos
- Updated all `config.py` files to include repo-specific getters for poller name, queue, and DLQ
- Unified usage of `config_shared.py` and removed legacy config references
- Replaced old `get_secret_from_vault` with `get_secret_or_env` in `vault_client.py`
- Cleaned up and standardized `test_vault_client.py` across all repos
- Synced `main.py`, `db_writer.py`, and related config imports to match shared structure
- Removed redundant or outdated test and config files
1 parent 54d4f91 commit 159e280

File tree

4 files changed

+87
-31
lines changed

4 files changed

+87
-31
lines changed

.github/workflows/SBOM and License Audit.yml

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -8,6 +8,8 @@ jobs:
88
sbom:
99
name: Generate SBOM & License Report
1010
runs-on: ubuntu-latest
11+
permissions:
12+
contents: read
1113
steps:
1214
- uses: actions/checkout@v4
1315
- name: Set up Python

.hooks/check-compiled-requirements.py

Lines changed: 35 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,3 @@
1-
#!/usr/bin/env python3
21
"""Pre-commit hook to check and auto-fix requirements.txt and requirements-dev.txt
32
if they are out of sync with requirements.in and requirements-dev.in.
43
"""
@@ -7,18 +6,23 @@
76
import sys
87
from pathlib import Path
98
from shutil import which
10-
11-
PIP_COMPILE = which("pip-compile")
12-
if not PIP_COMPILE:
13-
print("[Error] pip-compile not found in PATH.")
14-
sys.exit(1)
9+
from typing import Optional
1510

1611

1712
def recompile(in_file: str, out_file: str) -> bool:
13+
"""Recompile the output requirements file from its corresponding .in file.
14+
15+
Args:
16+
in_file: Path to the .in file.
17+
out_file: Path to the output .txt file.
18+
19+
Returns:
20+
True if successful, False otherwise.
21+
"""
1822
print(f"[Fix] Recompiling {in_file} -> {out_file}")
1923
try:
2024
subprocess.run( # nosec B603
21-
[PIP_COMPILE, in_file, "--resolver=backtracking", "--output-file", out_file],
25+
["pip-compile", in_file, "--resolver=backtracking", "--output-file", out_file],
2226
check=True,
2327
)
2428
return True
@@ -28,13 +32,25 @@ def recompile(in_file: str, out_file: str) -> bool:
2832

2933

3034
def check_file(in_file: str, out_file: str, autofix: bool = True) -> bool:
35+
"""Check if output requirements file is in sync with its .in file and optionally fix it.
36+
37+
Args:
38+
in_file: Path to the .in file.
39+
out_file: Path to the output .txt file.
40+
autofix: Whether to fix mismatches automatically.
41+
42+
Returns:
43+
True if up to date or successfully fixed, False otherwise.
44+
"""
3145
if not Path(in_file).exists():
32-
return True # skip if .in file doesn't exist
46+
return True # Skip if input file does not exist
3347

3448
print(f"[Check] {in_file} -> {out_file}")
49+
tmp_out = Path(out_file + ".tmp")
50+
3551
try:
3652
subprocess.run( # nosec B603
37-
[PIP_COMPILE, in_file, "--resolver=backtracking", "--output-file", out_file + ".tmp"],
53+
["pip-compile", in_file, "--resolver=backtracking", "--output-file", str(tmp_out)],
3854
check=True,
3955
stdout=subprocess.DEVNULL,
4056
stderr=subprocess.DEVNULL,
@@ -43,27 +59,30 @@ def check_file(in_file: str, out_file: str, autofix: bool = True) -> bool:
4359
print(f"[Error] pip-compile failed during check for {in_file}: {e}")
4460
return False
4561

46-
expected = Path(out_file + ".tmp").read_text(encoding="utf-8")
62+
expected = tmp_out.read_text(encoding="utf-8")
4763
actual = Path(out_file).read_text(encoding="utf-8") if Path(out_file).exists() else ""
4864

49-
Path(out_file + ".tmp").unlink()
65+
tmp_out.unlink()
5066

5167
if expected != actual:
5268
print(f"[Mismatch] {out_file} is out of sync with {in_file}")
53-
if autofix:
54-
return recompile(in_file, out_file)
55-
return False
69+
return recompile(in_file, out_file) if autofix else False
5670

5771
return True
5872

5973

60-
def main():
74+
def main() -> int:
75+
"""Main entry point for the hook."""
76+
if not which("pip-compile"):
77+
print("[Error] pip-compile not found in PATH.")
78+
return 1
79+
6180
ok1 = check_file("requirements.in", "requirements.txt", autofix=True)
6281
ok2 = check_file("requirements-dev.in", "requirements-dev.txt", autofix=True)
6382

6483
if not (ok1 and ok2):
6584
print("[Error] Could not fix all mismatches. Check pip-compile output.")
66-
sys.exit(1)
85+
return 1
6786

6887
print("[OK] All requirements files are now up to date.")
6988
return 0

Makefile

Lines changed: 2 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,4 @@
1+
12
.PHONY: help install test lint audit format clean clean-all compile preflight precommit security bump docker-build sbom sign-image watch
23

34
help:
@@ -72,35 +73,26 @@ sync:
7273

7374
sync-apply:
7475
@echo "🚀 Applying sync to all repositories..."
75-
python sync_if_needed.py --apply && echo "✅ Sync complete. Changes logged in sync.log"
76+
python sync_if_needed.py --apply && echo "✅ Sync complete. Changes logged in sync.log"
7677

7778
# -----------------------------------------------------------------------------
7879
# Attestation and SBOM Targets
7980
# -----------------------------------------------------------------------------
8081

81-
# ✅ Generate CycloneDX SBOM for Python dependencies
82-
# Requires: pip install cyclonedx-bom
8382
sbom-py:
8483
cyclonedx-py -o bom.json
8584

86-
# ✅ Generate full SBOM from Docker image context using Syft
87-
# Requires: https://github.com/anchore/syft
8885
sbom-image:
8986
syft . -o spdx-json > sbom.spdx.json
9087

91-
# ✅ Run both SBOM generators + audit (pip check + pip-audit + deptry)
9288
attest: sbom-py sbom-image audit
9389

9490
# -----------------------------------------------------------------------------
9591
# Optional Utilities (enable as needed)
9692
# -----------------------------------------------------------------------------
9793

98-
# 🟡 OPTIONAL: Sign Docker image using cosign
99-
# Requires: https://github.com/sigstore/cosign
10094
# sign-image:
10195
# cosign sign $(shell basename $(PWD)):latest
10296

103-
# 🟡 OPTIONAL: Pytest watch mode for test-driven development
104-
# Requires: pip install pytest-watch
10597
# watch:
10698
# ptw --onfail "notify-send 'Test failed!'"

src/app/utils/rate_limit.py

Lines changed: 48 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,6 @@
11
"""Thread-safe rate limiter using the token bucket algorithm."""
22

3+
import hashlib
34
import re
45
import threading
56
import time
@@ -24,14 +25,44 @@
2425

2526

2627
def _sanitize_context(context: str) -> str:
27-
"""Sanitize context string for safe logging."""
28+
"""Sanitize context string for safe metric labeling.
29+
30+
Replaces unsafe characters and truncates the string for Prometheus label compatibility.
31+
32+
Args:
33+
context (str): Original context string.
34+
35+
Returns:
36+
str: Sanitized and truncated context label.
37+
"""
2838
return re.sub(r"[^\w\-:.]", "_", context)[:64]
2939

3040

41+
def _hash_context(context: str) -> str:
42+
"""Hash the context string to produce a short, opaque identifier for logs.
43+
44+
Args:
45+
context (str): The original context string.
46+
47+
Returns:
48+
str: SHA-256-based short hash of the context.
49+
"""
50+
return hashlib.sha256(context.encode()).hexdigest()[:8]
51+
52+
3153
class RateLimiter:
3254
"""Token bucket rate limiter with Prometheus support."""
3355

3456
def __init__(self, max_requests: int, time_window: float) -> None:
57+
"""Initialize a new RateLimiter instance.
58+
59+
Args:
60+
max_requests (int): Maximum number of allowed requests in the time window.
61+
time_window (float): Time window in seconds.
62+
63+
Raises:
64+
ValueError: If max_requests or time_window is non-positive.
65+
"""
3566
if max_requests <= 0:
3667
raise ValueError("max_requests must be greater than 0")
3768
if time_window <= 0:
@@ -44,7 +75,19 @@ def __init__(self, max_requests: int, time_window: float) -> None:
4475
self._last_check = time.time()
4576

4677
def acquire(self, context: str = "RateLimiter") -> None:
78+
"""Acquire a token, blocking if needed to respect rate limits.
79+
80+
Replenishes tokens based on elapsed time and sleeps if tokens are unavailable.
81+
Also updates Prometheus metrics and logs token state.
82+
83+
Args:
84+
context (str): An identifier for the caller (e.g., service name).
85+
86+
Returns:
87+
None
88+
"""
4789
context = _sanitize_context(context)
90+
context_id = _hash_context(context)
4891

4992
with self._lock:
5093
current_time: float = time.time()
@@ -57,21 +100,21 @@ def acquire(self, context: str = "RateLimiter") -> None:
57100
rate_limiter_tokens_remaining.labels(context=context).set(self._tokens)
58101

59102
logger.debug(
60-
f"[{context}] Replenished {tokens_to_add:.2f} tokens. "
61-
f"Available tokens: {self._tokens:.2f}"
103+
f"[ctx:{context_id}] Replenished {tokens_to_add:.2f} tokens. "
104+
f"Available: {self._tokens:.2f}"
62105
)
63106

64107
if self._tokens < 1:
65108
sleep_time: float = (1 - self._tokens) * (self._time_window / self._max_requests)
66109
sleep_time = min(sleep_time, self._time_window)
67110

68111
logger.info(
69-
f"[{context}] Rate limit reached. Sleeping for {sleep_time:.2f} seconds."
112+
f"[ctx:{context_id}] Rate limit hit. Sleeping for {sleep_time:.2f} seconds."
70113
)
71114
rate_limiter_blocked_total.labels(context=context).inc()
72115
time.sleep(sleep_time)
73116
self._tokens = 1
74117

75118
self._tokens -= 1
76119
rate_limiter_tokens_remaining.labels(context=context).set(self._tokens)
77-
logger.debug(f"[{context}] Consumed a token. Remaining: {self._tokens:.2f}")
120+
logger.debug(f"[ctx:{context_id}] Token consumed. Remaining: {self._tokens:.2f}")

0 commit comments

Comments (0)