Skip to content

Commit 2eda58f

Browse files
committed
TODO Return True
cpd lambdas/shared/** refactor for duplicate checks
1 parent b313a23 commit 2eda58f

30 files changed

+1424
-229
lines changed

.github/workflows/sonarcloud.yml

Lines changed: 2 additions & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -86,6 +86,7 @@ jobs:
8686
poetry install
8787
poetry run coverage run -m unittest discover || echo "delta tests failed" >> ../failed_tests.txt
8888
poetry run coverage xml -o ../delta-coverage.xml
89+
cat ../delta-coverage.xml
8990
9091
- name: Run unittest with coverage-fhir-api
9192
working-directory: backend
@@ -125,18 +126,10 @@ jobs:
125126
PYTHONPATH: ${{ env.SHARED_PATH }}
126127
continue-on-error: true
127128
run: |
128-
echo "shared coverage - Current directory: $(pwd)"
129129
poetry env use 3.11
130130
poetry install
131-
poetry run coverage run -m unittest discover || echo "shared tests failed" >> ../../failed_tests.txt
131+
poetry run coverage run --source=src -m unittest discover -s tests -p "test_*.py" -v || echo "shared tests failed" >> ../../failed_tests.txt
132132
poetry run coverage xml -o ../../shared-coverage.xml
133-
ls ../../shared-coverage.xml
134-
# show directory of ../.. as absolute path
135-
echo "Parent directory of shared-coverage.xml: $(pwd)/../../"
136-
#check files created
137-
if [ ! -f ../../shared-coverage.xml ]; then
138-
echo "shared-coverage.xml not found"
139-
fi
140133
141134
- name: Run unittest with id_sync
142135
working-directory: lambdas/id_sync
@@ -145,22 +138,14 @@ jobs:
145138
PYTHONPATH: ${{ env.LAMBDA_PATH }}/id_sync/src:${{ env.LAMBDA_PATH }}/id_sync/tests:${{ env.SHARED_PATH }}
146139
continue-on-error: true
147140
run: |
148-
echo "id_sync coverage - Current directory: $(pwd)"
149141
poetry env use 3.11
150142
poetry install
151143
poetry run coverage run -m unittest discover || echo "id_sync tests failed" >> ../../failed_tests.txt
152144
poetry run coverage xml -o ../../id_sync-coverage.xml
153-
#check files created
154-
if [ ! -f ../../id_sync-coverage.xml ]; then
155-
echo "id_sync-coverage.xml not found, exiting with error"
156-
fi
157145
158146
- name: Run Test Failure Summary
159147
id: check_failure
160148
run: |
161-
echo "Checking for test failures..."
162-
ls *-coverage.xml
163-
cat shared-coverage.xml || echo "shared-coverage.xml not found"
164149
if [ -s failed_tests.txt ]; then
165150
echo "The following tests failed:"
166151
cat failed_tests.txt

.gitignore

Lines changed: 1 addition & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -29,6 +29,4 @@ openapi.json
2929
!**/.vscode/settings.json.default
3030

3131
devtools/volume/
32-
backend/tests/.coverage
33-
redis_sync/.vscode/settings.json.default
34-
lambdas/shared/.coverage
32+
**/.coverage
Lines changed: 38 additions & 38 deletions
Original file line numberDiff line numberDiff line change
@@ -1,41 +1,41 @@
11
{
22
"folders": [
3-
{
4-
"path": "."
5-
},
6-
{
7-
"path": "backend"
8-
},
9-
{
10-
"path": "filenameprocessor"
11-
},
12-
{
13-
"path": "recordprocessor"
14-
},
15-
{
16-
"path": "ack_backend"
17-
},
18-
{
19-
"path": "delta_backend"
20-
},
21-
{
22-
"path": "mesh_processor"
23-
},
24-
{
25-
"path": "e2e"
26-
},
27-
{
28-
"path": "e2e_batch"
29-
},
30-
{
31-
"path": "redis_sync"
32-
},
33-
{
34-
"path": "lambdas/id_sync"
35-
},
36-
{
37-
"path": "lambdas/shared"
38-
}
39-
],
40-
"settings": {},
3+
{
4+
"path": "."
5+
},
6+
{
7+
"path": "backend"
8+
},
9+
{
10+
"path": "filenameprocessor"
11+
},
12+
{
13+
"path": "recordprocessor"
14+
},
15+
{
16+
"path": "ack_backend"
17+
},
18+
{
19+
"path": "delta_backend"
20+
},
21+
{
22+
"path": "mesh_processor"
23+
},
24+
{
25+
"path": "e2e"
26+
},
27+
{
28+
"path": "e2e_batch"
29+
},
30+
{
31+
"path": "redis_sync"
32+
},
33+
{
34+
"path": "lambdas/id_sync"
35+
},
36+
{
37+
"path": "lambdas/shared"
38+
}
39+
],
40+
"settings": {}
4141
}

lambdas/id_sync/.vscode/settings.json.default

Lines changed: 10 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -14,5 +14,14 @@
1414
"pylint.args": [
1515
"--init-hook",
1616
"import sys; sys.path.append('./src')"
17-
]
17+
],
18+
"[makefile]": {
19+
"editor.insertSpaces": false,
20+
"editor.detectIndentation": false
21+
},
22+
"files.trimTrailingWhitespace": true,
23+
"[python]": {
24+
"files.trimTrailingWhitespace": true
25+
},
26+
"files.insertFinalNewline": true
1827
}

lambdas/id_sync/poetry.lock

Lines changed: 34 additions & 4 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

lambdas/id_sync/pyproject.toml

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -25,6 +25,8 @@ moto = "~5.1.5"
2525
python-stdnum = "^2.1"
2626
coverage = "^7.8.0"
2727
redis = "^4.6.0"
28+
jwt = "^1.4.0"
29+
cache = "^1.0.3"
2830

2931
[tool.poetry.group.dev.dependencies]
3032
coverage = "^7.8.0"

lambdas/id_sync/src/clients.py

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,4 @@
1+
import os
2+
3+
4+
pds_env: str = os.getenv("PDS_ENV", "int")

lambdas/id_sync/src/id_sync.py

Lines changed: 8 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -4,11 +4,9 @@
44
from common.aws_lambda_event import AwsLambdaEvent
55
from common.aws_lambda_sqs_event_record import AwsLambdaSqsEventRecord
66
from record_processor import process_record
7-
87
'''
9-
Event Processor
10-
The Business Logic for the Redis Sync Lambda Function.
11-
This module processes S3 events and iterates through each record to process them individually.'''
8+
Lambda function handler for processing SQS events. Lambda for ID Sync, fired by SQS.
9+
'''
1210

1311

1412
@logging_decorator(prefix="id_sync", stream_name=STREAM_NAME)
@@ -18,7 +16,7 @@ def handler(event_data, _):
1816
event = AwsLambdaEvent(event_data)
1917
record_count = len(event.records)
2018
if record_count > 0:
21-
logger.info("Processing SQS event with %d records", record_count)
19+
logger.info("id_sync processing event with %d records", record_count)
2220
error_count = 0
2321
file_keys = []
2422
for record in event.records:
@@ -28,17 +26,18 @@ def handler(event_data, _):
2826
if record_result["status"] == "error":
2927
error_count += 1
3028
if error_count > 0:
31-
logger.error("Processed %d records with %d errors", record_count, error_count)
29+
logger.error("id_sync processed %d records with %d errors", record_count, error_count)
3230
return {"status": "error", "message": f"Processed {record_count} records with {error_count} errors",
3331
"file_keys": file_keys}
3432
else:
35-
logger.info("Successfully processed all %d records", record_count)
33+
logger.info("id_sync successfully processed all %d records", record_count)
3634
return {"status": "success", "message": f"Successfully processed {record_count} records",
3735
"file_keys": file_keys}
3836
else:
3937
logger.info("No records found in event")
4038
return {"status": "success", "message": "No records found in event"}
4139

4240
except Exception:
43-
logger.exception("Error processing S3 event")
44-
return {"status": "error", "message": "Error processing S3 event"}
41+
msg = "Error processing id_sync event"
42+
logger.exception(msg)
43+
return {"status": "error", "message": msg}

lambdas/id_sync/src/pds_details.py

Lines changed: 34 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,34 @@
1+
'''
2+
record Processor
3+
'''
4+
from common.clients import logger, secrets_manager_client
5+
from common.cache import Cache
6+
from clients import pds_env
7+
from common.pds_service import PdsService
8+
from common.authentication import AppRestrictedAuth, Service
9+
10+
11+
def get_pds_patient_details(nhs_number: str) -> dict:
12+
try:
13+
logger.info(f"Get PDS patient details for {nhs_number}")
14+
15+
cache = Cache("tmp")
16+
authenticator = AppRestrictedAuth(
17+
service=Service.PDS,
18+
secret_manager_client=secrets_manager_client,
19+
environment=pds_env,
20+
cache=cache,
21+
)
22+
pds_service = PdsService(authenticator, pds_env)
23+
24+
patient = pds_service.get_patient_details(nhs_number)
25+
26+
if patient:
27+
pds_nhs_number = patient["identifier"][0]["value"]
28+
return pds_nhs_number
29+
else:
30+
logger.info(f"No patient details found for ID: {nhs_number}")
31+
return None
32+
except Exception:
33+
logger.exception(f"Error getting PDS patient details for {nhs_number}")
34+
return None

lambdas/id_sync/src/record_processor.py

Lines changed: 57 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -3,9 +3,64 @@
33
'''
44
from common.aws_lambda_sqs_event_record import AwsLambdaSqsEventRecord
55
from common.clients import logger
6+
from pds_details import get_pds_patient_details
7+
import json
8+
from typing import Optional
69

710

8-
def process_record(event_record, _):
11+
def process_record(event_record: AwsLambdaSqsEventRecord):
912
record = AwsLambdaSqsEventRecord(event_record) if isinstance(event_record, dict) else event_record
1013
logger.info("Processing record: %s", record)
11-
return f"hello world {record}"
14+
15+
id = get_id(event_record.body)
16+
17+
if id:
18+
# TODO This code is a placeholder for checking if records exist in the database - defaulting to True for now
19+
exists = check_records_exist(id)
20+
21+
if exists:
22+
# get patient details from PDS
23+
patient_details = get_pds_patient_details(id)
24+
if not patient_details:
25+
return {"status": "error", "message": f"No records returned for ID: {id}"}
26+
27+
patient_details_id = patient_details.get("id")
28+
29+
# if patient NHS != id, update patient index of vax events to new number
30+
if patient_details_id != id:
31+
return update_patient_index(id, patient_details_id)
32+
else:
33+
return {"status": "success", "message": "No update required"}
34+
else:
35+
return {"status": "error", "message": f"No records found for ID: {id}"}
36+
else:
37+
return {"status": "error", "message": "No ID found in event record"}
38+
39+
40+
def check_records_exist(id: str) -> bool:
41+
# TODO: Implement logic to check if records exist in the database
42+
logger.info(f"TODO Check if records exist for {id}")
43+
return True
44+
45+
46+
def update_patient_index(old_id: str, new_id: str):
47+
# TODO: Implement logic to update patient index in Redis or other data store
48+
logger.info(f"TODO Update patient index from {old_id} to {new_id}")
49+
return {"status": "success", "message": f"Updated patient idx from {old_id} to {new_id}", "TODO": "Implement logic"}
50+
51+
52+
def get_id(event_body) -> Optional[str]:
53+
"""Extract subject identifier from FHIR Bundle notification event"""
54+
try:
55+
# Parse JSON if it's a string
56+
if isinstance(event_body, str):
57+
data = json.loads(event_body)
58+
else:
59+
data = event_body
60+
# Navigate through the nested structure
61+
subject = data.get("subject")
62+
return subject
63+
64+
except (json.JSONDecodeError, KeyError, AttributeError) as e:
65+
logger.error("Error extracting subject identifier: %s", e)
66+
return None

0 commit comments

Comments
 (0)