
Commit 9bd9f71

log rows returned. Tests Pass
1 parent f2d17ed commit 9bd9f71

File tree: 8 files changed, +352 −472 lines

.github/workflows/sonarcloud.yml

Lines changed: 2 additions & 2 deletions
@@ -13,7 +13,7 @@ jobs:
     runs-on: ubuntu-latest

     steps:
-      - uses: actions/checkout@v5
+      - uses: actions/checkout@v4
         with:
           fetch-depth: 0

@@ -60,7 +60,7 @@ jobs:
         continue-on-error: true
         run: |
           poetry install
-          poetry run coverage run -m unittest discover -p "*batch*.py" || echo "recordforwarder tests failed" >> ../failed_tests.txt
+          poetry run coverage run -m unittest discover -s "./tests" -p "*batch*.py" || echo "recordforwarder tests failed" >> ../failed_tests.txt
           poetry run coverage xml -o ../recordforwarder-coverage.xml

       - name: Run unittest with coverage-ack-lambda
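
For reference, the only functional change in this workflow is the -s "./tests" start-directory flag passed to unittest discovery. A minimal Python sketch of the equivalent programmatic call; the directory and pattern come from the workflow step above, while the runner setup is purely illustrative:

import unittest

# Equivalent of: python -m unittest discover -s "./tests" -p "*batch*.py"
# "./tests" and "*batch*.py" are taken from the workflow step above.
loader = unittest.TestLoader()
suite = loader.discover(start_dir="./tests", pattern="*batch*.py")

# Illustrative runner; the coverage wrapping used in the workflow is omitted here.
runner = unittest.TextTestRunner(verbosity=2)
result = runner.run(suite)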

recordprocessor/poetry.lock

Lines changed: 130 additions & 251 deletions
Some generated files are not rendered by default.

recordprocessor/pyproject.toml

Lines changed: 28 additions & 28 deletions
@@ -1,34 +1,34 @@
 [tool.poetry]
-name = "processor"
-version = "0.1.0"
+name = "filenameprocessor"
+version = "0.1.0"
 description = ""
-authors = ["Your Name <[email protected]>"]
-readme = "README.md"
-packages = [{include = "src"}]
+readme = "README.md"
+authors = ["Your Name <[email protected]>"]
+packages = [{ include = "src" }]

 [tool.poetry.dependencies]
-python = "~3.11"
-"fhir.resources" = "~7.0.2"
-boto3 = "~1.38.42"
-boto3-stubs-lite = {extras = ["dynamodb"], version = "~1.38.42"}
-aws-lambda-typing = "~2.20.0"
-moto = "^4"
-requests = "~2.32.4"
-responses = "~0.25.7"
-pydantic = "~1.10.13"
-pyjwt = "~2.10.1"
-cryptography = "~42.0.4"
-cffi = "~1.17.1"
-jsonpath-ng = "^1.6.0"
-simplejson = "^3.20.1"
-structlog = "^24.1.0"
-pandas = "^2.3.0"
-freezegun = "^1.5.2"
-coverage = "^7.9.1"
-redis = "^6.2.0"
-numpy = "~2.2.6"
+python = "~3.11"
+"fhir.resources" = "~7.0.2"
+boto3 = "~1.38.42"
+boto3-stubs-lite = { extras = ["dynamodb"], version = "~1.38.42" }
+aws-lambda-typing = "~2.20.0"
+requests = "~2.32.4"
+responses = "~0.25.7"
+pydantic = "~1.10.13"
+pyjwt = "~2.10.1"
+cryptography = "~42.0.4"
+cffi = "~1.17.1"
+jsonpath-ng = "^1.6.0"
+simplejson = "^3.20.1"
+structlog = "^24.1.0"
+redis = "^5.1.1"
+coverage = "^7.9.1"
+freezegun = "^1.5.2"
+fakeredis = "^2.30.1"

-[build-system]
-requires = ["poetry-core ~= 1.5.0"]
+[tool.poetry.group.dev.dependencies]
+moto = {extras = ["s3"], version = "^5.1.12"}

-build-backend = "poetry.core.masonry.api"
+[build-system]
+requires = ["poetry-core >= 1.5.0"]
+build-backend = "poetry.core.masonry.api"
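
With moto moved to a dev-only dependency group (with the s3 extra), tests would typically stub S3 in-process rather than touch AWS. A minimal sketch, assuming moto 5.x's mock_aws decorator; the bucket name, region and object key below are illustrative and not taken from this repository:

import boto3
from moto import mock_aws


@mock_aws
def test_reads_csv_from_mocked_s3():
    # mock_aws intercepts boto3 calls, so no real AWS resources are touched.
    s3 = boto3.client("s3", region_name="eu-west-2")
    s3.create_bucket(
        Bucket="example-source-bucket",
        CreateBucketConfiguration={"LocationConstraint": "eu-west-2"},
    )
    s3.put_object(Bucket="example-source-bucket", Key="example.csv", Body=b"HEADER\nrow1")

    body = s3.get_object(Bucket="example-source-bucket", Key="example.csv")["Body"].read()
    assert body.startswith(b"HEADER")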

recordprocessor/src/batch_processor.py

Lines changed: 11 additions & 8 deletions
@@ -13,10 +13,12 @@
 from typing import Optional


-def process_csv_to_fhir(incoming_message_body: dict) -> None:
+def process_csv_to_fhir(incoming_message_body: dict) -> int:
     """
     For each row of the csv, attempts to transform into FHIR format, sends a message to kinesis,
     and documents the outcome for each row in the ack file.
+    Returns the number of rows processed. While this is not used by the handler, the number of rows
+    processed must be correct and therefore is returned for logging and test purposes.
     """
     encoder = "utf-8"  # default encoding
     try:
@@ -50,10 +52,10 @@ def process_csv_to_fhir(incoming_message_body: dict) -> None:
         logger.info(f"process with encoder {encoder} from row {row_count+1}")
         row_count, err = process_rows(file_id, vaccine, supplier, file_key, allowed_operations,
                                       created_at_formatted_string, csv_reader, target_disease)
+
         if err:
-            logger.warning(f"Error processing: {err}.")
-            # check if it's a decode error
-            if err.reason == "invalid continuation byte":
+            logger.warning(f"Processing Error: {err}.")
+            if isinstance(err, InvalidEncoding):
                 new_encoder = "cp1252"
                 logger.info(f"Encode error at row {row_count} with {encoder}. Switch to {new_encoder}")
                 encoder = new_encoder
@@ -63,10 +65,9 @@ def process_csv_to_fhir(incoming_message_body: dict) -> None:
                 row_count, err = process_rows(file_id, vaccine, supplier, file_key, allowed_operations,
                                               created_at_formatted_string, csv_reader, target_disease, row_count)
             else:
-                logger.error(f"Non-decode error: {err}. Cannot retry. Call someone.")
+                logger.error(f"Row Processing error: {err}")
                 raise err

-    logger.info("Total rows processed: %s", row_count)
     return row_count


@@ -108,7 +109,7 @@ def process_rows(file_id, vaccine, supplier, file_key, allowed_operations, creat
             # if error reason is 'invalid continuation byte', then it's a decode error
             logger.error("Error processing row %s: %s", row_count, error)
             if hasattr(error, 'reason') and error.reason == "invalid continuation byte":
-                return total_rows_processed_count, error
+                return total_rows_processed_count, InvalidEncoding("Invalid continuation byte")
             else:
                 raise error
     return total_rows_processed_count, None
@@ -118,11 +119,13 @@ def main(event: str) -> None:
     """Process each row of the file"""
     logger.info("task started")
     start = time.time()
+    n_rows_processed = 0
     try:
-        process_csv_to_fhir(incoming_message_body=json.loads(event))
+        n_rows_processed = process_csv_to_fhir(incoming_message_body=json.loads(event))
     except Exception as error:  # pylint: disable=broad-exception-caught
         logger.error("Error processing message: %s", error)
     end = time.time()
+    logger.info("Total rows processed: %s", n_rows_processed)
     logger.info("Total time for completion: %ss", round(end - start, 5))

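
Taken together: process_rows now surfaces decode failures as an InvalidEncoding error, process_csv_to_fhir retries once with cp1252 and returns the row count, and main logs that count. A condensed sketch of this control flow; InvalidEncoding, the cp1252 fallback and the row-count return come from the diff above, while the helper names and row handling are simplified placeholders:

class InvalidEncoding(Exception):
    """Raised when a row cannot be decoded with the current encoder."""


def process_rows_sketch(rows: list[bytes], encoder: str, start: int = 0) -> tuple[int, Exception | None]:
    # Placeholder for process_rows: decode each row and report how far we got.
    count = start
    for raw in rows[start:]:
        try:
            raw.decode(encoder)
            count += 1
        except UnicodeDecodeError as error:
            if error.reason == "invalid continuation byte":
                # Signal a decode problem to the caller instead of raising,
                # so it can switch encodings and resume from this row.
                return count, InvalidEncoding("Invalid continuation byte")
            raise
    return count, None


def process_csv_to_fhir_sketch(rows: list[bytes]) -> int:
    encoder = "utf-8"  # default encoding
    row_count, err = process_rows_sketch(rows, encoder)

    if err:
        if isinstance(err, InvalidEncoding):
            # Decode failure: retry the remaining rows with cp1252.
            encoder = "cp1252"
            row_count, err = process_rows_sketch(rows, encoder, start=row_count)
        else:
            raise err

    return row_count  # the caller (main) logs this as "Total rows processed"

For example, process_csv_to_fhir_sketch([b"ok", b"caf\xe9 latte"]) decodes the first row as UTF-8, hits the invalid continuation byte on the second, finishes it on the cp1252 retry, and returns 2.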

recordprocessor/tests/test_batch_processor.py

Lines changed: 0 additions & 183 deletions
This file was deleted.
