Skip to content

Commit 62ec1c9

Browse files
committed
lint
1 parent 8a3a624 commit 62ec1c9

File tree

2 files changed

+17
-11
lines changed

2 files changed

+17
-11
lines changed

request-processor/src/application/core/pipeline.py

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -7,8 +7,6 @@
77

88
from digital_land.pipeline import Pipeline, Lookups
99
from digital_land.commands import get_resource_unidentified_lookups
10-
from application.core.utils import append_endpoint, append_source
11-
from datetime import datetime
1210
from pathlib import Path
1311

1412
logger = get_logger(__name__)
@@ -42,7 +40,7 @@ def fetch_response_data(
4240
try:
4341
for file_name in files_in_resource:
4442
file_path = os.path.join(input_path, file_name)
45-
# retrieve unassigned entities and assign
43+
# retrieve unassigned entities and assign, TODO: Is this necessary here?
4644
assign_entries(
4745
resource_path=file_path,
4846
dataset=dataset,
@@ -197,7 +195,7 @@ def fetch_add_data_response(
197195
output_path,
198196
specification_dir,
199197
cache_dir,
200-
url
198+
url,
201199
):
202200
try:
203201
specification = Specification(specification_dir)
@@ -262,7 +260,9 @@ def fetch_add_data_response(
262260
new_entities.extend(new_lookups)
263261

264262
# Reload pipeline to pick up newly saved lookups
265-
pipeline = Pipeline(pipeline_dir, dataset, specification=specification)
263+
pipeline = Pipeline(
264+
pipeline_dir, dataset, specification=specification
265+
)
266266

267267
# Now re-run transform to check and return issue log
268268
issues_log = pipeline.transform(
@@ -401,4 +401,4 @@ def _map_transformed_entities(transformed_csv_path, pipeline_dir): # noqa: C901
401401
}
402402
)
403403

404-
return mapped_entities
404+
return mapped_entities

request-processor/src/application/core/workflow.py

Lines changed: 11 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -453,10 +453,14 @@ def add_data_workflow(
453453

454454
# Loads csvs for Pipeline and Config
455455
if not fetch_add_data_pipeline_csvs(collection, pipeline_dir):
456-
response_data['message'] = f"Unable to find lookups for collection '{collection}', dataset '{dataset}'"
456+
response_data[
457+
"message"
458+
] = f"Unable to find lookups for collection '{collection}', dataset '{dataset}'"
457459
return response_data
458460
if not fetch_add_data_collection_csvs(collection, collection_dir):
459-
response_data['message'] = f"Unable to find lookups for collection '{collection}', dataset '{dataset}'"
461+
response_data[
462+
"message"
463+
] = f"Unable to find lookups for collection '{collection}', dataset '{dataset}'"
460464
return response_data
461465

462466
# All processes around transforming the data and generating pipeline summary
@@ -468,7 +472,7 @@ def add_data_workflow(
468472
output_path=output_path,
469473
specification_dir=directories.SPECIFICATION_DIR,
470474
cache_dir=directories.CACHE_DIR,
471-
url=url
475+
url=url,
472476
)
473477

474478
# Create endpoint and source summaries in workflow
@@ -498,8 +502,10 @@ def add_data_workflow(
498502
logger.info(f"add data response is for id {request_id} : {response_data}")
499503

500504
except Exception as e:
501-
logger.warning(f"An error occurred in add_data_workflow: {e} for request id {request_id}")
502-
response_data['message'] = f"An error occurred in add_data_workflow: {e}"
505+
logger.warning(
506+
f"An error occurred in add_data_workflow: {e} for request id {request_id}"
507+
)
508+
response_data["message"] = f"An error occurred in add_data_workflow: {e}"
503509

504510
finally:
505511
clean_up(

0 commit comments

Comments
 (0)