Skip to content

Commit b41101e

Browse files
committed
fix tests imports
1 parent ed05df1 commit b41101e

File tree

8 files changed

+31
-21
lines changed

8 files changed

+31
-21
lines changed

functions-python/batch_datasets/src/main.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -29,7 +29,8 @@
2929
from sqlalchemy.orm import Session
3030

3131
from shared.database_gen.sqlacodegen_models import Gtfsfeed, Gtfsdataset
32-
from shared.dataset_service.main import BatchExecutionService, BatchExecution
32+
from shared.dataset_service.dataset_service_commons import BatchExecution
33+
from shared.dataset_service.main import BatchExecutionService
3334
from shared.database.database import with_db_session
3435
from shared.helpers.logger import init_logger
3536

functions-python/dataset_service/main.py

Lines changed: 17 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -13,19 +13,30 @@
1313
# See the License for the specific language governing permissions and
1414
# limitations under the License.
1515
#
16+
import importlib
1617
import logging
1718
import uuid
1819
from dataclasses import asdict
1920
from typing import Final
2021
from google.cloud import datastore
2122
from google.cloud.datastore import Client
2223

23-
from dataset_service_commons import (
24-
DatasetTrace,
25-
Status,
26-
PipelineStage,
27-
BatchExecution,
28-
)
24+
# This allows the module to be run as a script or imported as a module
25+
if __package__ is None or __package__ == "":
26+
import os
27+
import sys
28+
29+
sys.path.append(os.path.dirname(os.path.abspath(__file__)))
30+
import dataset_service_commons
31+
else:
32+
dataset_service_commons = importlib.import_module(
33+
".dataset_service_commons", package=__package__
34+
)
35+
36+
Status = dataset_service_commons.Status
37+
PipelineStage = dataset_service_commons.PipelineStage
38+
BatchExecution = dataset_service_commons.BatchExecution
39+
DatasetTrace = dataset_service_commons.DatasetTrace
2940

3041
# This file contains the dataset trace and batch execution models and services.
3142
# The dataset trace is used to store the trace of a dataset and the batch execution

functions-python/dataset_service/tests/__init__.py

Whitespace-only changes.

functions-python/dataset_service/tests/test_dataset_service.py

Lines changed: 2 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -2,13 +2,8 @@
22
from datetime import datetime
33
from unittest.mock import patch, MagicMock
44

5-
from main import (
6-
DatasetTrace,
7-
DatasetTraceService,
8-
Status,
9-
BatchExecutionService,
10-
BatchExecution,
11-
)
5+
from dataset_service_commons import DatasetTrace, Status, BatchExecution
6+
from main import DatasetTraceService, BatchExecutionService
127

138

149
class TestDatasetService(unittest.TestCase):

functions-python/helpers/utils.py

Lines changed: 2 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -28,9 +28,6 @@
2828
from urllib3.util.ssl_ import create_urllib3_context
2929
from pathlib import Path
3030

31-
from shared.dataset_service.dataset_service_commons import DatasetTrace
32-
from shared.helpers.logger import get_logger
33-
3431

3532
def create_bucket(bucket_name):
3633
"""
@@ -283,6 +280,8 @@ def record_execution_trace(
283280
Record the trace in the datastore
284281
"""
285282
from shared.dataset_service.main import DatasetTraceService
283+
from shared.dataset_service.dataset_service_commons import DatasetTrace
284+
from shared.helpers.logger import get_logger
286285

287286
trace_service = DatasetTraceService()
288287

functions-python/reverse_geolocation/src/location_group_utils.py

Lines changed: 6 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -221,13 +221,18 @@ def extract_location_aggregate_geopolygons(
221221
) -> Optional[GeopolygonAggregate]:
222222
admin_levels = {g.admin_level for g in geopolygons}
223223
# If duplicates per admin_level exist, resolve instead of returning None
224-
if admin_levels != len(geopolygons):
224+
if len(admin_levels) != len(geopolygons):
225225
logger.warning(
226226
"Duplicate admin levels for point: %s -> %s",
227227
stop_point,
228228
geopolygons_as_string(geopolygons),
229229
)
230230
geopolygons = dedupe_by_admin_level(geopolygons, logger)
231+
logger.warning(
232+
"Deduplicated admin levels for point: %s -> %s",
233+
stop_point,
234+
geopolygons_as_string(geopolygons),
235+
)
231236

232237
valid_iso_3166_1 = any(g.iso_3166_1_code for g in geopolygons)
233238
valid_iso_3166_2 = any(g.iso_3166_2_code for g in geopolygons)

functions-python/reverse_geolocation/src/scripts/reverse_geolocation_process_verifier.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -80,7 +80,7 @@
8080
},
8181
]
8282
run_with_feed_index = (
83-
5 # Set to an integer index to run with a specific feed from the list above
83+
6 # Set to an integer index to run with a specific feed from the list above
8484
)
8585

8686

functions-python/reverse_geolocation/src/strategy_extraction_per_polygon.py

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -106,7 +106,6 @@ def extract_location_aggregates_per_polygon(
106106
logger.warning("No geopolygons found for point: %s", stop_point)
107107
continue
108108

109-
rep_geom = highest.geometry
110109
country_code = get_country_code_from_polygons(geopolygons)
111110
if highest.admin_level >= get_country_locality_admin_level(country_code):
112111
# If admin_level >= locality_admin_level, we can filter points inside this polygon
@@ -144,7 +143,7 @@ def extract_location_aggregates_per_polygon(
144143

145144
# Process ONLY ONE representative point for this stop "cluster"
146145
location_aggregate = extract_location_aggregate_geopolygons(
147-
stop_point=rep_geom,
146+
stop_point=stop_point,
148147
geopolygons=geopolygons,
149148
logger=logger,
150149
db_session=db_session,

0 commit comments

Comments
 (0)