Skip to content

Commit d78d88f

Browse files
committed
Added the `shared.` and `test_shared.` prefixes to the imports.
1 parent 240fb8e commit d78d88f

File tree

57 files changed

+155
-127
lines changed

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

57 files changed

+155
-127
lines changed

functions-python/batch_datasets/src/main.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -25,9 +25,9 @@
2525
from google.cloud.pubsub_v1.futures import Future
2626
from sqlalchemy import or_
2727
from sqlalchemy.orm import Session
28-
from database_gen.sqlacodegen_models import Gtfsfeed, Gtfsdataset
29-
from dataset_service.main import BatchExecutionService, BatchExecution
30-
from helpers.database import Database
28+
from shared.database_gen.sqlacodegen_models import Gtfsfeed, Gtfsdataset
29+
from shared.dataset_service.main import BatchExecutionService, BatchExecution
30+
from shared.helpers.database import Database
3131

3232
pubsub_topic_name = os.getenv("PUBSUB_TOPIC_NAME")
3333
project_id = os.getenv("PROJECT_ID")

functions-python/batch_datasets/tests/conftest.py

Lines changed: 6 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -17,8 +17,12 @@
1717
from faker import Faker
1818
from faker.generator import random
1919
from datetime import datetime
20-
from database_gen.sqlacodegen_models import Gtfsfeed, Gtfsrealtimefeed, Gtfsdataset
21-
from test_utils.database_utils import clean_testing_db, get_testing_session
20+
from shared.database_gen.sqlacodegen_models import (
21+
Gtfsfeed,
22+
Gtfsrealtimefeed,
23+
Gtfsdataset,
24+
)
25+
from test_shared.test_utils.database_utils import clean_testing_db, get_testing_session
2226

2327

2428
def populate_database():

functions-python/batch_datasets/tests/test_batch_datasets_main.py

Lines changed: 5 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -19,7 +19,7 @@
1919
import pytest
2020
from unittest.mock import Mock, patch, MagicMock
2121
from main import get_non_deprecated_feeds, batch_datasets
22-
from test_utils.database_utils import get_testing_session, default_db_url
22+
from test_shared.test_utils.database_utils import get_testing_session, default_db_url
2323

2424

2525
def test_get_non_deprecated_feeds():
@@ -46,10 +46,12 @@ def test_batch_datasets(mock_client, mock_publish):
4646
with get_testing_session() as session:
4747
feeds = get_non_deprecated_feeds(session)
4848
with patch(
49-
"dataset_service.main.BatchExecutionService.__init__", return_value=None
49+
"shared.dataset_service.main.BatchExecutionService.__init__",
50+
return_value=None,
5051
):
5152
with patch(
52-
"dataset_service.main.BatchExecutionService.save", return_value=None
53+
"shared.dataset_service.main.BatchExecutionService.save",
54+
return_value=None,
5355
):
5456
batch_datasets(Mock())
5557
assert mock_publish.call_count == 5

functions-python/batch_process_dataset/src/main.py

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -29,13 +29,13 @@
2929
from google.cloud import storage
3030
from sqlalchemy import func
3131

32-
from database_gen.sqlacodegen_models import Gtfsdataset, t_feedsearch
33-
from dataset_service.main import DatasetTraceService, DatasetTrace, Status
34-
from helpers.database import Database, refresh_materialized_view, with_db_session
32+
from shared.database_gen.sqlacodegen_models import Gtfsdataset, t_feedsearch
33+
from shared.dataset_service.main import DatasetTraceService, DatasetTrace, Status
34+
from shared.helpers.database import Database, refresh_materialized_view, with_db_session
3535
import logging
3636

37-
from helpers.logger import Logger
38-
from helpers.utils import download_and_get_hash
37+
from shared.helpers.logger import Logger
38+
from shared.helpers.utils import download_and_get_hash
3939

4040
if TYPE_CHECKING:
4141
from sqlalchemy.orm import Session

functions-python/batch_process_dataset/tests/conftest.py

Lines changed: 6 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -17,8 +17,12 @@
1717
from faker import Faker
1818
from faker.generator import random
1919
from datetime import datetime
20-
from database_gen.sqlacodegen_models import Gtfsfeed, Gtfsrealtimefeed, Gtfsdataset
21-
from test_utils.database_utils import clean_testing_db, get_testing_session
20+
from shared.database_gen.sqlacodegen_models import (
21+
Gtfsfeed,
22+
Gtfsrealtimefeed,
23+
Gtfsdataset,
24+
)
25+
from test_shared.test_utils.database_utils import clean_testing_db, get_testing_session
2226

2327

2428
def populate_database():

functions-python/batch_process_dataset/tests/test_batch_process_dataset_main.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -11,8 +11,8 @@
1111
DatasetFile,
1212
process_dataset,
1313
)
14-
from database_gen.sqlacodegen_models import Gtfsfeed
15-
from test_utils.database_utils import get_testing_session, default_db_url
14+
from shared.database_gen.sqlacodegen_models import Gtfsfeed
15+
from test_shared.test_utils.database_utils import get_testing_session, default_db_url
1616
from cloudevents.http import CloudEvent
1717

1818
public_url = (

functions-python/big_query_ingestion/src/common/bq_data_transfer.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,7 @@
55
from google.cloud import bigquery, storage
66
from google.cloud.bigquery.job import LoadJobConfig, SourceFormat
77

8-
from helpers.bq_schema.schema import json_schema_to_bigquery, load_json_schema
8+
from shared.helpers.bq_schema.schema import json_schema_to_bigquery, load_json_schema
99

1010
# Environment variables
1111
project_id = os.getenv("PROJECT_ID")

functions-python/big_query_ingestion/src/main.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,7 @@
22

33
import functions_framework
44

5-
from helpers.logger import Logger
5+
from shared.helpers.logger import Logger
66
from gbfs.gbfs_big_query_ingest import BigQueryDataTransferGBFS
77
from gtfs.gtfs_big_query_ingest import BigQueryDataTransferGTFS
88

functions-python/big_query_ingestion/tests/test_main.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -9,7 +9,7 @@
99

1010
class TestMain(unittest.TestCase):
1111
@patch("main.BigQueryDataTransferGTFS")
12-
@patch("helpers.logger.Logger.init_logger")
12+
@patch("shared.helpers.logger.Logger.init_logger")
1313
@patch("main.logging.info")
1414
def test_ingest_data_to_big_query_gtfs(
1515
self, mock_logging_info, mock_init_logger, mock_big_query_transfer_gtfs
@@ -28,7 +28,7 @@ def test_ingest_data_to_big_query_gtfs(
2828
self.assertEqual(response, ("Data successfully loaded to BigQuery", 200))
2929

3030
@patch("main.BigQueryDataTransferGBFS")
31-
@patch("helpers.logger.Logger.init_logger")
31+
@patch("shared.helpers.logger.Logger.init_logger")
3232
@patch("main.logging.info")
3333
def test_ingest_data_to_big_query_gbfs(
3434
self, mock_logging_info, mock_init_logger, mock_biq_query_transfer_gbfs

functions-python/extract_location/src/bounding_box/bounding_box_extractor.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
import numpy
22
from geoalchemy2 import WKTElement
33

4-
from database_gen.sqlacodegen_models import Gtfsdataset
4+
from shared.database_gen.sqlacodegen_models import Gtfsdataset
55

66

77
def create_polygon_wkt_element(bounds: numpy.ndarray) -> WKTElement:

0 commit comments

Comments (0)