
Commit 2658e25

Merge branch 'main' into 1034-change-how-often-were-pulling-the-data-to-daily
2 parents 5161242 + 98e2765

33 files changed: +865, -235 lines

.github/workflows/integration-tests-pr.yml

Lines changed: 0 additions & 11 deletions
@@ -49,12 +49,6 @@ jobs:
       id: getpath
       run: echo "FILE_PATH=$(realpath sources.csv)" >> $GITHUB_ENV

-    - name: Install dependencies
-      run: |
-        python -m pip install --upgrade pip
-        pip install -r integration-tests/requirements.txt
-        pip install -r api/requirements.txt
-
     - name: Set up JDK ${{ env.java_version }}
       uses: actions/setup-java@v4
       with:
@@ -110,11 +104,6 @@ jobs:
         ./scripts/tunnel-create.sh -project_id ${{ vars.QA_MOBILITY_FEEDS_PROJECT_ID }} -zone ${{ vars.MOBILITY_FEEDS_REGION }}-a -instance ${{ env.GCP_FEED_BASTION_NAME }}-${{ vars.QA_MOBILITY_FEEDS_ENVIRONMENT }} -target_account ${{ env.GCP_FEED_SSH_USER }} -db_instance ${{ secrets.DB_INSTANCE_NAME }} -port 5454
         sleep 10 # Wait for the tunnel to establish

-    - name: Test Database Connection Through Tunnel
-      run: |
-        sudo apt-get update && sudo apt-get install -y postgresql-client
-        PGPASSWORD=${{ secrets.QA_POSTGRE_USER_PASSWORD }} psql -h localhost -p 5454 -U ${{ secrets.QA_POSTGRE_USER_NAME }} -d ${{ vars.QA_POSTGRE_SQL_DB_NAME }} -c "SELECT version();"
-
     - name: Update .env.local
       run: |
         echo "FEEDS_DATABASE_URL=postgresql://${{ secrets.QA_POSTGRE_USER_NAME }}:${{ secrets.QA_POSTGRE_USER_PASSWORD }}@localhost:5454/${{ vars.QA_POSTGRE_SQL_DB_NAME }}" >> $GITHUB_ENV

.github/workflows/integration-tests.yml

Lines changed: 0 additions & 6 deletions
@@ -43,12 +43,6 @@ jobs:
       id: getpath
       run: echo "FILE_PATH=$(realpath sources.csv)" >> $GITHUB_ENV

-    - name: Install dependencies
-      working-directory: integration-tests
-      run: |
-        python -m pip install --upgrade pip
-        pip install -r requirements.txt
-
     - name: Health Check
       # When triggered by a repo dispatch the API code is not deployed so there could be discrepancies
       # between the code running the tests and the code running the API. In that case don't do the

api/src/feeds/impl/models/gtfs_dataset_impl.py

Lines changed: 1 addition & 0 deletions
@@ -51,4 +51,5 @@ def from_orm(cls, gtfs_dataset: Gtfsdataset | None) -> GtfsDataset | None:
             validation_report=cls.from_orm_latest_validation_report(gtfs_dataset.validation_reports),
             service_date_range_start=gtfs_dataset.service_date_range_start,
             service_date_range_end=gtfs_dataset.service_date_range_end,
+            agency_timezone=gtfs_dataset.agency_timezone,
         )
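
Note: the model changes in this commit all follow the same pattern, copying the new agency_timezone column from the ORM row into the API model inside from_orm. A minimal, self-contained sketch of that pattern follows; the dataclasses below are illustrative stand-ins, not the project's Gtfsdataset/GtfsDataset classes.

# Illustrative sketch only: stand-in classes, not the project's models.
from dataclasses import dataclass
from typing import Optional


@dataclass
class OrmDatasetRow:
    # Hypothetical stand-in for the SQLAlchemy dataset row.
    service_date_range_start: Optional[str] = None
    service_date_range_end: Optional[str] = None
    agency_timezone: Optional[str] = None


@dataclass
class DatasetModel:
    # Hypothetical stand-in for the API response model.
    service_date_range_start: Optional[str] = None
    service_date_range_end: Optional[str] = None
    agency_timezone: Optional[str] = None

    @classmethod
    def from_orm(cls, row: Optional[OrmDatasetRow]) -> Optional["DatasetModel"]:
        if row is None:
            return None
        return cls(
            service_date_range_start=row.service_date_range_start,
            service_date_range_end=row.service_date_range_end,
            agency_timezone=row.agency_timezone,  # the newly mapped field
        )


# Example: a row with a timezone maps straight through to the model.
print(DatasetModel.from_orm(OrmDatasetRow(agency_timezone="Canada/Atlantic")))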

api/src/feeds/impl/models/latest_dataset_impl.py

Lines changed: 1 addition & 0 deletions
@@ -53,6 +53,7 @@ def from_orm(cls, dataset: Gtfsdataset | None) -> LatestDataset | None:
             downloaded_at=dataset.downloaded_at,
             service_date_range_start=dataset.service_date_range_start,
             service_date_range_end=dataset.service_date_range_end,
+            agency_timezone=dataset.agency_timezone,
             hash=dataset.hash,
             validation_report=validation_report,
         )

api/src/feeds/impl/models/search_feed_item_result_impl.py

Lines changed: 1 addition & 0 deletions
@@ -46,6 +46,7 @@ def from_orm_gtfs(cls, feed_search_row):
                 hash=feed_search_row.latest_dataset_hash,
                 service_date_range_start=feed_search_row.latest_dataset_service_date_range_start,
                 service_date_range_end=feed_search_row.latest_dataset_service_date_range_end,
+                agency_timezone=feed_search_row.latest_dataset_agency_timezone,
             )
             if feed_search_row.latest_dataset_id
             else None,

api/tests/test_data/extra_test_data.json

Lines changed: 12 additions & 8 deletions
@@ -8,8 +8,9 @@
       "hash": "hash",
       "downloaded_at": "2024-01-31T00:00:00+00:00",
       "feed_stable_id": "mdb-1",
-      "service_date_range_start": "2024-01-01",
-      "service_date_range_end":"2025-01-01"
+      "service_date_range_start": "2024-09-29T00:00:00+00:00",
+      "service_date_range_end":"2025-09-29T00:00:00+00:00",
+      "agency_timezone": "Canada/Atlantic"
     },
     {
       "id": "dataset-2",
@@ -19,8 +20,9 @@
       "hash": "hash",
       "downloaded_at": "2024-02-01T00:00:00+00:00",
       "feed_stable_id": "mdb-1",
-      "service_date_range_start": "2024-01-01",
-      "service_date_range_end":"2025-01-01"
+      "service_date_range_start": "2024-09-29T00:00:00+00:00",
+      "service_date_range_end":"2025-09-29T00:00:00+00:00",
+      "agency_timezone": "Canada/Atlantic"
     },
     {
       "id": "dataset-3",
@@ -30,8 +32,9 @@
       "hash": "hash",
       "downloaded_at": "2024-02-02T00:00:00+00:00",
       "feed_stable_id": "mdb-10",
-      "service_date_range_start": "2024-01-01",
-      "service_date_range_end":"2025-01-01"
+      "service_date_range_start": "2024-09-29T00:00:00+00:00",
+      "service_date_range_end":"2025-09-29T00:00:00+00:00",
+      "agency_timezone": "Canada/Atlantic"
     },
     {
       "id": "dataset-4",
@@ -41,8 +44,9 @@
       "hash": "hash",
       "downloaded_at": "2024-02-03T00:00:00+00:00",
       "feed_stable_id": "mdb-10",
-      "service_date_range_start": "2024-01-01",
-      "service_date_range_end":"2025-01-01"
+      "service_date_range_start": "2024-09-29T00:00:00+00:00",
+      "service_date_range_end":"2025-09-29T00:00:00+00:00",
+      "agency_timezone": "Canada/Atlantic"
     }
   ],
   "validation_reports": [

api/tests/test_data/test_datasets.json

Lines changed: 15 additions & 10 deletions
@@ -9,8 +9,9 @@
       "hash": "hash-1",
       "latest": true,
       "bounding_box": "POLYGON((-122.75 36.8, -122.75 37.8, -121.75 37.8, -121.75 36.8, -122.75 36.8))",
-      "service_date_range_start": "2024-01-01",
-      "service_date_range_end":"2025-01-01"
+      "service_date_range_start": "2024-09-29T00:00:00+00:00",
+      "service_date_range_end":"2025-09-29T00:00:00+00:00",
+      "agency_timezone": "Canada/Atlantic"
     },
     {
       "id": "dataset-2",
@@ -20,8 +21,9 @@
       "hash": "hash-2",
       "latest": false,
       "bounding_box": "POLYGON((-122.75 36.8, -122.75 37.8, -121.75 37.8, -121.75 36.8, -122.75 36.8))",
-      "service_date_range_start": "2024-01-01",
-      "service_date_range_end":"2025-01-01"
+      "service_date_range_start": "2024-09-29T00:00:00+00:00",
+      "service_date_range_end":"2025-09-29T00:00:00+00:00",
+      "agency_timezone": "Canada/Atlantic"
     },
     {
       "id": "dataset-3",
@@ -31,8 +33,9 @@
       "hash": "hash-3",
       "latest": true,
       "bounding_box": "POLYGON((-122.75 36.8, -122.75 37.8, -121.75 37.8, -121.75 36.8, -122.75 36.8))",
-      "service_date_range_start": "2024-01-01",
-      "service_date_range_end":"2025-01-01"
+      "service_date_range_start": "2024-09-29T00:00:00+00:00",
+      "service_date_range_end":"2025-09-29T00:00:00+00:00",
+      "agency_timezone": "Canada/Atlantic"
     },
     {
       "id": "dataset-4",
@@ -42,8 +45,9 @@
       "hash": "hash-4",
       "latest": false,
       "bounding_box": "POLYGON((-122.75 36.8, -122.75 37.8, -121.75 37.8, -121.75 36.8, -122.75 36.8))",
-      "service_date_range_start": "2024-01-01",
-      "service_date_range_end":"2025-01-01"
+      "service_date_range_start": "2024-09-29T00:00:00+00:00",
+      "service_date_range_end":"2025-09-29T00:00:00+00:00",
+      "agency_timezone": "Canada/Atlantic"
     },
     {
       "id": "dataset-5",
@@ -53,8 +57,9 @@
       "hash": "hash-5",
       "latest": true,
       "bounding_box": "POLYGON((-122.75 36.8, -122.75 37.8, -121.75 37.8, -121.75 36.8, -122.75 36.8))",
-      "service_date_range_start": "2024-01-01",
-      "service_date_range_end":"2025-01-01"
+      "service_date_range_start": "2024-09-29T00:00:00+00:00",
+      "service_date_range_end":"2025-09-29T00:00:00+00:00",
+      "agency_timezone": "Canada/Atlantic"
     }
   ],
   "validation_reports": [

api/tests/unittest/models/test_basic_feed_impl.py

Lines changed: 5 additions & 3 deletions
@@ -1,6 +1,7 @@
 import copy
 import unittest
-from datetime import datetime, date
+from datetime import datetime
+from zoneinfo import ZoneInfo

 from shared.database_gen.sqlacodegen_models import (
     Feed,
@@ -64,8 +65,9 @@
             downloaded_at="downloaded_at",
             hash="hash",
             bounding_box="bounding_box",
-            service_date_range_start=date(2024, 1, 1),
-            service_date_range_end=date(2025, 1, 1),
+            service_date_range_start=datetime(2024, 1, 1, 0, 0, 0, tzinfo=ZoneInfo("Canada/Atlantic")),
+            service_date_range_end=datetime(2025, 1, 1, 0, 0, 0, tzinfo=ZoneInfo("Canada/Atlantic")),
+            agency_timezone="Canada/Atlantic",
             validation_reports=[
                 Validationreport(
                     id="id",

api/tests/unittest/models/test_gtfs_dataset_impl.py

Lines changed: 8 additions & 5 deletions
@@ -1,5 +1,6 @@
 import unittest
-from datetime import datetime, date
+from datetime import datetime
+from zoneinfo import ZoneInfo

 from geoalchemy2 import WKTElement

@@ -42,8 +43,9 @@ def test_from_orm(self):
                 Validationreport(validator_version="0.2.0"),
                 Validationreport(validator_version="1.1.1"),
             ],
-            service_date_range_start=date(2024, 1, 1),
-            service_date_range_end=date(2025, 1, 1),
+            service_date_range_start=datetime(2024, 1, 1, 0, 0, 0, tzinfo=ZoneInfo("Canada/Atlantic")),
+            service_date_range_end=datetime(2025, 1, 1, 0, 0, 0, tzinfo=ZoneInfo("Canada/Atlantic")),
+            agency_timezone="Canada/Atlantic",
         )
         result = GtfsDatasetImpl.from_orm(orm)
         assert result.id == "stable_id"
@@ -58,7 +60,8 @@ def test_from_orm(self):
         assert result.bounding_box.minimum_longitude == 3.0
         assert result.bounding_box.maximum_longitude == 4.0
         assert result.validation_report.validator_version == "1.1.1"
-        assert result.service_date_range_start == date(2024, 1, 1)
-        assert result.service_date_range_end == date(2025, 1, 1)
+        assert result.service_date_range_start == datetime(2024, 1, 1, 0, 0, 0, tzinfo=ZoneInfo("Canada/Atlantic"))
+        assert result.service_date_range_end == datetime(2025, 1, 1, 0, 0, 0, tzinfo=ZoneInfo("Canada/Atlantic"))
+        assert result.agency_timezone == "Canada/Atlantic"

         assert GtfsDatasetImpl.from_orm(None) is None
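
Note: once the expected values are timezone-aware, the equality assertions above compare instants rather than naive wall-clock values. A short illustration (not project code) of why the tzinfo argument matters in these tests:

# Illustration only: why the expected values must be timezone-aware once the
# compared fields carry tzinfo.
from datetime import datetime, timezone
from zoneinfo import ZoneInfo

atlantic = datetime(2024, 1, 1, 0, 0, 0, tzinfo=ZoneInfo("Canada/Atlantic"))
utc_equivalent = datetime(2024, 1, 1, 4, 0, 0, tzinfo=timezone.utc)

# Aware datetimes compare by the instant they represent (AST is UTC-4 in January).
assert atlantic == utc_equivalent

# A naive datetime(2024, 1, 1) raises no error under ==, but it is never equal
# to an aware value, so a naive expected value would make the assertion fail.
assert datetime(2024, 1, 1) != atlantic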

api/tests/unittest/models/test_gtfs_feed_impl.py

Lines changed: 8 additions & 5 deletions
@@ -1,6 +1,7 @@
 import copy
 import unittest
-from datetime import datetime, date
+from datetime import datetime
+from zoneinfo import ZoneInfo

 from geoalchemy2 import WKTElement

@@ -87,8 +88,9 @@ def create_test_notice(notice_code: str, total_notices: int, severity: str):
         note="note",
         downloaded_at=datetime(year=2022, month=12, day=31, hour=13, minute=45, second=56),
         hash="hash",
-        service_date_range_start=date(2024, 1, 1),
-        service_date_range_end=date(2025, 1, 1),
+        service_date_range_start=datetime(2024, 1, 1, 0, 0, 0, tzinfo=ZoneInfo("Canada/Atlantic")),
+        service_date_range_end=datetime(2025, 1, 1, 0, 0, 0, tzinfo=ZoneInfo("Canada/Atlantic")),
+        agency_timezone="Canada/Atlantic",
         bounding_box=WKTElement(POLYGON, srid=4326),
         latest=True,
         validation_reports=[
@@ -171,8 +173,9 @@ def create_test_notice(notice_code: str, total_notices: int, severity: str):
                 unique_warning_count=4,
                 unique_info_count=2,
             ),
-            service_date_range_start="2024-01-01",
-            service_date_range_end="2025-01-01",
+            service_date_range_start=datetime(2024, 1, 1, 0, 0, 0, tzinfo=ZoneInfo("Canada/Atlantic")),
+            service_date_range_end=datetime(2025, 1, 1, 0, 0, 0, tzinfo=ZoneInfo("Canada/Atlantic")),
+            agency_timezone="Canada/Atlantic",
         ),
     )
