
Commit be40bd9

merge: main
2 parents 509935c + 68f77fa

File tree: 114 files changed (+2040, -889 lines)


.github/workflows/api-deployer.yml

Lines changed: 11 additions & 11 deletions
@@ -199,7 +199,7 @@ jobs:
       - uses: actions/download-artifact@v4
         with:
           name: database_gen
-          path: api/src/database_gen/
+          path: api/src/shared/database_gen/

       # api schema was generated and uploaded in api-build-test job above.
       - uses: actions/download-artifact@v4
@@ -219,7 +219,7 @@ jobs:
       - name: Build & Publish Docker Image
         run: |
           # We want to generate the image even if it's the same commit that has been tagged. So use the version
-          # (coming from the tag) in the docker image tag (If the docket tag does not change it's won't be uploaded)
+          # (coming from the tag) in the docker image tag (If the docker tag does not change it won't be uploaded)
           DOCKER_IMAGE_VERSION=$EXTRACTED_VERSION.$FEED_API_IMAGE_VERSION
           scripts/docker-build-push.sh -project_id $PROJECT_ID -repo_name feeds-$ENVIRONMENT -service feed-api -region $REGION -version $DOCKER_IMAGE_VERSION
@@ -243,7 +243,7 @@ jobs:
       - uses: actions/download-artifact@v4
         with:
           name: database_gen
-          path: api/src/database_gen/
+          path: api/src/shared/database_gen/

       # api schema was generated and uploaded in api-build-test job above.
       - uses: actions/download-artifact@v4
@@ -318,18 +318,18 @@ jobs:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
           PLAN_OUTPUT: ${{ steps.plan.outputs.stdout }}

+      - name: Persist TF plan
+        uses: actions/upload-artifact@v4
+        with:
+          name: terraform-plan.txt
+          path: infra/terraform-plan.txt
+          overwrite: true
+
       - name: Terraform Apply
         if: ${{ inputs.TF_APPLY }}
         run: |
           cd infra
           terraform apply -auto-approve tf.plan
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-          PLAN_OUTPUT: ${{ steps.plan.outputs.stdout }}
-
-      - name: Persist TF plan
-        uses: actions/upload-artifact@v4
-        with:
-          name: terraform-plan.txt
-          path: infra/terraform-plan.txt
-          overwrite: true
+          PLAN_OUTPUT: ${{ steps.plan.outputs.stdout }}

.github/workflows/build-test.yml

Lines changed: 1 addition & 1 deletion
@@ -105,7 +105,7 @@ jobs:
         uses: actions/upload-artifact@v4
         with:
           name: database_gen
-          path: api/src/database_gen/
+          path: api/src/shared/database_gen/
           overwrite: true

       - name: Upload API generated code

.github/workflows/datasets-batch-deployer.yml

Lines changed: 1 addition & 1 deletion
@@ -119,7 +119,7 @@ jobs:
         uses: actions/upload-artifact@v4
         with:
           name: database_gen
-          path: api/src/database_gen/
+          path: api/src/shared/database_gen/

       - name: Build python functions
         run: |

api/.flake8

Lines changed: 1 addition & 1 deletion
@@ -1,5 +1,5 @@
 [flake8]
 max-line-length = 120
-exclude = .git,__pycache__,__init__.py,.mypy_cache,.pytest_cache,venv,build,src/feeds_gen,src/database_gen,src/shared/database_gen
+exclude = .git,__pycache__,__init__.py,.mypy_cache,.pytest_cache,venv,build,feeds_gen,database_gen
 # Ignored because conflict with black
 extend-ignore = E203

api/src/feeds/impl/datasets_api_impl.py

Lines changed: 4 additions & 60 deletions
@@ -1,20 +1,17 @@
 from typing import List
 from typing import Tuple

-from geoalchemy2 import WKTElement
-from sqlalchemy import or_
 from sqlalchemy.orm import Query, Session

-from database.database import Database, with_db_session
-from database_gen.sqlacodegen_models import (
+from shared.database.database import Database, with_db_session
+from shared.database_gen.sqlacodegen_models import (
     Gtfsdataset,
     Feed,
 )
 from feeds.impl.error_handling import (
-    invalid_bounding_coordinates,
-    invalid_bounding_method,
-    raise_http_validation_error,
     raise_http_error,
+)
+from shared.common.error_handling import (
     dataset_not_found,
 )
 from feeds.impl.models.gtfs_dataset_impl import GtfsDatasetImpl
@@ -39,59 +36,6 @@ def create_dataset_query() -> Query:
             ]
         ).join(Feed, Feed.id == Gtfsdataset.feed_id)

-    @staticmethod
-    def apply_bounding_filtering(
-        query: Query,
-        bounding_latitudes: str,
-        bounding_longitudes: str,
-        bounding_filter_method: str,
-    ) -> Query:
-        """Create a new query based on the bounding parameters."""
-
-        if not bounding_latitudes or not bounding_longitudes or not bounding_filter_method:
-            return query
-
-        if (
-            len(bounding_latitudes_tokens := bounding_latitudes.split(",")) != 2
-            or len(bounding_longitudes_tokens := bounding_longitudes.split(",")) != 2
-        ):
-            raise_http_validation_error(invalid_bounding_coordinates.format(bounding_latitudes, bounding_longitudes))
-        min_latitude, max_latitude = bounding_latitudes_tokens
-        min_longitude, max_longitude = bounding_longitudes_tokens
-        try:
-            min_latitude = float(min_latitude)
-            max_latitude = float(max_latitude)
-            min_longitude = float(min_longitude)
-            max_longitude = float(max_longitude)
-        except ValueError:
-            raise_http_validation_error(invalid_bounding_coordinates.format(bounding_latitudes, bounding_longitudes))
-        points = [
-            (min_longitude, min_latitude),
-            (min_longitude, max_latitude),
-            (max_longitude, max_latitude),
-            (max_longitude, min_latitude),
-            (min_longitude, min_latitude),
-        ]
-        wkt_polygon = f"POLYGON(({', '.join(f'{lon} {lat}' for lon, lat in points)}))"
-        bounding_box = WKTElement(
-            wkt_polygon,
-            srid=Gtfsdataset.bounding_box.type.srid,
-        )
-
-        if bounding_filter_method == "partially_enclosed":
-            return query.filter(
-                or_(
-                    Gtfsdataset.bounding_box.ST_Overlaps(bounding_box),
-                    Gtfsdataset.bounding_box.ST_Contains(bounding_box),
-                )
-            )
-        elif bounding_filter_method == "completely_enclosed":
-            return query.filter(bounding_box.ST_Covers(Gtfsdataset.bounding_box))
-        elif bounding_filter_method == "disjoint":
-            return query.filter(Gtfsdataset.bounding_box.ST_Disjoint(bounding_box))
-        else:
-            raise_http_validation_error(invalid_bounding_method.format(bounding_filter_method))
-
     @staticmethod
     def get_datasets_gtfs(query: Query, session: Session, limit: int = None, offset: int = None) -> List[GtfsDataset]:
         # Results are sorted by stable_id because Database.select(group_by=) requires it so
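
The apply_bounding_filtering helper removed above builds a WKT polygon from the comma-separated latitude/longitude bounds before filtering on Gtfsdataset.bounding_box. A standalone sketch of that polygon construction, using made-up example bounds rather than data from this commit:

# Illustrative sketch of the polygon construction used in the removed helper;
# the bounds below are example values, not part of this commit.
bounding_latitudes = "37.0,38.0"
bounding_longitudes = "-122.5,-121.5"

min_lat, max_lat = (float(v) for v in bounding_latitudes.split(","))
min_lon, max_lon = (float(v) for v in bounding_longitudes.split(","))

# Corners of the bounding box, repeating the first point to close the ring.
points = [
    (min_lon, min_lat),
    (min_lon, max_lat),
    (max_lon, max_lat),
    (max_lon, min_lat),
    (min_lon, min_lat),
]
wkt_polygon = f"POLYGON(({', '.join(f'{lon} {lat}' for lon, lat in points)}))"
print(wkt_polygon)
# POLYGON((-122.5 37.0, -122.5 38.0, -121.5 38.0, -121.5 37.0, -122.5 37.0))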

api/src/feeds/impl/error_handling.py

Lines changed: 10 additions & 11 deletions
@@ -1,16 +1,15 @@
-from typing import Final
-
 from fastapi import HTTPException

-invalid_date_message: Final[
-    str
-] = "Invalid date format for '{}'. Expected ISO 8601 format, example: '2021-01-01T00:00:00Z'"
-invalid_bounding_coordinates: Final[str] = "Invalid bounding coordinates {} {}"
-invalid_bounding_method: Final[str] = "Invalid bounding_filter_method {}"
-feed_not_found: Final[str] = "Feed '{}' not found"
-gtfs_feed_not_found: Final[str] = "GTFS feed '{}' not found"
-gtfs_rt_feed_not_found: Final[str] = "GTFS realtime Feed '{}' not found"
-dataset_not_found: Final[str] = "Dataset '{}' not found"
+from shared.common.error_handling import InternalHTTPException
+
+
+def convert_exception(input_exception: InternalHTTPException) -> HTTPException:
+    """Convert an InternalHTTPException to an HTTPException.
+    HTTPException is dependent on fastapi, and we don't necessarily want to deploy it with python functions.
+    That's why InternalHTTPException (a class that we deploy) is thrown instead of HTTPException.
+    Since InternalHTTPException is internal, it needs to be converted before being sent up.
+    """
+    return HTTPException(status_code=input_exception.status_code, detail=input_exception.detail)


 def raise_http_error(status_code: int, error: str):
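
For context, a minimal self-contained sketch of how the internal exception and the converter fit together; the class body here is an assumption inferred from the converter's usage, not the actual shared.common.error_handling module:

# Hypothetical sketch -- only status_code and detail are confirmed by the
# converter in this diff; the real class lives in shared.common.error_handling.
from fastapi import HTTPException


class InternalHTTPException(Exception):
    """Framework-agnostic HTTP error that can be raised from code deployed
    without FastAPI (e.g. Python cloud functions)."""

    def __init__(self, status_code: int, detail: str):
        super().__init__(detail)
        self.status_code = status_code
        self.detail = detail


def convert_exception(input_exception: InternalHTTPException) -> HTTPException:
    # Same conversion as in error_handling.py above.
    return HTTPException(status_code=input_exception.status_code, detail=input_exception.detail)


# Typical call site: shared code raises the internal error, the FastAPI layer converts it.
try:
    raise InternalHTTPException(status_code=404, detail="Dataset 'abc' not found")
except InternalHTTPException as e:
    raise convert_exception(e)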

api/src/feeds/impl/feeds_api_impl.py

Lines changed: 55 additions & 51 deletions
@@ -6,8 +6,9 @@
 from sqlalchemy.orm import joinedload, Session
 from sqlalchemy.orm.query import Query

-from database.database import Database, with_db_session
-from database_gen.sqlacodegen_models import (
+from shared.common.db_utils import get_gtfs_feeds_query, get_gtfs_rt_feeds_query, get_joinedload_options
+from shared.database.database import Database, with_db_session
+from shared.database_gen.sqlacodegen_models import (
     Feed,
     Gtfsdataset,
     Gtfsfeed,
@@ -17,18 +18,17 @@
     t_location_with_translations_en,
     Entitytype,
 )
-from feeds.filters.feed_filter import FeedFilter
-from feeds.filters.gtfs_dataset_filter import GtfsDatasetFilter
-from feeds.filters.gtfs_feed_filter import GtfsFeedFilter, LocationFilter
-from feeds.filters.gtfs_rt_feed_filter import GtfsRtFeedFilter, EntityTypeFilter
+from shared.feed_filters.feed_filter import FeedFilter
+from shared.feed_filters.gtfs_dataset_filter import GtfsDatasetFilter
+from shared.feed_filters.gtfs_feed_filter import LocationFilter
+from shared.feed_filters.gtfs_rt_feed_filter import GtfsRtFeedFilter, EntityTypeFilter
 from feeds.impl.datasets_api_impl import DatasetsApiImpl
-from feeds.impl.error_handling import (
-    raise_http_validation_error,
+from shared.common.error_handling import (
     invalid_date_message,
-    raise_http_error,
     feed_not_found,
     gtfs_feed_not_found,
     gtfs_rt_feed_not_found,
+    InternalHTTPException,
 )
 from feeds.impl.models.basic_feed_impl import BasicFeedImpl
 from feeds.impl.models.entity_type_enum import EntityType
@@ -39,6 +39,7 @@
 from feeds_gen.models.gtfs_dataset import GtfsDataset
 from feeds_gen.models.gtfs_feed import GtfsFeed
 from feeds_gen.models.gtfs_rt_feed import GtfsRTFeed
+from feeds.impl.error_handling import raise_http_error, raise_http_validation_error, convert_exception
 from middleware.request_context import is_user_email_restricted
 from utils.date_utils import valid_iso_date
 from utils.location_translation import (
@@ -116,7 +117,7 @@ def get_feeds(
         )
         # Results are sorted by provider
         feed_query = feed_query.order_by(Feed.provider, Feed.stable_id)
-        feed_query = feed_query.options(*BasicFeedImpl.get_joinedload_options())
+        feed_query = feed_query.options(*get_joinedload_options())
         if limit is not None:
             feed_query = feed_query.limit(limit)
         if offset is not None:
@@ -155,7 +156,7 @@ def _get_gtfs_feed(stable_id: str, db_session: Session) -> Optional[Gtfsfeed]:
                 joinedload(Gtfsfeed.gtfsdatasets)
                 .joinedload(Gtfsdataset.validation_reports)
                 .joinedload(Validationreport.notices),
-                *BasicFeedImpl.get_joinedload_options(),
+                *get_joinedload_options(),
             )
         ).all()
         if len(results) == 0:
@@ -233,46 +234,29 @@ def get_gtfs_feeds(
         is_official: bool,
         db_session: Session,
     ) -> List[GtfsFeed]:
-        """Get some (or all) GTFS feeds from the Mobility Database."""
-        gtfs_feed_filter = GtfsFeedFilter(
-            stable_id=None,
-            provider__ilike=provider,
-            producer_url__ilike=producer_url,
-            location=LocationFilter(
+        try:
+            include_wip = not is_user_email_restricted()
+            feed_query = get_gtfs_feeds_query(
+                limit=limit,
+                offset=offset,
+                provider=provider,
+                producer_url=producer_url,
                 country_code=country_code,
-                subdivision_name__ilike=subdivision_name,
-                municipality__ilike=municipality,
-            ),
-        )
-
-        subquery = gtfs_feed_filter.filter(select(Gtfsfeed.id).join(Location, Gtfsfeed.locations))
-        subquery = DatasetsApiImpl.apply_bounding_filtering(
-            subquery, dataset_latitudes, dataset_longitudes, bounding_filter_method
-        ).subquery()
-
-        is_email_restricted = is_user_email_restricted()
-        self.logger.info(f"User email is restricted: {is_email_restricted}")
-        feed_query = (
-            db_session.query(Gtfsfeed)
-            .filter(Gtfsfeed.id.in_(subquery))
-            .filter(
-                or_(
-                    Gtfsfeed.operational_status == None,  # noqa: E711
-                    Gtfsfeed.operational_status != "wip",
-                    not is_email_restricted,  # Allow all feeds to be returned if the user is not restricted
-                )
+                subdivision_name=subdivision_name,
+                municipality=municipality,
+                dataset_latitudes=dataset_latitudes,
+                dataset_longitudes=dataset_longitudes,
+                bounding_filter_method=bounding_filter_method,
+                is_official=is_official,
+                include_wip=include_wip,
+                db_session=db_session,
            )
-            .options(
-                joinedload(Gtfsfeed.gtfsdatasets)
-                .joinedload(Gtfsdataset.validation_reports)
-                .joinedload(Validationreport.notices),
-                *BasicFeedImpl.get_joinedload_options(),
-            )
-            .order_by(Gtfsfeed.provider, Gtfsfeed.stable_id)
-        )
-        if is_official:
-            feed_query = feed_query.filter(Feed.official)
-        feed_query = feed_query.limit(limit).offset(offset)
+        except InternalHTTPException as e:
+            # get_gtfs_feeds_query cannot throw HTTPException since it's part of fastapi and it's
+            # not necessarily deployed (e.g. for python functions). Instead it throws an InternalHTTPException
+            # that needs to be converted to HTTPException before being thrown.
+            raise convert_exception(e)
+
         return self._get_response(feed_query, GtfsFeedImpl, db_session)

     @with_db_session
@@ -299,7 +283,7 @@ def get_gtfs_rt_feed(self, id: str, db_session: Session) -> GtfsRTFeed:
             .options(
                 joinedload(Gtfsrealtimefeed.entitytypes),
                 joinedload(Gtfsrealtimefeed.gtfs_feeds),
-                *BasicFeedImpl.get_joinedload_options(),
+                *get_joinedload_options(),
             )
         ).all()

@@ -324,6 +308,26 @@ def get_gtfs_rt_feeds(
         db_session: Session,
     ) -> List[GtfsRTFeed]:
         """Get some (or all) GTFS Realtime feeds from the Mobility Database."""
+        try:
+            include_wip = not is_user_email_restricted()
+            feed_query = get_gtfs_rt_feeds_query(
+                limit=limit,
+                offset=offset,
+                provider=provider,
+                producer_url=producer_url,
+                entity_types=entity_types,
+                country_code=country_code,
+                subdivision_name=subdivision_name,
+                municipality=municipality,
+                is_official=is_official,
+                include_wip=include_wip,
+                db_session=db_session,
+            )
+        except InternalHTTPException as e:
+            raise convert_exception(e)
+
+        return self._get_response(feed_query, GtfsRTFeedImpl, db_session)
+
         entity_types_list = entity_types.split(",") if entity_types else None

         # Validate entity types using the EntityType enum
@@ -365,7 +369,7 @@ def get_gtfs_rt_feeds(
             .options(
                 joinedload(Gtfsrealtimefeed.entitytypes),
                 joinedload(Gtfsrealtimefeed.gtfs_feeds),
-                *BasicFeedImpl.get_joinedload_options(),
+                *get_joinedload_options(),
             )
             .order_by(Gtfsrealtimefeed.provider, Gtfsrealtimefeed.stable_id)
         )
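
The shared query builders themselves are not visible in this commit's diff; the signature below is only a sketch inferred from the keyword arguments at the call sites above, not the actual contents of shared/common/db_utils.py:

# Sketch inferred from the call sites above -- hypothetical, not the real implementation.
from typing import Optional

from sqlalchemy.orm import Query, Session


def get_gtfs_feeds_query(
    limit: Optional[int],
    offset: Optional[int],
    provider: Optional[str],
    producer_url: Optional[str],
    country_code: Optional[str],
    subdivision_name: Optional[str],
    municipality: Optional[str],
    dataset_latitudes: Optional[str],
    dataset_longitudes: Optional[str],
    bounding_filter_method: Optional[str],
    is_official: Optional[bool],
    include_wip: bool,
    db_session: Session,
) -> Query:
    """Build the filtered, paginated GTFS feed query.

    Presumably raises InternalHTTPException (rather than fastapi.HTTPException)
    on invalid bounding parameters, so it can run where FastAPI is not deployed.
    """
    ...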

api/src/feeds/impl/models/basic_feed_impl.py

Lines changed: 1 addition & 14 deletions
@@ -1,7 +1,4 @@
-from sqlalchemy.orm import joinedload
-from sqlalchemy.orm.strategy_options import _AbstractLoad
-
-from database_gen.sqlacodegen_models import Feed
+from shared.database_gen.sqlacodegen_models import Feed
 from feeds.impl.models.external_id_impl import ExternalIdImpl
 from feeds.impl.models.redirect_impl import RedirectImpl
 from feeds_gen.models.basic_feed import BasicFeed
@@ -47,16 +44,6 @@ def from_orm(cls, feed: Feed | None, _=None) -> BasicFeed | None:
             redirects=sorted([RedirectImpl.from_orm(item) for item in feed.redirectingids], key=lambda x: x.target_id),
         )

-    @staticmethod
-    def get_joinedload_options() -> [_AbstractLoad]:
-        """Returns common joinedload options for feeds queries."""
-        return [
-            joinedload(Feed.locations),
-            joinedload(Feed.externalids),
-            joinedload(Feed.redirectingids),
-            joinedload(Feed.officialstatushistories),
-        ]
-

 class BasicFeedImpl(BaseFeedImpl, BasicFeed):
     """Implementation of the `BasicFeed` model.
