Skip to content

Commit 99d4337

Browse files
committed
Moved files around. Adjusted requirements.txt
1 parent 9ed1e15 commit 99d4337

File tree

6 files changed

+82
-78
lines changed

6 files changed

+82
-78
lines changed

.github/workflows/build-test.yml

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -8,6 +8,9 @@ on:
88
- "functions/**"
99
- ".github/workflows/web-*.yml"
1010
workflow_call:
11+
push:
12+
branches: [ 779-generate-new-sourcescsv-from-postgresql-database ]
13+
1114

1215
env:
1316
python_version: '3.11'

api/src/common/common.py

Lines changed: 0 additions & 74 deletions
This file was deleted.

api/src/common/db_utils.py

Lines changed: 72 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,9 +1,15 @@
1+
from geoalchemy2 import WKTElement
12
from sqlalchemy import select
23
from sqlalchemy.orm import joinedload
34
from sqlalchemy.orm.query import Query
5+
from sqlalchemy.orm.strategy_options import _AbstractLoad
46

5-
from common.common import apply_bounding_filtering, get_joinedload_options
6-
from common.error_handling import raise_internal_http_validation_error
7+
# from common.common import apply_bounding_filtering, get_joinedload_options
8+
from common.error_handling import (
9+
raise_internal_http_validation_error,
10+
invalid_bounding_method,
11+
invalid_bounding_coordinates,
12+
)
713
from database.database import Database
814
from database_gen.sqlacodegen_models import (
915
Gtfsdataset,
@@ -12,6 +18,7 @@
1218
Validationreport,
1319
Gtfsrealtimefeed,
1420
Entitytype,
21+
Feed,
1522
)
1623

1724
from feeds.filters.gtfs_feed_filter import GtfsFeedFilter, LocationFilter
@@ -133,3 +140,66 @@ def get_gtfs_rt_feeds_query(
133140
.offset(offset)
134141
)
135142
return feed_query
143+
144+
145+
def get_joinedload_options() -> list[_AbstractLoad]:
    """Return the common ``joinedload`` eager-loading options for feed queries.

    Eagerly loads a feed's locations, external ids, and redirecting ids in the
    same query so callers avoid N+1 lazy loads.

    Note: the original annotation was ``-> [_AbstractLoad]`` — a literal list
    expression, which is meaningless to type-checkers; ``list[_AbstractLoad]``
    is the valid spelling and changes no runtime behavior.
    """
    return [
        joinedload(Feed.locations),
        joinedload(Feed.externalids),
        joinedload(Feed.redirectingids),
    ]
148+
149+
150+
def apply_bounding_filtering(
    query: Query,
    bounding_latitudes: str,
    bounding_longitudes: str,
    bounding_filter_method: str,
) -> Query:
    """Filter ``query`` by a geographic bounding box on the dataset geometry.

    Args:
        query: Base query to extend.
        bounding_latitudes: Comma-separated "min,max" latitude pair.
        bounding_longitudes: Comma-separated "min,max" longitude pair.
        bounding_filter_method: One of ``"partially_enclosed"``,
            ``"completely_enclosed"``, or ``"disjoint"``.

    Returns:
        The filtered query, or the original query unchanged when any bounding
        parameter is missing/empty.

    Raises:
        Whatever ``raise_internal_http_validation_error`` raises, for malformed
        coordinates or an unknown filter method.
    """
    # All three parameters are required to apply a filter; otherwise no-op.
    if not bounding_latitudes or not bounding_longitudes or not bounding_filter_method:
        return query

    if (
        len(bounding_latitudes_tokens := bounding_latitudes.split(",")) != 2
        or len(bounding_longitudes_tokens := bounding_longitudes.split(",")) != 2
    ):
        raise_internal_http_validation_error(
            invalid_bounding_coordinates.format(bounding_latitudes, bounding_longitudes)
        )
    min_latitude, max_latitude = bounding_latitudes_tokens
    min_longitude, max_longitude = bounding_longitudes_tokens
    try:
        min_latitude = float(min_latitude)
        max_latitude = float(max_latitude)
        min_longitude = float(min_longitude)
        max_longitude = float(max_longitude)
    except ValueError:
        raise_internal_http_validation_error(
            invalid_bounding_coordinates.format(bounding_latitudes, bounding_longitudes)
        )

    # Closed ring (first vertex repeated last) — required for valid WKT POLYGON.
    points = [
        (min_longitude, min_latitude),
        (min_longitude, max_latitude),
        (max_longitude, max_latitude),
        (max_longitude, min_latitude),
        (min_longitude, min_latitude),
    ]
    wkt_polygon = f"POLYGON(({', '.join(f'{lon} {lat}' for lon, lat in points)}))"
    # Reuse the column's SRID so the spatial comparison is coordinate-system
    # consistent with the stored geometries.
    bounding_box = WKTElement(
        wkt_polygon,
        srid=Gtfsdataset.bounding_box.type.srid,
    )

    # NOTE(review): `or_` is referenced below but the visible import block only
    # brings in `select` from sqlalchemy — confirm `or_` is imported at the top
    # of this module.
    if bounding_filter_method == "partially_enclosed":
        return query.filter(
            or_(
                Gtfsdataset.bounding_box.ST_Overlaps(bounding_box),
                Gtfsdataset.bounding_box.ST_Contains(bounding_box),
            )
        )
    elif bounding_filter_method == "completely_enclosed":
        return query.filter(bounding_box.ST_Covers(Gtfsdataset.bounding_box))
    elif bounding_filter_method == "disjoint":
        return query.filter(Gtfsdataset.bounding_box.ST_Disjoint(bounding_box))
    else:
        # Bug fix: original wrote `raise raise_internal_http_validation_error(...)`.
        # Every other call site in this function calls the helper bare and relies
        # on it raising; if it instead returned None, `raise None` would be a
        # TypeError. Call it the same way here for consistency.
        raise_internal_http_validation_error(invalid_bounding_method.format(bounding_filter_method))

api/src/feeds/impl/feeds_api_impl.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -243,7 +243,7 @@ def get_gtfs_feeds(
243243
except InternalHTTPException as e:
244244
# get_gtfs_feeds_query cannot throw HTTPException since it's part of fastapi and it's
245245
# not necessarily deployed (e.g. for python functions). Instead it throws an InternalHTTPException
246-
# that needs to be converted to HTTPException before being thrown.
246+
# that needs to be converted to HTTPException before being returned to the calling api.
247247
raise convert_exception(e)
248248

249249
return self._get_response(feed_query, GtfsFeedImpl)

functions-python/create_csv/function_config.json

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -6,7 +6,7 @@
66
"memory": "256Mi",
77
"trigger_http": true,
88
"include_folders": ["helpers", "dataset_service"],
9-
"include_common_folders": ["utils", "database_gen", "common", "database", "feeds/filters"],
9+
"include_api_folders": ["database_gen", "common", "database", "feeds/filters", "utils"],
1010
"secret_environment_variables": [
1111
{
1212
"key": "FEEDS_DATABASE_URL"

functions-python/create_csv/requirements.txt

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -9,6 +9,8 @@ requests~=2.32.3
99
attrs~=23.1.0
1010
pluggy~=1.3.0
1111
certifi~=2024.7.4
12+
python-dotenv==1.0.0
13+
fastapi-filter[sqlalchemy]==1.0.0
1214

1315
# SQL Alchemy and Geo Alchemy
1416
SQLAlchemy==2.0.23
@@ -18,3 +20,6 @@ geoalchemy2==0.14.7
1820
google-cloud-pubsub
1921
google-cloud-datastore
2022
cloudevents~=1.10.1
23+
24+
# Additional packages for this function
25+
pandas

0 commit comments

Comments (0)