64 changes: 64 additions & 0 deletions map-poc-test/display-routes-tab/generate-route-endpoint-data.py
@@ -0,0 +1,64 @@
import csv
import json
import argparse

def load_csv(file_path, key_column):
"""Loads a CSV file into a dictionary with key_column as the key."""
data = {}
with open(file_path, mode='r', encoding='utf-8-sig') as file:
reader = csv.DictReader(file)
for row in reader:
data[row[key_column]] = row
return data

def process_gtfs(calendar_file, trips_file, routes_file, output_file):
"""Processes GTFS files and generates a JSON output."""
calendar_data = load_csv(calendar_file, 'service_id')
trips_data = load_csv(trips_file, 'trip_id')
routes_data = load_csv(routes_file, 'route_id')

route_info = {}

for trip in trips_data.values():
service_id = trip['service_id']
route_id = trip['route_id']

if service_id not in calendar_data or route_id not in routes_data:
continue

calendar_entry = calendar_data[service_id]
route_entry = routes_data[route_id]

if route_id not in route_info:
route_info[route_id] = {
'routeId': route_id,
'routeName': route_entry.get('route_long_name', '') or route_entry.get('route_short_name', ''),
'color': f"#{route_entry.get('route_color', '000000')}",
'textColor': f"#{route_entry.get('route_text_color', 'ffffff')}",
'routeType': route_entry.get('route_type', ''),
'startDate': calendar_entry['start_date'],
'endDate': calendar_entry['end_date'],
'monday': calendar_entry['monday'] == '1',
'tuesday': calendar_entry['tuesday'] == '1',
'wednesday': calendar_entry['wednesday'] == '1',
'thursday': calendar_entry['thursday'] == '1',
'friday': calendar_entry['friday'] == '1',
'saturday': calendar_entry['saturday'] == '1',
'sunday': calendar_entry['sunday'] == '1'
}

with open(output_file, 'w', encoding='utf-8') as json_file:
json.dump(list(route_info.values()), json_file, indent=4)

def main():
parser = argparse.ArgumentParser(description='Process GTFS files and generate JSON output.')
parser.add_argument('calendar', help='Path to calendar.txt')
parser.add_argument('trips', help='Path to trips.txt')
parser.add_argument('routes', help='Path to routes.txt')
parser.add_argument('output', help='Path to output JSON file')
args = parser.parse_args()

process_gtfs(args.calendar, args.trips, args.routes, args.output)

if __name__ == '__main__':
main()
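
A quick usage sketch for this script (the file names below are hypothetical placeholders for a standard GTFS feed, not paths taken from this PR); it can be run through its argparse CLI or by calling process_gtfs directly:

# Hypothetical invocation; paths are placeholders.
# python generate-route-endpoint-data.py calendar.txt trips.txt routes.txt route-endpoint-data.json
process_gtfs('calendar.txt', 'trips.txt', 'routes.txt', 'route-endpoint-data.json')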
100 changes: 100 additions & 0 deletions map-poc-test/gtfs_stops_to_geojson.py
@@ -0,0 +1,100 @@
import csv
import json
import sys
from collections import defaultdict

def read_csv(filepath):
with open(filepath, mode='r', encoding='utf-8-sig') as f:
reader = csv.DictReader(f)
for row in reader:
yield row

def load_routes(routes_file):
routes = {}
for row in read_csv(routes_file):
route_id = row.get('route_id')
if route_id:
routes[route_id] = row
return routes

def build_stop_to_routes(stop_times_file, trips_file):
# Build trip_id -> route_id mapping
trip_to_route = {}
for row in read_csv(trips_file):
trip_id = row.get('trip_id')
route_id = row.get('route_id')
if trip_id and route_id:
trip_to_route[trip_id] = route_id

# Build stop_id -> set of route_ids
stop_to_routes = defaultdict(set)
for row in read_csv(stop_times_file):
trip_id = row.get('trip_id')
stop_id = row.get('stop_id')
if trip_id and stop_id:
route_id = trip_to_route.get(trip_id)
if route_id:
stop_to_routes[stop_id].add(route_id)

return stop_to_routes

def convert_stops_to_geojson(stops_file, stop_times_file, trips_file, routes_file, output_file):
routes = load_routes(routes_file)
stop_to_routes = build_stop_to_routes(stop_times_file, trips_file)

features = []

for row in read_csv(stops_file):
stop_id = row.get('stop_id')
if not stop_id:
continue

if 'stop_lat' not in row or 'stop_lon' not in row or not row['stop_lat'] or not row['stop_lon']:
continue # skip bad coordinates

# Routes serving this stop
route_ids = sorted(stop_to_routes.get(stop_id, []))
route_colors = [routes[r].get('route_color', '#000000') for r in route_ids if r in routes]

feature = {
"type": "Feature",
"geometry": {
"type": "Point",
"coordinates": [
float(row['stop_lon']),
float(row['stop_lat'])
]
},
"properties": {
"stop_id": stop_id,
"stop_code": row.get("stop_code", ""),
"stop_name": row.get("stop_name", ""),
"stop_desc": row.get("stop_desc", ""),
"zone_id": row.get("zone_id", ""),
"stop_url": row.get("stop_url", ""),
"wheelchair_boarding": row.get("wheelchair_boarding", ""),
"stop_lat": row.get("stop_lat"),
"stop_lon": row.get("stop_lon"),
"location_type": row.get("location_type", ""),
"route_ids": route_ids,
"route_colors": route_colors
}
}
features.append(feature)

geojson = {
"type": "FeatureCollection",
"features": features
}

with open(output_file, 'w', encoding='utf-8') as f:
json.dump(geojson, f, indent=2, ensure_ascii=False)

print(f"✅ GeoJSON file saved to {output_file} with {len(features)} stops")

if __name__ == "__main__":
if len(sys.argv) != 6:
print("Usage: python script.py stops.txt stop_times.txt trips.txt routes.txt output.geojson")
sys.exit(1)

convert_stops_to_geojson(sys.argv[1], sys.argv[2], sys.argv[3], sys.argv[4], sys.argv[5])
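
A minimal usage sketch, assuming the positional argument order enforced by the sys.argv check above (file names are placeholders, not paths from this PR):

# Hypothetical invocation; paths are placeholders.
# python gtfs_stops_to_geojson.py stops.txt stop_times.txt trips.txt routes.txt stops.geojson
convert_stops_to_geojson('stops.txt', 'stop_times.txt', 'trips.txt', 'routes.txt', 'stops.geojson')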
154 changes: 154 additions & 0 deletions map-poc-test/routes-poc/gtfs_routes_to_geojson.py
@@ -0,0 +1,154 @@
import csv
import json
import argparse
import sys
from collections import defaultdict
from geojson import Feature, FeatureCollection, LineString

# ----------------------------
# Utility readers
# ----------------------------

def read_csv(filepath):
"""Generator that reads a CSV row-by-row as dict."""
with open(filepath, mode='r', encoding='utf-8-sig') as f:
reader = csv.DictReader(f)
for row in reader:
yield row

# ----------------------------
# Agency Loader
# ----------------------------

def load_agencies(agency_file):
agencies = {}
default_agency_name = ''
for row in read_csv(agency_file):
agency_id = row.get('agency_id') or 'default'
agency_name = row.get('agency_name', '').strip()
agencies[agency_id] = agency_name
if not default_agency_name:
default_agency_name = agency_name

if not agencies and default_agency_name:
agencies['default'] = default_agency_name

return agencies

# ----------------------------
# Route Loader
# ----------------------------

def load_routes(routes_file):
routes = {}
for row in read_csv(routes_file):
route_id = row.get('route_id')
if not route_id:
continue # skip broken routes
routes[route_id] = row
return routes

# ----------------------------
# Shape Loader
# ----------------------------

def load_shapes(shapes_file):
shapes = defaultdict(list)
for row in read_csv(shapes_file):
shape_id = row.get('shape_id')
try:
lat = float(row['shape_pt_lat'])
lon = float(row['shape_pt_lon'])
seq = int(row['shape_pt_sequence'])
except (KeyError, ValueError):
continue # skip bad rows

shapes[shape_id].append((seq, lon, lat))

# Sort shape points
final_shapes = {}
for sid, points in shapes.items():
points.sort()
final_shapes[sid] = [(lon, lat) for _, lon, lat in points]

return final_shapes

# ----------------------------
# Route -> Shape mapping
# ----------------------------

def build_route_shapes(trips_file):
mapping = defaultdict(set)
for row in read_csv(trips_file):
route_id = row.get('route_id')
shape_id = row.get('shape_id')
if route_id and shape_id:
mapping[route_id].add(shape_id)
return mapping

# ----------------------------
# Main converter
# ----------------------------

def generate_geojson(routes_file, trips_file, shapes_file, agency_file, output_file):
agencies = load_agencies(agency_file)
routes = load_routes(routes_file)
shapes = load_shapes(shapes_file)
route_shapes = build_route_shapes(trips_file)

features = []

for route_id, shape_ids in route_shapes.items():
route = routes.get(route_id)
if not route:
continue

agency_id = route.get('agency_id') or 'default'
agency_name = agencies.get(agency_id, '')

for shape_id in shape_ids:
shape = shapes.get(shape_id)
if not shape:
print(f"Warning: shape_id {shape_id} not found for route_id {route_id}", file=sys.stderr)
continue

feature = Feature(
geometry=LineString(shape),
properties={
'agency_name': agency_name,
'route_id': route_id,
'route_short_name': route.get('route_short_name', ''),
'route_long_name': route.get('route_long_name', ''),
'route_type': route.get('route_type', ''),
'route_color': route.get('route_color', '#000000'),
'route_text_color': route.get('route_text_color', '#FFFFFF')
}
)
features.append(feature)

geojson = FeatureCollection(features)
with open(output_file, 'w', encoding='utf-8') as f:
json.dump(geojson, f, indent=2)

print(f"✅ GeoJSON written to {output_file} with {len(features)} features")

# ----------------------------
# CLI
# ----------------------------

if __name__ == "__main__":
parser = argparse.ArgumentParser(description="Convert GTFS routes to GeoJSON.")
parser.add_argument("routes_file")
parser.add_argument("trips_file")
parser.add_argument("shapes_file")
parser.add_argument("agency_file")
parser.add_argument("output_file")
args = parser.parse_args()

generate_geojson(
args.routes_file,
args.trips_file,
args.shapes_file,
args.agency_file,
args.output_file
)
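
Note that this script also depends on the third-party geojson package for Feature, FeatureCollection, and LineString. A minimal usage sketch, with placeholder file names following the argparse argument order above:

# Hypothetical invocation; paths are placeholders.
# python gtfs_routes_to_geojson.py routes.txt trips.txt shapes.txt agency.txt routes.geojson
generate_geojson('routes.txt', 'trips.txt', 'shapes.txt', 'agency.txt', 'routes.geojson')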
7 changes: 7 additions & 0 deletions web-app/package.json
@@ -5,6 +5,7 @@
"dependencies": {
"@emotion/react": "^11.13.0",
"@emotion/styled": "^11.13.0",
"@maplibre/maplibre-gl-leaflet": "^0.0.22",
"@mui/icons-material": "^5.16.4",
"@mui/material": "^5.16.4",
"@mui/x-date-pickers": "^7.11.0",
@@ -13,6 +14,7 @@
"@turf/center": "^6.5.0",
"@types/i18next": "^13.0.0",
"@types/leaflet": "^1.9.12",
"@types/react-map-gl": "^6.1.7",
"axios": "^1.7.2",
"countries-list": "^3.1.1",
"date-fns": "^2.30.0",
@@ -24,10 +26,14 @@
"i18next-browser-languagedetector": "^8.0.0",
"i18next-http-backend": "^2.5.2",
"leaflet": "^1.9.4",
"leaflet.vectorgrid": "^1.3.0",
"maplibre-gl": "^4.7.1",
"material-react-table": "^2.13.0",
"mui-datatables": "^4.3.0",
"mui-nested-menu": "^3.4.0",
"openapi-fetch": "^0.9.3",
"pmtiles": "^4.2.1",
"protomaps-leaflet": "^4.1.0",
"react": "^17.0.0 || ^18.0.0",
"react-dom": "^17.0.0 || ^18.0.0",
"react-ga4": "^2.1.0",
@@ -36,6 +42,7 @@
"react-hook-form": "^7.52.1",
"react-i18next": "^14.1.2",
"react-leaflet": "^4.2.1",
"react-map-gl": "^7.1.7",
"react-redux": "^8.1.3",
"react-router-dom": "^6.16.0",
"react-scripts": "5.0.1",
25 changes: 25 additions & 0 deletions web-app/src/app/Theme.ts
@@ -147,6 +147,31 @@ export const getTheme = (mode: ThemeModeEnum): Theme => {
},
},
},
MuiTabs: {
styleOverrides: {
root: {
backgroundColor: chosenPalette.background.paper,
color: chosenPalette.text.primary,
width: 'fit-content',
border: '2px solid',
borderColor: chosenPalette.primary.main,
borderRadius: '5px',
textTransform: 'none',
},
},
},
MuiTab: {
styleOverrides: {
root: {
textTransform: 'none',
color: chosenPalette.text.primary,
'&.Mui-selected': {
backgroundColor: chosenPalette.primary.main,
color: chosenPalette.primary.contrastText,
},
},
},
},
MuiButton: {
styleOverrides: {
root: {