11"""Fetches data from the Nysse GTFS API."""
22
3+ import asyncio
34import csv
45from datetime import UTC , datetime , timedelta
56import logging
@@ -37,39 +38,64 @@ def _get_database():
3738 return conn , cursor
3839
3940
# Module-level lock serializing GTFS downloads: concurrent callers of
# _fetch_gtfs() queue here so only one download/extract runs at a time.
_fetch_lock = asyncio.Lock()
42+
43+
async def _fetch_gtfs():
    """Download the Nysse GTFS zip, extract it, and reload the database.

    Serialized through ``_fetch_lock`` so only one fetch runs at a time.
    Skips the download entirely when a previous zip exists and the current
    minute is not the top of the hour; otherwise issues a conditional GET
    (``If-Modified-Since``) so an unchanged feed costs only a 304 response.
    Blocking file/zip work is pushed to a thread executor to keep the
    event loop responsive. Network errors and timeouts are logged, never
    raised to the caller.
    """
    try:
        async with _fetch_lock:  # Ensure only one fetch runs at a time
            path = _get_dir_path()
            filename = "extended_gtfs_tampere.zip"
            if os.path.isfile(path + filename) and datetime.now().minute != 0:
                _LOGGER.debug("Skipped fetching GTFS data")
                # Skip when a cached file exists AND it's not the top of the hour
                return
            timestamp = _get_file_modified_time(path + filename)

            _LOGGER.debug("Fetching GTFS data from %s", GTFS_URL)
            timeout = aiohttp.ClientTimeout(total=30)
            async with (
                aiohttp.ClientSession(timeout=timeout) as session,
                session.get(
                    GTFS_URL, headers={"If-Modified-Since": timestamp}
                ) as response,
            ):
                if response.status == 200:
                    _LOGGER.info("Response OK")
                    content = await response.read()
                    loop = asyncio.get_running_loop()
                    # File write + zip extraction are blocking: run off-loop.
                    await loop.run_in_executor(
                        None, _save_response_to_file, path, filename, content
                    )
                    await _read_csv_to_db()
                elif response.status == 304:
                    _LOGGER.debug(
                        "%s has not received updates: %s", filename, response.status
                    )
                else:
                    _LOGGER.error(
                        "Error fetching GTFS data: Status %s", response.status
                    )
    except (aiohttp.ClientError, asyncio.TimeoutError) as err:
        # asyncio.TimeoutError: raised by ClientTimeout expiry; it is not an
        # aiohttp.ClientError on older Pythons, so catch it explicitly or a
        # slow server would crash the surrounding task instead of being logged.
        _LOGGER.error("Error fetching GTFS data: %s", err)
7080
7181
82+ def _save_response_to_file (path , filename , content ):
83+ with open (path + filename , "wb" ) as f :
84+ f .write (content )
85+ with zipfile .ZipFile (path + filename , "r" ) as zip_ref :
86+ zip_ref .extractall (path )
87+
88+
7289async def _read_csv_to_db ():
90+ loop = asyncio .get_running_loop ()
91+ path = _get_dir_path ()
92+ stops = await loop .run_in_executor (None , _parse_csv_file , path + "stops.txt" )
93+ trips = await loop .run_in_executor (None , _parse_csv_file , path + "trips.txt" )
94+ calendar = await loop .run_in_executor (None , _parse_csv_file , path + "calendar.txt" )
95+ stop_times = await loop .run_in_executor (
96+ None , _parse_csv_file , path + "stop_times.txt"
97+ )
98+
7399 conn , cursor = _get_database ()
74100
75101 # Stops
@@ -83,7 +109,6 @@ async def _read_csv_to_db():
83109 )
84110 """
85111 )
86- stops = _parse_csv_file (_get_dir_path () + "stops.txt" )
87112 to_db = [
88113 (i ["stop_id" ], i ["stop_name" ], i ["stop_lat" ], i ["stop_lon" ]) for i in stops
89114 ]
@@ -104,7 +129,6 @@ async def _read_csv_to_db():
104129 )
105130 """
106131 )
107- trips = _parse_csv_file (_get_dir_path () + "trips.txt" )
108132 to_db = [
109133 (
110134 i ["trip_id" ],
@@ -141,7 +165,6 @@ async def _read_csv_to_db():
141165 )
142166 """
143167 )
144- calendar = _parse_csv_file (_get_dir_path () + "calendar.txt" )
145168 to_db = [
146169 (
147170 i ["service_id" ],
@@ -179,7 +202,6 @@ async def _read_csv_to_db():
179202 )
180203 """
181204 )
182- stop_times = _parse_csv_file (_get_dir_path () + "stop_times.txt" )
183205 to_db = [
184206 (
185207 i ["trip_id" ],
0 commit comments