@@ -65,8 +65,8 @@ def get_feeds_data(
     return [d for d in data if d["stops_url"]]


-def parse_request_parameters(request: flask.Request) -> Tuple[List[str], bool]:
-    """Parse the request parameters to get the country codes and whether to include only unprocessed feeds."""
+def parse_request_parameters(request: flask.Request) -> Tuple[List[str], bool, bool]:
69+ """Parse the request parameters"""
     json_request = request.get_json()
     country_codes = json_request.get("country_codes", "").split(",")
     country_codes = [code.strip().upper() for code in country_codes if code]
@@ -78,13 +78,16 @@ def parse_request_parameters(request: flask.Request) -> Tuple[List[str], bool]:
     include_only_unprocessed = (
         json_request.get("include_only_unprocessed", True) is True
     )
-    return country_codes, include_only_unprocessed
+    use_cache = bool(json_request.get("use_cache", True))
+    return country_codes, include_only_unprocessed, use_cache


 def reverse_geolocation_batch(request: flask.Request) -> Tuple[str, int]:
     """Batch function to trigger reverse geolocation for feeds."""
     try:
-        country_codes, include_only_unprocessed = parse_request_parameters(request)
+        country_codes, include_only_unprocessed, use_cache = parse_request_parameters(
+            request
+        )
         feeds_data = get_feeds_data(country_codes, include_only_unprocessed)
         logging.info("Valid feeds with latest dataset: %s", len(feeds_data))

@@ -93,6 +96,7 @@ def reverse_geolocation_batch(request: flask.Request) -> Tuple[str, int]:
                 stable_id=feed["stable_id"],
                 dataset_id=feed["dataset_id"],
                 stops_url=feed["stops_url"],
+                use_cache=use_cache,
             )
         return f"Batch function triggered for {len(feeds_data)} feeds.", 200
     except Exception as e:
@@ -104,13 +108,19 @@ def create_http_processor_task(
     stable_id: str,
     dataset_id: str,
     stops_url: str,
+    use_cache: bool = False,
 ) -> None:
     """
     Create a task to process a group of points.
     """
     client = tasks_v2.CloudTasksClient()
     body = json.dumps(
-        {"stable_id": stable_id, "stops_url": stops_url, "dataset_id": dataset_id}
+        {
+            "stable_id": stable_id,
+            "stops_url": stops_url,
+            "dataset_id": dataset_id,
+            "use_cache": use_cache,
+        }
     ).encode()
     queue_name = os.getenv("QUEUE_NAME")
     project_id = os.getenv("PROJECT_ID")
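
For illustration only, not part of the diff: a minimal sketch of how a caller could pass the new use_cache flag to the batch function, assuming it is deployed behind an HTTP trigger. The URL below is a placeholder; the JSON keys are the ones parse_request_parameters() reads above, and use_cache defaults to True when omitted.

import requests  # assumed to be available in the caller's environment

payload = {
    "country_codes": "ca, us",         # comma-separated; trimmed and upper-cased server-side
    "include_only_unprocessed": True,  # only feeds not yet processed
    "use_cache": False,                # new flag, forwarded into each task payload
}
# Placeholder URL: substitute the real deployment endpoint.
resp = requests.post("https://example.com/reverse_geolocation_batch", json=payload, timeout=60)
print(resp.status_code, resp.text)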