@@ -65,8 +65,16 @@ def get_feeds_data(
     return [d for d in data if d["stops_url"]]


-def parse_request_parameters(request: flask.Request) -> Tuple[List[str], bool]:
-    """Parse the request parameters to get the country codes and whether to include only unprocessed feeds."""
+def parse_request_parameters(request: flask.Request) -> Tuple[List[str], bool, bool]:
+    """
+    Parse the request parameters.
+
+    Returns:
+        Tuple[List[str], bool, bool]: A tuple containing:
+            - country_codes: List of country codes to filter feeds
+            - include_only_unprocessed: Whether to include only unprocessed feeds
+            - use_cache: Whether to use cache for reverse geolocation
+    """
     json_request = request.get_json()
     country_codes = json_request.get("country_codes", "").split(",")
     country_codes = [code.strip().upper() for code in country_codes if code]
@@ -78,13 +86,16 @@ def parse_request_parameters(request: flask.Request) -> Tuple[List[str], bool]:
     include_only_unprocessed = (
         json_request.get("include_only_unprocessed", True) is True
     )
-    return country_codes, include_only_unprocessed
+    use_cache = bool(json_request.get("use_cache", True))
+    return country_codes, include_only_unprocessed, use_cache


 def reverse_geolocation_batch(request: flask.Request) -> Tuple[str, int]:
     """Batch function to trigger reverse geolocation for feeds."""
     try:
-        country_codes, include_only_unprocessed = parse_request_parameters(request)
+        country_codes, include_only_unprocessed, use_cache = parse_request_parameters(
+            request
+        )
         feeds_data = get_feeds_data(country_codes, include_only_unprocessed)
         logging.info("Valid feeds with latest dataset: %s", len(feeds_data))

@@ -93,6 +104,7 @@ def reverse_geolocation_batch(request: flask.Request) -> Tuple[str, int]:
                 stable_id=feed["stable_id"],
                 dataset_id=feed["dataset_id"],
                 stops_url=feed["stops_url"],
+                use_cache=use_cache,
             )
         return f"Batch function triggered for {len(feeds_data)} feeds.", 200
     except Exception as e:
@@ -104,13 +116,19 @@ def create_http_processor_task(
     stable_id: str,
     dataset_id: str,
     stops_url: str,
+    use_cache: bool = True,
 ) -> None:
     """
     Create a task to process a group of points.
     """
     client = tasks_v2.CloudTasksClient()
     body = json.dumps(
-        {"stable_id": stable_id, "stops_url": stops_url, "dataset_id": dataset_id}
+        {
+            "stable_id": stable_id,
+            "stops_url": stops_url,
+            "dataset_id": dataset_id,
+            "use_cache": use_cache,
+        }
     ).encode()
     queue_name = os.getenv("QUEUE_NAME")
     project_id = os.getenv("PROJECT_ID")
0 commit comments