@@ -32,48 +32,46 @@ def _fetch_post(self, params: dict[str, Any], data: dict[str, Any]) -> list[dict
         result = response.json()
 
         all_data = [result]
+
+        if params["auto_paginate"] is False:
+            return all_data
+
+        # If we need to paginate, use concurrent requests
         pagination = result.get("pagination")
-        metadata = result.get("metadata")
+        if pagination and pagination.get("has_more"):  # guard: response may omit pagination
42+ print ("More pages to fetch, paginating additional pages..." )
3743
38- if pagination :
3944 limit = pagination .get ("limit" )
40- returned_count = metadata .get ("results" , {}).get ("returned_count" , 0 )
41- # if we got fewer or equal results than requested, don't paginate
42- if returned_count <= limit :
43- return all_data
44-
45- # If we need to paginate, use concurrent requests
46- if pagination .get ("has_more" ):
47- print ("More pages to fetch, paginating additional pages..." )
48- offset = pagination .get ("offset" )
49- total_available = metadata .get ("results" , {}).get ("total_available" , 0 )
50-
51- # Calculate how many more pages we need
52- remaining_pages = (total_available - limit ) // limit
53- if (total_available - limit ) % limit > 0 :
54- remaining_pages += 1
55-
56- # Generate all the URLs we need to fetch
57- urls = []
58- current_offset = offset + limit
59- for _ in range (remaining_pages ):
60- urls .append (f"{ self .full_post_url } ?limit={ limit } &offset={ current_offset } " )
61- current_offset += limit
62-
63- # Use ThreadPoolExecutor to make concurrent requests
64- with ThreadPoolExecutor (max_workers = self .client .num_workers ) as executor :
65- future_to_url = {
66- executor .submit (self ._post , url = url , data = data , params = params ): url
67- for url in urls
68- }
69-
70- for future in as_completed (future_to_url ):
71- try :
72- response = future .result ()
73- page_result = response .json ()
74- all_data .append (page_result )
75- except Exception as exc :
76- print (f"Request failed: { exc } " )
45+ offset = pagination .get ("offset" )
46+ metadata = result .get ("metadata" )
47+ total_available = metadata .get ("results" , {}).get ("total_available" , 0 )
48+
49+ # Calculate how many more pages we need
50+ remaining_pages = (total_available - limit ) // limit
51+ if (total_available - limit ) % limit > 0 :
52+ remaining_pages += 1
53+
54+ # Generate all the URLs we need to fetch
55+ urls = []
56+ current_offset = offset + limit
57+ for _ in range (remaining_pages ):
58+ urls .append (f"{ self .full_post_url } ?limit={ limit } &offset={ current_offset } " )
59+ current_offset += limit
60+
61+ # Use ThreadPoolExecutor to make concurrent requests
62+ with ThreadPoolExecutor (max_workers = self .client .num_workers ) as executor :
63+ future_to_url = {
64+ executor .submit (self ._post , url = url , data = data , params = params ): url
65+ for url in urls
66+ }
67+
68+ for future in as_completed (future_to_url ):
69+ try :
70+ response = future .result ()
71+ page_result = response .json ()
72+ all_data .append (page_result )
73+ except Exception as exc :
74+ print (f"Request failed: { exc } " )
7775
7876 return all_data
7977
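The remaining-page arithmetic added above is a manual ceiling division. A minimal standalone sketch of the same calculation (total_available, limit, and offset mirror the diff; page_offsets itself is a hypothetical helper):

    import math

    def page_offsets(total_available: int, limit: int, offset: int) -> list[int]:
        # Pages still needed after the first response: ceiling division,
        # equivalent to the floor-divide-plus-remainder check in _fetch_post.
        remaining_pages = math.ceil(max(total_available - limit, 0) / limit)
        # Each subsequent request starts one full page past the previous offset.
        return [offset + limit * (i + 1) for i in range(remaining_pages)]

    # e.g. page_offsets(2500, 1000, 0) -> [1000, 2000]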
@@ -127,8 +125,6 @@ def _fetch_post_parcl_property_ids(
                 if idx < len(parcl_property_ids_chunks) - 1:  # Don't delay after the last one
                     time.sleep(0.1)
 
-    # Helper functions to abstract raise statements
-
             # Collect results as they complete
             for future in as_completed(future_to_chunk):
                 chunk_num = future_to_chunk[future]
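The hunk above only trims a stray comment from the chunked variant, which staggers its executor submissions before collecting results. A simplified, illustrative sketch of that submit-stagger-collect pattern (fetch_chunk and fetch_all are hypothetical stand-ins, not the library's API):

    import time
    from concurrent.futures import ThreadPoolExecutor, as_completed

    def fetch_all(fetch_chunk, chunks, num_workers=4, delay=0.1):
        results = {}
        with ThreadPoolExecutor(max_workers=num_workers) as executor:
            future_to_chunk = {}
            for idx, chunk in enumerate(chunks):
                future_to_chunk[executor.submit(fetch_chunk, chunk)] = idx
                if idx < len(chunks) - 1:  # don't delay after the last one
                    time.sleep(delay)  # spread out request start times
            # Collect results as they complete, then restore submission order
            for future in as_completed(future_to_chunk):
                results[future_to_chunk[future]] = future.result()
        return [results[i] for i in range(len(chunks))]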
@@ -432,24 +428,20 @@ def _build_owner_filters(self, params: PropertyV2RetrieveParams) -> dict[str, An
 
         return owner_filters
 
-    def _validate_limit(self, limit: int | None) -> int:
-        """Validate limit parameter."""
+    def _set_limit_pagination(self, limit: int | None) -> tuple[int, bool]:
+        """Validate the limit and set the auto-pagination flag."""
         max_limit = RequestLimits.PROPERTY_V2_MAX.value
 
-        # If auto-paginate is enabled or no limit is provided, use maximum limit
-        if limit in (None, 0):
-            print(f"No limit provided. Setting limit to maximum value of {max_limit}.")
-            return max_limit
+        # If no limit is provided, use the maximum limit and auto-paginate
+        if limit == 0 or limit is None:
+            auto_paginate = True
+            print(f"No limit provided. Using max limit of {max_limit}. "
+                  f"Auto pagination is {auto_paginate}.")
+            return max_limit, auto_paginate
 
-        # If limit exceeds maximum, cap it
-        if limit > max_limit:
-            print(
-                f"Supplied limit value is too large for requested endpoint."
-                f"Setting limit to maximum value of {max_limit}."
-            )
-            return max_limit
-
-        return limit
+        auto_paginate = False
+        print(f"Limit is set at {limit}. Auto pagination is {auto_paginate}.")
+        return limit, auto_paginate
 
     def _build_param_categories(
         self, params: PropertyV2RetrieveParams
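The replacement's contract is small enough to pin down directly. A hedged sketch, with MAX_LIMIT as an assumed stand-in for RequestLimits.PROPERTY_V2_MAX.value:

    MAX_LIMIT = 1000  # assumption: the real value comes from RequestLimits.PROPERTY_V2_MAX

    def set_limit_pagination(limit: int | None) -> tuple[int, bool]:
        # No limit (None or 0): fetch at the maximum page size and auto-paginate.
        if limit == 0 or limit is None:
            return MAX_LIMIT, True
        # Any explicit limit: honor it and fetch a single page.
        return limit, False

    assert set_limit_pagination(None) == (MAX_LIMIT, True)
    assert set_limit_pagination(0) == (MAX_LIMIT, True)
    assert set_limit_pagination(500) == (500, False)

Note that the old cap on limit > max_limit is gone: an explicit over-limit value now passes through unchanged rather than being clamped to the maximum.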
@@ -609,13 +601,16 @@ def retrieve(
 
         # Set limit
         request_params = input_params.params.copy()
+        request_params["auto_paginate"] = False  # auto_paginate is False by default
 
         # Make request with params
         if data.get(PARCL_PROPERTY_IDS):
             request_params["limit"] = PARCL_PROPERTY_IDS_LIMIT
             results = self._fetch_post_parcl_property_ids(params=request_params, data=data)
         else:
-            request_params["limit"] = self._validate_limit(input_params.limit)
+            request_params["limit"], request_params["auto_paginate"] = self._set_limit_pagination(
+                input_params.limit
+            )
             results = self._fetch_post(params=request_params, data=data)
 
         # Get metadata from results
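End to end, the flag only flips on the no-limit path. A self-contained sketch of how retrieve threads it into request_params (thread_flag is hypothetical; the max_limit default is an assumption):

    def thread_flag(limit: int | None, max_limit: int = 1000) -> dict:
        # Mirrors the hunk above: auto_paginate defaults to False, then the
        # no-limit path flips it via the new helper's tuple return.
        request_params = {"auto_paginate": False}
        if limit == 0 or limit is None:
            request_params["limit"], request_params["auto_paginate"] = max_limit, True
        else:
            request_params["limit"] = limit
        return request_params

    assert thread_flag(None) == {"auto_paginate": True, "limit": 1000}
    assert thread_flag(250) == {"auto_paginate": False, "limit": 250}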