Skip to content

Commit d815106

Browse files
committed
fixes & impl for async
1 parent 62fe3ed commit d815106

File tree

2 files changed: 53 additions and 24 deletions

ipinfo/handler.py

Lines changed: 14 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -116,7 +116,7 @@ def getBatchDetails(
116116
input list).
117117
118118
The input list is broken up into batches to abide by API requirements.
119-
The batch size can be adjusted with `batch_size` but is clipped to
119+
The batch size can be adjusted with `batch_size` but is clipped to
120120
`BATCH_MAX_SIZE`.
121121
Defaults to `BATCH_MAX_SIZE`.
122122
@@ -158,14 +158,22 @@ def getBatchDetails(
158158
else:
159159
lookup_addresses.append(ip_address)
160160

161-
# prepare req http options
162-
req_opts = {**self.request_options, "timeout": timeout_per_batch}
161+
# all in cache - return early.
162+
if len(lookup_addresses) == 0:
163+
return result
163164

165+
# do start timer if necessary
164166
if timeout_total is not None:
165167
start_time = time.time()
166168

169+
# prepare req http options
170+
req_opts = {**self.request_options, "timeout": timeout_per_batch}
171+
167172
# loop over batch chunks and do lookup for each.
168-
for i in range(0, len(ip_addresses), batch_size):
173+
url = API_URL + "/batch"
174+
headers = handler_utils.get_headers(self.access_token)
175+
headers["content-type"] = "application/json"
176+
for i in range(0, len(lookup_addresses), batch_size):
169177
# quit if total timeout is reached.
170178
if (
171179
timeout_total is not None
@@ -176,14 +184,11 @@ def getBatchDetails(
176184
else:
177185
return result
178186

179-
chunk = ip_addresses[i : i + batch_size]
187+
chunk = lookup_addresses[i : i + batch_size]
180188

181189
# lookup
182-
url = API_URL + "/batch"
183-
headers = handler_utils.get_headers(self.access_token)
184-
headers["content-type"] = "application/json"
185190
response = requests.post(
186-
url, json=lookup_addresses, headers=headers, **req_opts
191+
url, json=chunk, headers=headers, **req_opts
187192
)
188193

189194
# fail on bad status codes

ipinfo/handler_async.py

Lines changed: 39 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -118,7 +118,14 @@ async def getDetails(self, ip_address=None):
118118

119119
return Details(details)
120120

121-
async def getBatchDetails(self, ip_addresses, batch_size=None):
121+
async def getBatchDetails(
122+
self,
123+
ip_addresses,
124+
batch_size=None,
125+
timeout_per_batch=BATCH_REQ_TIMEOUT_DEFAULT,
126+
timeout_total=None,
127+
raise_on_fail=True,
128+
):
122129
"""
123130
Get details for a batch of IP addresses at once.
124131
@@ -158,31 +165,48 @@ async def getBatchDetails(self, ip_addresses, batch_size=None):
158165
else:
159166
lookup_addresses.append(ip_address)
160167

168+
# all in cache - return early.
169+
if len(lookup_addresses) == 0:
170+
return result
171+
172+
# do start timer if necessary
173+
if timeout_total is not None:
174+
start_time = time.time()
175+
161176
# loop over batch chunks and prepare coroutines for each.
177+
url = API_URL + "/batch"
178+
headers = handler_utils.get_headers(self.access_token)
179+
headers["content-type"] = "application/json"
162180
reqs = []
163-
for i in range(0, len(ip_addresses), batch_size):
164-
chunk = ip_addresses[i : i + batch_size]
165-
166-
# all in cache - return early.
167-
if len(lookup_addresses) == 0:
168-
return result
181+
for i in range(0, len(lookup_addresses), batch_size):
182+
chunk = lookup_addresses[i : i + batch_size]
169183

170184
# do http req
171-
url = API_URL + "/batch"
172-
headers = handler_utils.get_headers(self.access_token)
173-
headers["content-type"] = "application/json"
174185
reqs.append(
175186
self.httpsess.post(
176-
url, data=json.dumps(lookup_addresses), headers=headers
187+
url,
188+
data=json.dumps(chunk),
189+
headers=headers,
190+
timeout=timeout_per_batch,
177191
)
178192
)
179193

180-
resps = await asyncio.gather(*reqs)
194+
resps = await asyncio.wait_for(
195+
asyncio.gather(*reqs, return_exceptions=raise_on_fail),
196+
timeout_total
197+
)
181198
for resp in resps:
182199
# gather data
183-
if resp.status == 429:
184-
raise RequestQuotaExceededError()
185-
resp.raise_for_status()
200+
try:
201+
if resp.status == 429:
202+
raise RequestQuotaExceededError()
203+
resp.raise_for_status()
204+
except Exception as e:
205+
if raise_on_fail:
206+
raise e
207+
else:
208+
return result
209+
186210
json_resp = await resp.json()
187211

188212
# format & fill up cache

0 commit comments

Comments (0)