Skip to content

Commit cd54bfe

Browse files
author
Rahul Saini
authored
Optimized suboptimal code.
1 parent 06dea5e commit cd54bfe

File tree

1 file changed

+13
-20
lines changed

1 file changed

+13
-20
lines changed

ipinfo/handler_async.py

Lines changed: 13 additions & 20 deletions
Original file line numberDiff line numberDiff line change
@@ -141,9 +141,7 @@ async def getDetails(self, ip_address=None, timeout=None):
141141

142142
# check if bogon.
143143
if ip_address and is_bogon(ip_address):
144-
details = {}
145-
details["ip"] = ip_address
146-
details["bogon"] = True
144+
details = {"ip": ip_address, "bogon": True}
147145
return Details(details)
148146

149147
# check cache first.
@@ -225,7 +223,7 @@ async def getBatchDetails(
225223
"""
226224
self._ensure_aiohttp_ready()
227225

228-
if batch_size == None:
226+
if batch_size is None:
229227
batch_size = BATCH_MAX_SIZE
230228

231229
result = {}
@@ -249,7 +247,7 @@ async def getBatchDetails(
249247
lookup_addresses.append(ip_address)
250248

251249
# all in cache - return early.
252-
if len(lookup_addresses) == 0:
250+
if not lookup_addresses:
253251
return result
254252

255253
# do start timer if necessary
@@ -282,7 +280,7 @@ async def getBatchDetails(
282280
)
283281

284282
# if all done, return result.
285-
if len(pending) == 0:
283+
if not pending:
286284
return result
287285

288286
# if some had a timeout, first cancel timed out stuff and wait for
@@ -370,9 +368,7 @@ async def getBatchDetailsIter(
370368
ip_address = ip_address.exploded
371369

372370
if ip_address and is_bogon(ip_address):
373-
details = {}
374-
details["ip"] = ip_address
375-
details["bogon"] = True
371+
details = {"ip": ip_address, "bogon": True}
376372
yield Details(details)
377373
else:
378374
lookup_addresses.append(ip_address)
@@ -383,24 +379,21 @@ async def getBatchDetailsIter(
383379
except KeyError:
384380
lookup_addresses.append(ip_address)
385381

386-
if len(lookup_addresses) == 0:
382+
if not lookup_addresses:
387383
yield results.items()
388384

389385
url = API_URL + "/batch"
390386
headers = handler_utils.get_headers(self.access_token, self.headers)
391387
headers["content-type"] = "application/json"
392388

393389
async def process_batch(batch):
394-
try:
395-
async with aiohttp.ClientSession(headers=headers) as session:
396-
response = await session.post(url, json=batch)
397-
response.raise_for_status()
398-
json_response = await response.json()
399-
for ip_address, details in json_response.items():
400-
self.cache[cache_key(ip_address)] = details
401-
results[ip_address] = details
402-
except Exception as e:
403-
raise e
390+
async with aiohttp.ClientSession(headers=headers) as session:
391+
response = await session.post(url, json=batch)
392+
response.raise_for_status()
393+
json_response = await response.json()
394+
for ip_address, details in json_response.items():
395+
self.cache[cache_key(ip_address)] = details
396+
results[ip_address] = details
404397

405398
for i in range(0, len(lookup_addresses), batch_size):
406399
batch = lookup_addresses[i : i + batch_size]

0 commit comments

Comments (0)