@@ -63,7 +63,8 @@ def __init__(
6363 """Initialize AsyncClient with configurable parameters.
6464
6565 Args:
66- api_key: API key for authentication. If None, will try to load from environment
66+ api_key: API key for authentication. If None, will try to
67+ load from environment
6768 verify_ssl: Whether to verify SSL certificates
6869 timeout: Request timeout in seconds. None means no timeout (infinite)
6970 max_retries: Maximum number of retry attempts
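For reference, a minimal construction sketch based on the parameters documented in this docstring; the import path and the concrete values are assumptions, not taken from the diff:

from scrapegraph_py import AsyncClient  # assumed import path

client = AsyncClient(
    api_key=None,      # per the docstring, falls back to the environment when None
    verify_ssl=True,
    timeout=30.0,      # None would mean no timeout (infinite)
    max_retries=3,
)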
@@ -83,7 +84,8 @@ def __init__(
 
         validate_api_key(api_key)
         logger.debug(
-            f"🛠️ Configuration: verify_ssl={verify_ssl}, timeout={timeout}, max_retries={max_retries}"
+            f"🛠️ Configuration: verify_ssl={verify_ssl}, "
+            f"timeout={timeout}, max_retries={max_retries}"
         )
         self.api_key = api_key
         self.headers = {**DEFAULT_HEADERS, "SGAI-APIKEY": api_key}
@@ -104,7 +106,8 @@ async def _make_request(self, method: str, url: str, **kwargs) -> Any:
         for attempt in range(self.max_retries):
             try:
                 logger.info(
-                    f"🚀 Making {method} request to {url} (Attempt {attempt + 1}/{self.max_retries})"
+                    f"🚀 Making {method} request to {url} "
+                    f"(Attempt {attempt + 1}/{self.max_retries})"
                 )
                 logger.debug(f"🔍 Request parameters: {kwargs}")
 
@@ -255,7 +258,8 @@ async def get_credits(self):
255258 f"{ API_BASE_URL } /credits" ,
256259 )
257260 logger .info (
258- f"✨ Credits info retrieved: { result .get ('remaining_credits' )} credits remaining"
261+ f"✨ Credits info retrieved: "
262+ f"{ result .get ('remaining_credits' )} credits remaining"
259263 )
260264 return result
261265
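A hedged usage sketch for the credits call above; only the remaining_credits key is taken from the logged result, and the client instance name is assumed:

# Sketch only: assumes an already-constructed AsyncClient named `client`.
credits = await client.get_credits()
print(credits.get("remaining_credits"))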
@@ -271,8 +275,9 @@ async def searchscraper(
         Args:
             user_prompt: The search prompt string
             num_results: Number of websites to scrape (3-20). Default is 3.
-                More websites provide better research depth but cost more credits.
-                Credit calculation: 30 base + 10 per additional website beyond 3.
+                More websites provide better research depth but cost more
+                credits. Credit calculation: 30 base + 10 per additional
+                website beyond 3.
             headers: Optional headers to send with the request
             output_schema: Optional schema to structure the output
         """
@@ -323,7 +328,8 @@ async def crawl(
         batch_size: Optional[int] = None,
         sitemap: bool = False,
     ):
-        """Send a crawl request with support for both AI extraction and markdown conversion modes"""
+        """Send a crawl request with support for both AI extraction and
+        markdown conversion modes"""
         logger.info("🔍 Starting crawl request")
         logger.debug(f"🌐 URL: {url}")
         logger.debug(
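A hedged call sketch for crawl: url, batch_size, and sitemap are the only parameters visible in this hunk, and whichever argument switches between AI extraction and markdown conversion is not shown here:

# Sketch only: assumes an already-constructed AsyncClient named `client`.
result = await client.crawl(
    url="https://example.com",
    batch_size=10,
    sitemap=True,
)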