@@ -317,6 +317,10 @@ def scrape(
         allow_localhost=config.allow_localhost,
     )
 
+    from .stats import get_metrics_collector
+    collector = get_metrics_collector()
+    metrics = collector.start(validated_url, "GET")
+
     start_time = time.perf_counter()
     max_retries = config.max_retries
     last_error: Exception | None = None
@@ -352,6 +356,13 @@ def scrape(
 
             request_time = time.perf_counter() - start_time
 
+            metrics.complete(
+                status_code=response.status_code,
+                bytes_received=len(response.content),
+                cached=False
+            )
+            collector.finish(metrics)
+
             return ScrapeResult(
                 text=response.text,
                 content=response.content,
@@ -370,22 +381,31 @@ def scrape(
             if attempt < max_retries:
                 time.sleep(2 ** attempt)
                 continue
+            metrics.fail(f"Timeout: {str(e)}")
+            collector.finish(metrics)
             raise last_error
 
         except httpx.ConnectError as e:
             last_error = NetworkError(f"Connection failed: {e}", cause=e)
             if attempt < max_retries:
                 time.sleep(2 ** attempt)
                 continue
+            metrics.fail(f"Connection error: {str(e)}")
+            collector.finish(metrics)
             raise last_error
 
         except Exception as e:
             last_error = NetworkError(f"Request failed: {e}", cause=e)
             if attempt < max_retries:
                 time.sleep(2 ** attempt)
                 continue
+            metrics.fail(f"Error: {str(e)}")
+            collector.finish(metrics)
             raise last_error
 
+    metrics.fail(f"Retry exhausted after {max_retries + 1} attempts")
+    collector.finish(metrics)
+
     raise RetryExhausted(
         f"All {max_retries + 1} attempts failed",
         attempts=max_retries + 1,
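
Note: the hunks above call into a .stats module (get_metrics_collector(), collector.start()/finish(), metrics.complete()/fail()) that is not included in this diff. The sketch below is only a minimal guess at what that interface could look like, written so the calls above are readable in isolation; every name and field in it is an assumption, not the actual module.

# Hypothetical sketch of the .stats module assumed by the diff above.
from __future__ import annotations

import threading
import time
from dataclasses import dataclass, field


@dataclass
class RequestMetrics:
    url: str
    method: str
    started_at: float = field(default_factory=time.perf_counter)
    status_code: int | None = None
    bytes_received: int = 0
    cached: bool = False
    error: str | None = None
    duration: float | None = None

    def complete(self, status_code: int, bytes_received: int, cached: bool = False) -> None:
        # Record a successful response and stop the timer.
        self.status_code = status_code
        self.bytes_received = bytes_received
        self.cached = cached
        self.duration = time.perf_counter() - self.started_at

    def fail(self, error: str) -> None:
        # Record the failure reason and stop the timer.
        self.error = error
        self.duration = time.perf_counter() - self.started_at


class MetricsCollector:
    def __init__(self) -> None:
        self._lock = threading.Lock()
        self._finished: list[RequestMetrics] = []

    def start(self, url: str, method: str) -> RequestMetrics:
        # One metrics object per scrape() call, started before the retry loop.
        return RequestMetrics(url=url, method=method)

    def finish(self, metrics: RequestMetrics) -> None:
        # Called exactly once per request, on success or terminal failure.
        with self._lock:
            self._finished.append(metrics)


_collector = MetricsCollector()


def get_metrics_collector() -> MetricsCollector:
    # Module-level singleton, matching the import used inside scrape().
    return _collector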