@@ -149,18 +149,19 @@ def tearDown(self):
         ):
             self.reader.shutdown()

-    def _create_metric(self, i, value=None):
+    @staticmethod
+    def _create_metric(index, value=None):
         """Helper to create a test metric with a gauge data point

         Args:
-            i: Index/identifier for the metric
-            value: Optional specific value, defaults to i
+            index: Index/identifier for the metric
+            value: Optional specific value, defaults to index

         Returns:
             MetricsData: A fully formed metrics data object
         """
         if value is None:
-            value = i
+            value = index

         data_point = NumberDataPoint(
             attributes={},
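
(Note: the diff elides the rest of this helper between the two hunks. A minimal sketch of what that elided portion presumably does is below: it wraps the data point in a Gauge that the next hunk attaches as "data": gauge. The timestamps and field values here are assumptions, not taken from the change.)

    data_point = NumberDataPoint(
        attributes={},
        start_time_unix_nano=0,  # placeholder timestamp (assumed)
        time_unix_nano=0,        # placeholder timestamp (assumed)
        value=value,
    )
    # A single-point Gauge; the MetricsData built below attaches it as the
    # metric's "data" payload.
    gauge = Gauge(data_points=[data_point])
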
@@ -174,13 +175,17 @@ def _create_metric(self, i, value=None):
         return MetricsData(
             resource_metrics=[
                 ResourceMetrics(
-                    resource=Resource.create({"service.name": f"test-{i}"}),
+                    resource=Resource.create(
+                        {"service.name": f"test-{index}"}
+                    ),
                     scope_metrics=[
                         ScopeMetrics(
-                            scope=InstrumentationScope(name=f"test-scope-{i}"),
+                            scope=InstrumentationScope(
+                                name=f"test-scope-{index}"
+                            ),
                             metrics=[
                                 {
-                                    "name": f"metric-{i}",
+                                    "name": f"metric-{index}",
                                     "description": "Test metric",
                                     "unit": "1",
                                     "data": gauge,
@@ -264,12 +269,12 @@ def test_validation_errors(self):
     def test_export_batch(self):
         """Test that metrics are properly batched and exported"""
         # Create 10 simple metrics
-        for i in range(10):
+        for metric_idx in range(10):
             # Add metrics to the reader
-            self.reader._receive_metrics(self._create_metric(i))
+            self.reader._receive_metrics(self._create_metric(metric_idx))

             # After 5 metrics, the batch should be exported
-            if i == 4:
+            if metric_idx == 4:
                 self.assertEqual(len(self.exporter.get_exported_metrics()), 1)
                 # The batch should contain 5 resource metrics
                 self.assertEqual(
@@ -281,21 +286,21 @@ def test_export_batch(self):
                     5,
                 )
                 # Verify content of exported metrics
-                for j in range(5):
+                for batch_idx in range(5):
                     exported_metric = self.exporter.get_exported_metrics()[
                         0
-                    ].resource_metrics[j]
+                    ].resource_metrics[batch_idx]
                     self.assertEqual(
                         exported_metric.resource.attributes["service.name"],
-                        f"test-{j}",
+                        f"test-{batch_idx}",
                     )
                     self.assertEqual(
                         exported_metric.scope_metrics[0].scope.name,
-                        f"test-scope-{j}",
+                        f"test-scope-{batch_idx}",
                     )
                     self.assertEqual(
                         exported_metric.scope_metrics[0].metrics[0]["name"],
-                        f"metric-{j}",
+                        f"metric-{batch_idx}",
                     )

         # After all 10 metrics, we should have 2 batches
@@ -307,21 +312,21 @@ def test_export_batch(self):
         )

         # Verify content of second batch
-        for j in range(5, 10):
-            idx = j - 5
+        for batch_idx in range(5, 10):
+            idx = batch_idx - 5
             exported_metric = self.exporter.get_exported_metrics()[
                 1
             ].resource_metrics[idx]
             self.assertEqual(
                 exported_metric.resource.attributes["service.name"],
-                f"test-{j}",
+                f"test-{batch_idx}",
             )

     def test_export_batch_boundary_conditions(self):
         """Test batching behavior at boundary conditions"""
         # Test with exactly one batch size
-        for i in range(5):
-            self.reader._receive_metrics(self._create_metric(i))
+        for metric_idx in range(5):
+            self.reader._receive_metrics(self._create_metric(metric_idx))

         # Should have exactly one batch
         self.assertEqual(len(self.exporter.get_exported_metrics()), 1)
@@ -336,8 +341,8 @@ def test_export_batch_boundary_conditions(self):
         self.assertEqual(len(self.exporter.get_exported_metrics()), 0)

         # Test with batch size + 1
-        for i in range(6):
-            self.reader._receive_metrics(self._create_metric(i))
+        for metric_idx in range(6):
+            self.reader._receive_metrics(self._create_metric(metric_idx))

         # Force export of any remaining metrics
         self.reader.force_flush()
@@ -353,8 +358,8 @@ def test_export_batch_boundary_conditions(self):
     def test_shutdown(self):
         """Test that shutdown exports queued metrics and shuts down the exporter"""
         # Add 3 metrics (not enough for automatic batch export)
-        for i in range(3):
-            self.reader._receive_metrics(self._create_metric(i))
+        for metric_idx in range(3):
+            self.reader._receive_metrics(self._create_metric(metric_idx))

         # No exports should have happened yet (batch size not reached)
         self.assertEqual(len(self.exporter.get_exported_metrics()), 0)
@@ -377,8 +382,8 @@ def test_shutdown(self):
     def test_force_flush(self):
         """Test that force_flush exports queued metrics"""
         # Add 3 metrics (not enough for automatic batch export)
-        for i in range(3):
-            self.reader._receive_metrics(self._create_metric(i))
+        for metric_idx in range(3):
+            self.reader._receive_metrics(self._create_metric(metric_idx))

         # No exports should have happened yet (batch size not reached)
         self.assertEqual(len(self.exporter.get_exported_metrics()), 0)
@@ -413,9 +418,9 @@ def test_with_meter_provider(self):
         gauge = meter.create_gauge("test_gauge", description="Test gauge")

         # Record some metrics
-        for i in range(10):
+        for metric_idx in range(10):
             counter.add(1)
-            gauge.set(i)
+            gauge.set(metric_idx)

             # Manually collect after each recording
             # This simulates the behavior in synchronous_read.py example
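
(For context, the record-then-collect pattern this test simulates looks roughly like the sketch below. Only MeterProvider, the instrument calls, and collect()/force_flush()/shutdown() appear as the tests use them; the exporter and reader are the classes introduced by this change and are assumed to be in scope, since the diff does not show their import path, and the counter name is hypothetical.)

    from opentelemetry.sdk.metrics import MeterProvider

    exporter = InMemoryMetricExporter()                   # assumed in scope
    reader = SynchronousExportingMetricReader(exporter)   # assumed in scope
    provider = MeterProvider(metric_readers=[reader])
    meter = provider.get_meter("example")

    counter = meter.create_counter("test_counter")  # hypothetical name
    gauge = meter.create_gauge("test_gauge", description="Test gauge")

    for metric_idx in range(10):
        counter.add(1)
        gauge.set(metric_idx)
        # Collect synchronously after each recording, as in the
        # synchronous_read.py example the test refers to.
        reader.collect()

    reader.force_flush()
    reader.shutdown()
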
@@ -474,9 +479,9 @@ def record_and_collect(num_metrics):
         # Run multiple threads concurrently recording metrics
         with ThreadPoolExecutor(max_workers=10) as executor:
             futures = []
-            for i in range(10):
+            for thread_idx in range(10):
                 future = executor.submit(
-                    record_and_collect, (i + 1) * 5
+                    record_and_collect, (thread_idx + 1) * 5
                 )  # 5, 10, 15, ... 50 metrics
                 futures.append(future)

@@ -512,8 +517,8 @@ def test_failing_exporter(self):
         )

         # Add metrics to the reader
-        for i in range(10):
-            reader._receive_metrics(self._create_metric(i))
+        for metric_idx in range(10):
+            reader._receive_metrics(self._create_metric(metric_idx))

         # Exporter should have been called at least once
         self.assertGreaterEqual(failing_exporter.export_call_count, 1)
@@ -536,17 +541,18 @@ def test_exception_exporter(self):

         # Add metrics to the reader - this should not propagate exceptions
         try:
-            for i in range(10):
-                reader._receive_metrics(self._create_metric(i))
+            for metric_idx in range(10):
+                reader._receive_metrics(self._create_metric(metric_idx))
             reader.force_flush()
             reader.shutdown()
-        except Exception as e:
-            self.fail(f"Exception should be caught: {str(e)}")
+        except RuntimeError as runtime_err:
+            self.fail(f"RuntimeError should be caught: {str(runtime_err)}")

         # Exporter should have been called
         self.assertGreaterEqual(exception_exporter.export_call_count, 1)

-    def test_garbage_collection(self):
+    @staticmethod
+    def test_garbage_collection():
         """Test that the reader can be garbage collected"""
         exporter = InMemoryMetricExporter()
         reader = SynchronousExportingMetricReader(exporter)
@@ -559,10 +565,9 @@ def test_garbage_collection(self):
         del reader
         gc.collect()

-        self.assertIsNone(
-            weak_ref(),
-            "The SynchronousExportingMetricReader object wasn't garbage collected",
-        )
+        assert (
+            weak_ref() is None
+        ), "The SynchronousExportingMetricReader object wasn't garbage collected"

     def test_at_fork_reinit(self):
         """Test that the _at_fork_reinit method properly resets internal state"""
@@ -675,9 +680,9 @@ def _target():
                     "export_count": len(metrics),
                 }
                 pipe_conn.send(result)
-            except Exception as e:
+            except (RuntimeError, OSError, IOError) as process_err:
                 # Send exception info back to parent
-                pipe_conn.send({"error": str(e)})
+                pipe_conn.send({"error": str(process_err)})
             finally:
                 pipe_conn.close()

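
(The final hunk sits inside a child-process target that reports its outcome to the parent over a multiprocessing Pipe. A minimal sketch of that parent/child pattern is below; only the multiprocessing API is standard library, the payload is a placeholder, and the connection is passed explicitly here for self-containedness, whereas the test's _target() appears to close over it.)

    import multiprocessing

    def _target(pipe_conn):
        try:
            # Hypothetical child-side work; the real test exercises the
            # reader here and reports how many metrics were exported.
            pipe_conn.send({"export_count": 0})
        except (RuntimeError, OSError) as process_err:
            # Report the failure to the parent instead of raising.
            pipe_conn.send({"error": str(process_err)})
        finally:
            pipe_conn.close()

    if __name__ == "__main__":
        parent_conn, child_conn = multiprocessing.Pipe()
        proc = multiprocessing.Process(target=_target, args=(child_conn,))
        proc.start()
        result = parent_conn.recv()  # {"export_count": ...} or {"error": ...}
        proc.join()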