|
13 | 13 | from django.http import HttpResponse, JsonResponse |
14 | 14 | from opentelemetry import trace, metrics |
15 | 15 | from opentelemetry.trace.span import format_trace_id |
16 | | -from opentelemetry.sdk.resources import Resource |
17 | | -from opentelemetry.sdk.metrics import MeterProvider |
18 | | -from opentelemetry.sdk.metrics.export import PeriodicExportingMetricReader |
19 | | -from opentelemetry.exporter.otlp.proto.grpc.metric_exporter import OTLPMetricExporter |
20 | | -from opentelemetry.semconv.resource import ResourceAttributes |
21 | 16 |
|
22 | 17 | logger = logging.getLogger(__name__) |
23 | 18 |
|
24 | | -# Custom export pipeline - only create if specific env vars exist |
25 | | -pipeline_meter = None |
26 | | -if os.environ.get('SERVICE_NAME') and os.environ.get('DEPLOYMENT_ENVIRONMENT_NAME'): |
27 | | - service_name = os.environ.get('SERVICE_NAME') |
28 | | - deployment_env = os.environ.get('DEPLOYMENT_ENVIRONMENT_NAME') |
29 | | - pipeline_resource = Resource.create({ |
30 | | - #ResourceAttributes.DEPLOYMENT_ENVIRONMENT_NAME maps to dimension 'deployment.name' so "deployment.environment.name" used |
31 | | - #to assign value correctly. |
32 | | - ResourceAttributes.SERVICE_NAME: service_name, |
33 | | - "deployment.environment.name": deployment_env |
34 | | - }) |
35 | | - |
36 | | - pipeline_metric_exporter = OTLPMetricExporter(endpoint="localhost:4317") |
37 | | - pipeline_metric_reader = PeriodicExportingMetricReader( |
38 | | - exporter=pipeline_metric_exporter, |
39 | | - export_interval_millis=1000 |
40 | | - ) |
41 | | - |
42 | | - pipeline_meter_provider = MeterProvider( |
43 | | - resource=pipeline_resource, |
44 | | - metric_readers=[pipeline_metric_reader] |
45 | | - ) |
46 | | - |
47 | | - pipeline_meter = pipeline_meter_provider.get_meter("myMeter") |
48 | | - |
49 | | - |
50 | | -#python equivalent of Meter meter = GlobalOpenTelemetry.getMeter("myMeter"); for custom metrics |
| 19 | +# Python equivalent of Java's GlobalOpenTelemetry.getMeter("myMeter"), used for custom metrics. |
51 | 20 | meter = metrics.get_meter("myMeter") |
52 | 21 | agent_based_counter = meter.create_counter("agent_based_counter", unit="1", description="agent export counter") |
53 | 22 | agent_based_histogram = meter.create_histogram("agent_based_histogram", description="agent export histogram") |
54 | 23 | agent_based_gauge = meter.create_up_down_counter("agent_based_gauge", unit="1", description="agent export gauge") |
55 | 24 |
|
56 | | -# Create pipeline metrics only if pipeline exists |
57 | | -custom_pipeline_counter = None |
58 | | -custom_pipeline_histogram = None |
59 | | -custom_pipeline_gauge = None |
60 | | -if pipeline_meter: |
61 | | - custom_pipeline_counter = pipeline_meter.create_counter("custom_pipeline_counter", unit="1", description="pipeline export counter") |
62 | | - custom_pipeline_histogram = pipeline_meter.create_histogram("custom_pipeline_histogram", description="pipeline export histogram") |
63 | | - custom_pipeline_gauge = pipeline_meter.create_up_down_counter("custom_pipeline_gauge", unit="1", description="pipeline export gauge") |
64 | | - |
65 | 25 |
|
66 | 26 | should_send_local_root_client_call = False |
67 | 27 | lock = threading.Lock() |
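Note: the deleted block above hand-built a second, non-global metrics pipeline alongside the agent-provided one. If a dedicated OTLP pipeline is ever needed again, a minimal sketch (an assumption, not part of this change; the environment-variable defaults, endpoint, and export interval are illustrative) is to install the SDK provider globally so the existing metrics.get_meter("myMeter") call picks it up, rather than keeping a parallel meter object:

# Sketch only: install an SDK MeterProvider as the global provider.
# Skip this when the auto-instrumentation agent already configures one at startup.
import os

from opentelemetry import metrics
from opentelemetry.sdk.metrics import MeterProvider
from opentelemetry.sdk.metrics.export import PeriodicExportingMetricReader
from opentelemetry.exporter.otlp.proto.grpc.metric_exporter import OTLPMetricExporter
from opentelemetry.sdk.resources import Resource

resource = Resource.create({
    # Illustrative fallbacks; the real values come from the deployment environment.
    "service.name": os.environ.get("SERVICE_NAME", "e2e-test-service"),
    "deployment.environment.name": os.environ.get("DEPLOYMENT_ENVIRONMENT_NAME", "e2e-test"),
})
reader = PeriodicExportingMetricReader(
    OTLPMetricExporter(endpoint="localhost:4317"),
    export_interval_millis=1000,
)
metrics.set_meter_provider(MeterProvider(resource=resource, metric_readers=[reader]))
meter = metrics.get_meter("myMeter")  # now backed by the provider installed above

With the provider installed globally, the agent_based_* instruments created below would export through this pipeline as well, which avoids the pipeline_meter/None branching that this commit removes.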
@@ -103,11 +63,6 @@ def aws_sdk_call(request): |
103 | 63 | agent_based_histogram.record(random.randint(100, 1000), {"Operation": "histogram"}) |
104 | 64 | agent_based_gauge.add(random.randint(-10, 10), {"Operation": "gauge"}) |
105 | 65 |
|
106 | | - if pipeline_meter: |
107 | | - custom_pipeline_counter.add(1, {"Operation": "pipeline_counter"}) |
108 | | - custom_pipeline_histogram.record(random.randint(100, 1000), {"Operation": "pipeline_histogram"}) |
109 | | - custom_pipeline_gauge.add(random.randint(-10, 10), {"Operation": "pipeline_gauge"}) |
110 | | - |
111 | 66 | bucket_name = "e2e-test-bucket-name" |
112 | 67 |
|
113 | 68 | # Add a unique test ID to bucketname to associate buckets to specific test runs |
|