@@ -4,22 +4,17 @@
 from openai import OpenAI
 
 # NOTE: OpenTelemetry Python Logs and Events APIs are in beta
-from opentelemetry import _events, _logs, metrics, trace
+from opentelemetry import _events, _logs, trace
 from opentelemetry.exporter.otlp.proto.grpc._log_exporter import (
     OTLPLogExporter,
 )
-from opentelemetry.exporter.otlp.proto.grpc.metric_exporter import (
-    OTLPMetricExporter,
-)
 from opentelemetry.exporter.otlp.proto.grpc.trace_exporter import (
     OTLPSpanExporter,
 )
 from opentelemetry.instrumentation.openai_v2 import OpenAIInstrumentor
 from opentelemetry.sdk._events import EventLoggerProvider
 from opentelemetry.sdk._logs import LoggerProvider
 from opentelemetry.sdk._logs.export import BatchLogRecordProcessor
-from opentelemetry.sdk.metrics import MeterProvider
-from opentelemetry.sdk.metrics.export import PeriodicExportingMetricReader
 from opentelemetry.sdk.trace import TracerProvider
 from opentelemetry.sdk.trace.export import BatchSpanProcessor
 
@@ -36,42 +31,22 @@
 )
 _events.set_event_logger_provider(EventLoggerProvider())
 
-# configure metrics
-metrics.set_meter_provider(
-    MeterProvider(
-        metric_readers=[
-            PeriodicExportingMetricReader(
-                OTLPMetricExporter(),
-            ),
-        ]
-    )
-)
-
-from opentelemetry.instrumentation.httpx import HTTPXClientInstrumentor
-
-HTTPXClientInstrumentor().instrument()
-
 # instrument OpenAI
 OpenAIInstrumentor().instrument()
 
-tracer = trace.get_tracer(__name__)
-
 
 def main():
     client = OpenAI()
-
-    for u in range(10):
-        with tracer.start_as_current_span("main"):
-            chat_completion = client.chat.completions.create(
-                model=os.getenv("CHAT_MODEL", "gpt-4o-mini"),
-                messages=[
-                    {
-                        "role": "user",
-                        "content": "Write a haiku on OpenTelemetry.",
-                    },
-                ],
-            )
-            print(chat_completion.choices[0].message.content)
+    chat_completion = client.chat.completions.create(
+        model=os.getenv("CHAT_MODEL", "gpt-4o-mini"),
+        messages=[
+            {
+                "role": "user",
+                "content": "Write a short poem on OpenTelemetry.",
+            },
+        ],
+    )
+    print(chat_completion.choices[0].message.content)
 
 
 if __name__ == "__main__":
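For reference, a sketch of the full example script after this change is applied. The diff does not show the top of the file, the tracer/logger provider setup between the two hunks (new lines 21-30), or anything below the `__main__` guard, so those parts are assumptions reconstructed from the imports and the surrounding context lines (for example, `import os` is implied by the `os.getenv` call, and the `)` / `_events.set_event_logger_provider(...)` context lines suggest the standard OTLP trace and log provider setup).

# Sketch of the resulting example; provider setup and the final main() call
# are assumed, since they fall outside the diff hunks above.
import os

from openai import OpenAI

# NOTE: OpenTelemetry Python Logs and Events APIs are in beta
from opentelemetry import _events, _logs, trace
from opentelemetry.exporter.otlp.proto.grpc._log_exporter import (
    OTLPLogExporter,
)
from opentelemetry.exporter.otlp.proto.grpc.trace_exporter import (
    OTLPSpanExporter,
)
from opentelemetry.instrumentation.openai_v2 import OpenAIInstrumentor
from opentelemetry.sdk._events import EventLoggerProvider
from opentelemetry.sdk._logs import LoggerProvider
from opentelemetry.sdk._logs.export import BatchLogRecordProcessor
from opentelemetry.sdk.trace import TracerProvider
from opentelemetry.sdk.trace.export import BatchSpanProcessor

# configure tracing (assumed: not shown in the diff hunks)
trace.set_tracer_provider(TracerProvider())
trace.get_tracer_provider().add_span_processor(
    BatchSpanProcessor(OTLPSpanExporter())
)

# configure logging and events (assumed up to the ")" context line in hunk 2)
_logs.set_logger_provider(LoggerProvider())
_logs.get_logger_provider().add_log_record_processor(
    BatchLogRecordProcessor(OTLPLogExporter())
)
_events.set_event_logger_provider(EventLoggerProvider())

# instrument OpenAI
OpenAIInstrumentor().instrument()


def main():
    client = OpenAI()
    chat_completion = client.chat.completions.create(
        model=os.getenv("CHAT_MODEL", "gpt-4o-mini"),
        messages=[
            {
                "role": "user",
                "content": "Write a short poem on OpenTelemetry.",
            },
        ],
    )
    print(chat_completion.choices[0].message.content)


if __name__ == "__main__":
    main()  # assumed: the call under the __main__ guard is outside the hunk

Running the script needs OPENAI_API_KEY set for the OpenAI client, and the gRPC OTLP exporters send spans and logs to OTEL_EXPORTER_OTLP_ENDPOINT (defaulting to a collector on localhost:4317).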