File tree Expand file tree Collapse file tree 2 files changed +59
-34
lines changed Expand file tree Collapse file tree 2 files changed +59
-34
lines changed Load Diff This file was deleted.
Original file line number Diff line number Diff line change 1+ # Instructions
2+ # 1. Run the OpenTelemetry Collector with the OTLP receiver enabled
3+ # Create otel-config.yaml with the following content:
4+ # receivers:
5+ # otlp:
6+ # protocols:
7+ # grpc:
8+ # endpoint: "0.0.0.0:4317"
9+ # http:
10+ # endpoint: "0.0.0.0:4318"
11+
12+ # exporters:
13+ # logging:
14+ # loglevel: debug
15+
16+ # service:
17+ # pipelines:
18+ # traces:
19+ # receivers: [otlp]
20+ # exporters: [logging]
21+ # docker pull otel/opentelemetry-collector:latest
22+ # docker run --rm -p 4317:4317 -p 4318:4318 -v $(pwd)/otel-config.yaml:/otel-config.yaml otel/opentelemetry-collector --config otel-config.yaml
23+ # 2. Run the following code
24+
from langtrace_python_sdk import langtrace
from openai import OpenAI
from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter


# Configure the OTLP exporter to send spans to the local collector's HTTP endpoint.
# NOTE(fix): do NOT override Content-Type in `headers` — the proto.http exporter
# serializes spans as protobuf and sets "Content-Type: application/x-protobuf"
# itself; forcing "application/json" overrides that header and makes the
# collector reject or misparse the payload.
otlp_endpoint = "http://localhost:4318/v1/traces"
otlp_exporter = OTLPSpanExporter(endpoint=otlp_endpoint)
# batch=False exports each span immediately instead of buffering — convenient
# for a short-lived demo script that exits right after one request.
langtrace.init(custom_remote_exporter=otlp_exporter, batch=False)
37+
def chat_with_openai():
    """Send one non-streaming chat request to gpt-3.5-turbo and print the reply.

    Requires OPENAI_API_KEY in the environment (read by the OpenAI client).
    The call is traced by the langtrace instrumentation configured above.
    """
    client = OpenAI()
    prompt = {
        "role": "user",
        "content": "Hello, I'm a human.",
    }
    response = client.chat.completions.create(
        model="gpt-3.5-turbo",
        messages=[prompt],
        stream=False,
    )
    # Print only the assistant text of the first (and only) choice.
    print(response.choices[0].message.content)
52+
53+
def main():
    """Script entry point: run the single traced chat example."""
    chat_with_openai()
56+
57+
# Execute the demo only when run as a script, not when imported as a module.
if __name__ == "__main__":
    main()
You can’t perform that action at this time.
0 commit comments