diff --git a/packages/sample-app/package.json b/packages/sample-app/package.json
index 9ec07b7d..a7823931 100644
--- a/packages/sample-app/package.json
+++ b/packages/sample-app/package.json
@@ -23,6 +23,7 @@
     "run:llamaindex": "npm run build && node dist/src/sample_llamaindex.js",
     "run:pinecone": "npm run build && node dist/src/sample_pinecone.js",
     "run:langchain": "npm run build && node dist/src/sample_langchain.js",
+    "run:otel": "npm run build && node dist/src/sample_otel.js",
     "lint": "eslint . --ext .ts",
     "lint:fix": "eslint . --ext .ts --fix"
   },
diff --git a/packages/sample-app/src/sample_otel.ts b/packages/sample-app/src/sample_otel.ts
new file mode 100644
index 00000000..8cbd0dcc
--- /dev/null
+++ b/packages/sample-app/src/sample_otel.ts
@@ -0,0 +1,64 @@
+import { DiagConsoleLogger, DiagLogLevel, diag } from "@opentelemetry/api";
+import { OTLPTraceExporter } from "@opentelemetry/exporter-trace-otlp-http";
+import { registerInstrumentations } from "@opentelemetry/instrumentation";
+import {
+  NodeTracerProvider,
+  SimpleSpanProcessor,
+} from "@opentelemetry/sdk-trace-node";
+import { OpenAIInstrumentation } from "@traceloop/instrumentation-openai";
+
+// Sample: export OpenAI spans to an OTLP endpoint via a vanilla
+// OpenTelemetry NodeTracerProvider (no Traceloop SDK wrapper).
+diag.setLogger(new DiagConsoleLogger(), DiagLogLevel.DEBUG);
+
+const traceProvider = new NodeTracerProvider();
+
+const spanExporter = new OTLPTraceExporter({
+  url: "https://otel.baselime.io/v1",
+  timeoutMillis: 5000,
+  headers: {
+    "x-api-key": process.env.BASELIME_API_KEY,
+  },
+});
+
+const spanProcessor = new SimpleSpanProcessor(spanExporter);
+
+traceProvider.addSpanProcessor(spanProcessor);
+traceProvider.register();
+
+registerInstrumentations({
+  instrumentations: [new OpenAIInstrumentation()],
+});
+
+const tracer = traceProvider.getTracer("opentelemetry-esm-instrumentation");
+
+// NOTE(review): this import is placed after registerInstrumentations so the
+// compiled require() of "openai" happens after patching — this only works if
+// the build emits CommonJS; native ESM hoists imports. TODO confirm tsconfig.
+import OpenAI from "openai";
+
+const openai = new OpenAI({
+  apiKey: process.env.OPENAI_API_KEY,
+});
+
+// Runs one chat completion inside a manually managed active span.
+async function doOpenAI() {
+  await tracer.startActiveSpan("doOpenAI", async (span) => {
+    try {
+      const response = await openai.chat.completions.create({
+        model: "gpt-3.5-turbo",
+        messages: [
+          {
+            role: "user",
+            content: "Hello, how are you?",
+          },
+        ],
+      });
+      console.log(response.choices[0].message.content);
+    } finally {
+      // End the span even when the request throws; otherwise it is never
+      // exported (startActiveSpan does not end spans automatically).
+      span.end();
+    }
+  });
+}
+
+doOpenAI()
+  .catch(console.error)
+  .then(() => spanExporter.forceFlush());