Commit 82fdae9

feat: add opentelemetry tracing to agent-api
1 parent 181e36d

3 files changed: +48 −1 lines changed

packages/agent-api/host.json

Lines changed: 1 addition & 0 deletions
@@ -1,5 +1,6 @@
 {
   "version": "2.0",
+  "telemetryMode": "OpenTelemetry",
   "logging": {
     "applicationInsights": {
       "samplingSettings": {

packages/agent-api/package.json

Lines changed: 7 additions & 1 deletion
@@ -3,7 +3,7 @@
   "version": "1.0.0",
   "description": "Azure Functions API for Contoso Burgers AI Agent",
   "private": true,
-  "main": "./dist/src/functions/*.js",
+  "main": "./dist/src/{tracing.js,functions/*.js}",
   "scripts": {
     "build": "tsc",
     "watch": "tsc -w",
@@ -18,13 +18,19 @@
   "dependencies": {
     "@azure/functions": "^4.0.0",
     "@azure/identity": "^4.2.0",
+    "@azure/monitor-opentelemetry": "^1.12.0",
     "@azure/storage-blob": "^12.17.0",
     "@langchain/azure-cosmosdb": "^0.2.2",
     "@langchain/core": "^0.3.18",
     "@langchain/langgraph": "^0.4.9",
     "@langchain/mcp-adapters": "^0.6.0",
     "@langchain/openai": "^0.6.9",
     "@microsoft/ai-chat-protocol": "^1.0.0-beta.20240814.1",
+    "@opentelemetry/exporter-trace-otlp-http": "^0.203.0",
+    "@opentelemetry/instrumentation": "^0.200.0",
+    "@opentelemetry/sdk-trace-base": "^1.30.1",
+    "@opentelemetry/sdk-trace-node": "^2.0.1",
+    "@traceloop/instrumentation-langchain": "^0.18.0",
     "langchain": "^0.3.6"
   },
   "devDependencies": {
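
Note: the widened "main" glob is what makes the Functions host load dist/src/tracing.js alongside the compiled function modules, so the tracing setup runs before any handler executes. As a rough sketch of an equivalent, hypothetical setup (not part of this commit), a single entry file could import the tracing module first and then the function registrations; the file and module names below are assumptions for illustration only.

// src/index.ts (hypothetical alternative entry point, with "main": "./dist/src/index.js")
// Import order matters: tracing must be initialized before the modules
// instrumented by @traceloop/instrumentation-langchain are loaded.
import "./tracing";
import "./functions/chat"; // assumed function module name, for illustration only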

packages/agent-api/src/tracing.ts

Lines changed: 40 additions & 0 deletions
@@ -0,0 +1,40 @@
+// OpenTelemetry tracing setup
+// This file is loaded automatically by the Azure Functions host,
+// as configured in package.json "main" field.
+
+import { useAzureMonitor } from "@azure/monitor-opentelemetry";
+import { OTLPTraceExporter } from "@opentelemetry/exporter-trace-otlp-http";
+import { NodeTracerProvider } from "@opentelemetry/sdk-trace-node";
+import { BatchSpanProcessor } from "@opentelemetry/sdk-trace-base";
+import { LangChainInstrumentation } from "@traceloop/instrumentation-langchain";
+import { registerInstrumentations } from "@opentelemetry/instrumentation";
+
+let isTracingInitialized = false;
+if (!isTracingInitialized) {
+  // Initialize tracing and export to Azure Monitor or OTLP endpoint
+  // When running locally, you can use a local OpenTelemetry collector to receive the traces,
+  // for example VS Code AI Toolkit's extension trace collector:
+  // https://marketplace.visualstudio.com/items?itemName=ms-windows-ai-studio.windows-ai-studio
+  const appInsightsConnectionString = process.env["APPLICATIONINSIGHTS_CONNECTION_STRING"];
+  if (appInsightsConnectionString) {
+    useAzureMonitor({
+      azureMonitorExporterOptions: { connectionString: appInsightsConnectionString }
+    });
+  } else {
+    console.warn("Using local OTLP endpoint at http://localhost:4318, run a local OpenTelemetry collector to receive the traces");
+
+    const exporter = new OTLPTraceExporter({
+      url: "http://localhost:4318/v1/traces",
+    });
+    const provider = new NodeTracerProvider({
+      spanProcessors: [new BatchSpanProcessor(exporter as any) as any],
+    });
+    provider.register();
+  }
+
+  registerInstrumentations({
+    instrumentations: [new LangChainInstrumentation()],
+  });
+
+  isTracingInitialized = true;
+}
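
Note: with this file loaded, the LangChain instrumentation emits spans for chains and model calls automatically; the `as any` casts presumably bridge the mixed 1.x/2.x OpenTelemetry SDK typings in the dependency list. For custom spans around agent-specific work, a minimal sketch using @opentelemetry/api could look like the following. The helper and tracer name are assumptions, not part of this commit, and @opentelemetry/api is assumed to be available as a (peer) dependency of the SDK packages above.

// tracing-helpers.ts (hypothetical): wrap a unit of work in a custom span.
import { trace, SpanStatusCode } from "@opentelemetry/api";

const tracer = trace.getTracer("agent-api"); // tracer name is illustrative

export async function traced<T>(name: string, work: () => Promise<T>): Promise<T> {
  // startActiveSpan makes the new span current, so spans created by the
  // LangChain instrumentation inside `work` are parented under it.
  return tracer.startActiveSpan(name, async (span) => {
    try {
      return await work();
    } catch (err) {
      span.setStatus({ code: SpanStatusCode.ERROR, message: String(err) });
      throw err;
    } finally {
      span.end();
    }
  });
}

A handler could then call traced("process-chat-request", () => ...) around the agent invocation to group the auto-generated LangChain spans under one request span.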
