@@ -234,75 +234,75 @@ The resulting trace will look [like this](https://smith.langchain.com/public/296
### Next.js

- The [default OTEL setup](https://nextjs.org/docs/app/api-reference/file-conventions/instrumentation) for Next.js will trace all routes, including
- those that do not contain LLM traces. We instead suggest manually instrumenting specific routes by creating and passing a tracer as shown below:
+ First, install the [`@vercel/otel`](https://www.npmjs.com/package/@vercel/otel) package:

- ```ts
- import { generateText } from "ai";
- import { openai } from "@ai-sdk/openai";
- import { traceable } from "langsmith/traceable";
+ <CodeTabs
+   groupId="client-language"
+   tabs={[
+     {
+       value: "npm",
+       label: "npm",
+       language: "bash",
+       content: `npm install @vercel/otel`,
+     },
+     {
+       value: "yarn",
+       label: "yarn",
+       language: "bash",
+       content: `yarn add @vercel/otel`,
+     },
+     {
+       value: "pnpm",
+       label: "pnpm",
+       language: "bash",
+       content: `pnpm add @vercel/otel`,
+     },
+   ]}
+ />
+
+ Then, set up an [`instrumentation.ts`](https://nextjs.org/docs/app/guides/instrumentation) file in your root directory.
+ Call `initializeOTEL` and pass the resulting `DEFAULT_LANGSMITH_SPAN_PROCESSOR` into the `spanProcessors` field of your `registerOTel(...)` call.
+ It should look something like this:

+ ```ts
+ import { registerOTel } from "@vercel/otel";
import { initializeOTEL } from "langsmith/experimental/otel/setup";
- import { LangSmithOTLPTraceExporter } from "langsmith/experimental/otel/exporter";
- import {
-   BatchSpanProcessor,
-   BasicTracerProvider,
- } from "@opentelemetry/sdk-trace-base";
- import { AsyncHooksContextManager } from "@opentelemetry/context-async-hooks";
- import { context } from "@opentelemetry/api";

- import { after } from "next/server";
+ const { DEFAULT_LANGSMITH_SPAN_PROCESSOR } = initializeOTEL({});

- const exporter = new LangSmithOTLPTraceExporter();
- const processor = new BatchSpanProcessor(exporter);
- const contextManager = new AsyncHooksContextManager();
+ export function register() {
+   registerOTel({
+     serviceName: "your-project-name",
+     spanProcessors: [DEFAULT_LANGSMITH_SPAN_PROCESSOR],
+   });
+ }
+ ```

- contextManager.enable();
- context.setGlobalContextManager(contextManager);
+ And finally, in your API routes, call `initializeOTEL` as well and add an `experimental_telemetry` field to your AI SDK calls:

- const provider = new BasicTracerProvider({
-   spanProcessors: [processor],
- });
+ ```ts
+ import { generateText } from "ai";
+ import { openai } from "@ai-sdk/openai";

- // Pass instantiated provider and context manager to LangSmith
- initializeOTEL({
-   globalTracerProvider: provider,
-   globalContextManager: contextManager,
- });
+ import { initializeOTEL } from "langsmith/experimental/otel/setup";

- // highlight-next-line
- const tracer = provider.getTracer("ai-sdk-telemetry");
+ initializeOTEL();

export async function GET() {
-   after(async () => {
-     await provider.shutdown();
-   });
-
-   const wrappedText = traceable(
-     async (content: string) => {
-       const { text } = await generateText({
-         model: openai("gpt-4.1-nano"),
-         messages: [{ role: "user", content }],
-         experimental_telemetry: {
-           isEnabled: true,
-           // highlight-next-line
-           tracer,
-         },
-         maxSteps: 10,
-       });
-
-       return { text };
+   const { text } = await generateText({
+     model: openai("gpt-4.1-nano"),
+     messages: [{ role: "user", content: "Why is the sky blue?" }],
+     experimental_telemetry: {
+       isEnabled: true,
    },
-     // highlight-next-line
-     { name: "parentTraceable", tracer }
-   );
-
-   const { text } = await wrappedText("Why is the sky blue?");
+   });

  return new Response(text);
}
```

+ You can also wrap parts of your code in `traceable`s for more granularity, as shown in the sketch below.
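+
+ For example, here is a minimal sketch adapted from the earlier version of this guide (the `wrappedText` helper and `parentTraceable` name are just illustrative): wrapping the `generateText` call in a `traceable` groups it under a parent run in LangSmith.
+
+ ```ts
+ import { generateText } from "ai";
+ import { openai } from "@ai-sdk/openai";
+ import { traceable } from "langsmith/traceable";
+
+ // Wrapping the call in a traceable creates a parent run in LangSmith;
+ // the AI SDK's telemetry spans are nested underneath it.
+ const wrappedText = traceable(
+   async (content: string) => {
+     const { text } = await generateText({
+       model: openai("gpt-4.1-nano"),
+       messages: [{ role: "user", content }],
+       experimental_telemetry: { isEnabled: true },
+     });
+     return { text };
+   },
+   { name: "parentTraceable" }
+ );
+
+ const { text } = await wrappedText("Why is the sky blue?");
+ ```
+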
### Sentry
If you're using Sentry, you can attach the LangSmith trace exporter to Sentry's default OpenTelemetry instrumentation as shown in the example below.