---
title: LangChain
description: "Adds instrumentation for LangChain."
supported:
  - javascript.node
  - javascript.aws-lambda
  - javascript.azure-functions
  - javascript.connect
  - javascript.express
  - javascript.fastify
  - javascript.gcp-functions
  - javascript.hapi
  - javascript.hono
  - javascript.koa
  - javascript.nestjs
  - javascript.electron
  - javascript.nextjs
  - javascript.nuxt
  - javascript.solidstart
  - javascript.sveltekit
  - javascript.react-router
  - javascript.remix
  - javascript.astro
  - javascript.bun
  - javascript.tanstackstart-react
  - javascript.cloudflare
---

<Alert>

This integration works in the Node.js, Cloudflare Workers, and Vercel Edge Functions runtimes. It requires SDK version `10.22.0` or higher.

</Alert>

_Import name: `Sentry.langChainIntegration`_

The `langChainIntegration` adds instrumentation for LangChain to capture spans by automatically wrapping LangChain operations and recording AI agent interactions with configurable input/output recording.

<PlatformSection notSupported={["javascript.cloudflare", "javascript.nextjs"]}>

It is enabled by default and will automatically capture spans for LangChain operations including chat models, LLM invocations, chains, and tool executions. You can opt in to capturing inputs and outputs by setting `recordInputs` and `recordOutputs` in the integration config:

```javascript
Sentry.init({
  dsn: "____PUBLIC_DSN____",
  tracesSampleRate: 1.0,
  integrations: [
    Sentry.langChainIntegration({
      recordInputs: true,
      recordOutputs: true,
    }),
  ],
});
```

</PlatformSection>

<PlatformSection supported={["javascript.cloudflare"]}>

For Cloudflare Workers, you need to manually instrument LangChain operations using the `createLangChainCallbackHandler` helper:

```javascript
import * as Sentry from "@sentry/cloudflare";
import { ChatAnthropic } from "@langchain/anthropic";

// Create a LangChain callback handler
const callbackHandler = Sentry.createLangChainCallbackHandler({
  recordInputs: true, // Optional: record input prompts/messages
  recordOutputs: true, // Optional: record output responses
});

// Use with chat models
const model = new ChatAnthropic({
  model: "claude-3-5-sonnet-20241022",
  apiKey: process.env.ANTHROPIC_API_KEY,
});

await model.invoke("Tell me a joke", {
  callbacks: [callbackHandler],
});
```

</PlatformSection>

## Options

### `recordInputs`

_Type: `boolean`_

Records inputs to LangChain operations (such as prompts and messages).

Defaults to `true` if `sendDefaultPii` is `true`.

```javascript
Sentry.init({
  integrations: [Sentry.langChainIntegration({ recordInputs: true })],
});
```

### `recordOutputs`

_Type: `boolean`_

Records outputs from LangChain operations (such as generated text and responses).

Defaults to `true` if `sendDefaultPii` is `true`.

```javascript
Sentry.init({
  integrations: [Sentry.langChainIntegration({ recordOutputs: true })],
});
```

## Configuration

By default this integration adds tracing support for LangChain operations including:

- **Chat model invocations** (`gen_ai.chat`) - Captures spans for chat model calls
- **LLM invocations** (`gen_ai.pipeline`) - Captures spans for LLM pipeline executions
- **Chain executions** (`gen_ai.invoke_agent`) - Captures spans for chain invocations
- **Tool executions** (`gen_ai.execute_tool`) - Captures spans for tool calls

### Supported Runnables

The integration automatically instruments the following LangChain runnable methods:

- `invoke()` - Single execution
- `stream()` - Streaming execution
- `batch()` - Batch execution

<PlatformSection notSupported={["javascript.cloudflare", "javascript.nextjs"]}>

### Supported Providers

The automatic instrumentation supports the following LangChain provider packages:

- `@langchain/anthropic`
- `@langchain/openai`
- `@langchain/google-genai`
- `@langchain/mistralai`
- `@langchain/google-vertexai`
- `@langchain/groq`

</PlatformSection>

<PlatformSection supported={["javascript.nextjs"]}>

## Edge runtime

LangChain operations are automatically instrumented in the Node.js runtime. For Next.js applications using the Edge runtime, you need to manually instrument LangChain operations using the callback handler:

```javascript
import * as Sentry from "@sentry/nextjs";
import { ChatAnthropic } from "@langchain/anthropic";

// Create a LangChain callback handler
const callbackHandler = Sentry.createLangChainCallbackHandler({
  recordInputs: true,
  recordOutputs: true,
});

// Use with chat models
const model = new ChatAnthropic({
  model: "claude-3-5-sonnet-20241022",
  apiKey: process.env.ANTHROPIC_API_KEY,
});

await model.invoke("Tell me a joke", {
  callbacks: [callbackHandler],
});
```

</PlatformSection>

## Supported Versions

- `langchain`: `>=0.1.0 <1.0.0`