From f520c2d7740bd05b95e161d1c7a57d2269349c55 Mon Sep 17 00:00:00 2001
From: Nicolas Hrubec
Date: Tue, 11 Nov 2025 15:36:00 +0100
Subject: [PATCH 1/4] Ensure that gen_ai.response.tool_calls property contains sensible data in vercel ai

---
 .../node-integration-tests/suites/tracing/vercelai/test.ts | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/dev-packages/node-integration-tests/suites/tracing/vercelai/test.ts b/dev-packages/node-integration-tests/suites/tracing/vercelai/test.ts
index aac243eff11c..e8b2f1187c8d 100644
--- a/dev-packages/node-integration-tests/suites/tracing/vercelai/test.ts
+++ b/dev-packages/node-integration-tests/suites/tracing/vercelai/test.ts
@@ -330,7 +330,7 @@ describe('Vercel AI integration', () => {
           'vercel.ai.prompt': '{"prompt":"What is the weather in San Francisco?"}',
           'vercel.ai.response.finishReason': 'tool-calls',
           'gen_ai.response.text': 'Tool call completed!',
-          'gen_ai.response.tool_calls': expect.any(String),
+          'gen_ai.response.tool_calls': expect.stringContaining('getWeather'),
           'vercel.ai.settings.maxRetries': 2,
           'vercel.ai.settings.maxSteps': 1,
           'vercel.ai.streaming': false,
@@ -364,7 +364,7 @@ describe('Vercel AI integration', () => {
           'vercel.ai.response.model': 'mock-model-id',
           'gen_ai.response.text': 'Tool call completed!',
           'vercel.ai.response.timestamp': expect.any(String),
-          'gen_ai.response.tool_calls': expect.any(String),
+          'gen_ai.response.tool_calls': expect.stringContaining('getWeather'),
           'vercel.ai.settings.maxRetries': 2,
           'vercel.ai.streaming': false,
           'gen_ai.request.model': 'mock-model-id',

From 24923d8537d484b174f30c720fdd7e08a159e593 Mon Sep 17 00:00:00 2001
From: Nicolas Hrubec
Date: Tue, 11 Nov 2025 15:51:18 +0100
Subject: [PATCH 2/4] Ensure gen_ai.chat spans contain sensible data for gen_ai.response.tool_calls

---
 .../node-integration-tests/suites/tracing/langchain/test.ts | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/dev-packages/node-integration-tests/suites/tracing/langchain/test.ts b/dev-packages/node-integration-tests/suites/tracing/langchain/test.ts
index ff0e95b8f8ad..11522241c678 100644
--- a/dev-packages/node-integration-tests/suites/tracing/langchain/test.ts
+++ b/dev-packages/node-integration-tests/suites/tracing/langchain/test.ts
@@ -179,7 +179,7 @@ describe('LangChain integration', () => {
           'gen_ai.response.id': expect.any(String),
           'gen_ai.response.model': expect.any(String),
           'gen_ai.response.stop_reason': 'tool_use',
-          'gen_ai.response.tool_calls': expect.any(String),
+          'gen_ai.response.tool_calls': expect.stringContaining('get_weather'),
         }),
         description: 'chat claude-3-5-sonnet-20241022',
         op: 'gen_ai.chat',

From 53fa154036de3477537bbb95975947402b6ac5d0 Mon Sep 17 00:00:00 2001
From: Nicolas Hrubec
Date: Wed, 12 Nov 2025 09:43:40 +0100
Subject: [PATCH 3/4] Try to add langchain to node integration tests

---
 .../node-integration-tests/package.json          |  1 +
 .../suites/tracing/langchain/scenario.mjs        |  1 +
 yarn.lock                                        | 53 +++++++++++++++++++
 3 files changed, 55 insertions(+)

diff --git a/dev-packages/node-integration-tests/package.json b/dev-packages/node-integration-tests/package.json
index 325a99860c4e..6c766b5bc5e5 100644
--- a/dev-packages/node-integration-tests/package.json
+++ b/dev-packages/node-integration-tests/package.json
@@ -58,6 +58,7 @@
     "ioredis": "^5.4.1",
     "kafkajs": "2.2.4",
     "knex": "^2.5.1",
+    "langchain": "^0.3.28",
     "lru-memoizer": "2.3.0",
     "mongodb": "^3.7.3",
     "mongodb-memory-server-global": "^10.1.4",
diff --git a/dev-packages/node-integration-tests/suites/tracing/langchain/scenario.mjs b/dev-packages/node-integration-tests/suites/tracing/langchain/scenario.mjs
index 2c60e55ff77e..8078e0046aad 100644
--- a/dev-packages/node-integration-tests/suites/tracing/langchain/scenario.mjs
+++ b/dev-packages/node-integration-tests/suites/tracing/langchain/scenario.mjs
@@ -1,4 +1,5 @@
 import { ChatAnthropic } from '@langchain/anthropic';
+import * as langchain from 'langchain';
 import * as Sentry from '@sentry/node';
 import express from 'express';

diff --git a/yarn.lock b/yarn.lock
index 99e434f5efff..f12695b0b6e3 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -4931,6 +4931,22 @@
     zod "^3.25.32"
     zod-to-json-schema "^3.22.3"

+"@langchain/openai@>=0.1.0 <0.7.0":
+  version "0.6.16"
+  resolved "https://registry.yarnpkg.com/@langchain/openai/-/openai-0.6.16.tgz#fed8bc90127d15255e0e4ee527c7eadb75b21e9c"
+  integrity sha512-v9INBOjE0w6ZrUE7kP9UkRyNsV7daH7aPeSOsPEJ35044UI3udPHwNduQ8VmaOUsD26OvSdg1b1GDhrqWLMaRw==
+  dependencies:
+    js-tiktoken "^1.0.12"
+    openai "5.12.2"
+    zod "^3.25.32"
+
+"@langchain/textsplitters@>=0.0.0 <0.2.0":
+  version "0.1.0"
+  resolved "https://registry.yarnpkg.com/@langchain/textsplitters/-/textsplitters-0.1.0.tgz#f37620992192df09ecda3dfbd545b36a6bcbae46"
+  integrity sha512-djI4uw9rlkAb5iMhtLED+xJebDdAG935AdP4eRTB02R7OB/act55Bj9wsskhZsvuyQRpO4O1wQOp85s6T6GWmw==
+  dependencies:
+    js-tiktoken "^1.0.12"
+
 "@leichtgewicht/ip-codec@^2.0.1":
   version "2.0.4"
   resolved "https://registry.yarnpkg.com/@leichtgewicht/ip-codec/-/ip-codec-2.0.4.tgz#b2ac626d6cb9c8718ab459166d4bb405b8ffa78b"
@@ -20296,6 +20312,11 @@ jsonparse@^1.2.0, jsonparse@^1.3.1:
   resolved "https://registry.yarnpkg.com/jsonparse/-/jsonparse-1.3.1.tgz#3f4dae4a91fac315f71062f8521cc239f1366280"
   integrity sha512-POQXvpdL69+CluYsillJ7SUhKvytYjW9vG/GKpnf+xP8UWgYEM/RaMzHHofbALDiKbbP1W8UEYmgGl39WkPZsg==

+jsonpointer@^5.0.1:
+  version "5.0.1"
+  resolved "https://registry.yarnpkg.com/jsonpointer/-/jsonpointer-5.0.1.tgz#2110e0af0900fd37467b5907ecd13a7884a1b559"
+  integrity sha512-p/nXbhSEcu3pZRdkW1OfJhpsVtW1gd4Wa1fnQc9YLiTfAjn0312eMKimbdIQzuZl9aa9xUGaRlP9T/CJE/ditQ==
+
 jsonwebtoken@^9.0.0:
   version "9.0.2"
   resolved "https://registry.yarnpkg.com/jsonwebtoken/-/jsonwebtoken-9.0.2.tgz#65ff91f4abef1784697d40952bb1998c504caaf3"
@@ -20474,6 +20495,23 @@ lambda-local@^2.2.0:
     dotenv "^16.3.1"
     winston "^3.10.0"

+langchain@^0.3.28:
+  version "0.3.36"
+  resolved "https://registry.yarnpkg.com/langchain/-/langchain-0.3.36.tgz#6ab7f4028adae16bf74538aa2127df9fe2fddf02"
+  integrity sha512-PqC19KChFF0QlTtYDFgfEbIg+SCnCXox29G8tY62QWfj9bOW7ew2kgWmPw5qoHLOTKOdQPvXET20/1Pdq8vAtQ==
+  dependencies:
+    "@langchain/openai" ">=0.1.0 <0.7.0"
+    "@langchain/textsplitters" ">=0.0.0 <0.2.0"
+    js-tiktoken "^1.0.12"
+    js-yaml "^4.1.0"
+    jsonpointer "^5.0.1"
+    langsmith "^0.3.67"
+    openapi-types "^12.1.3"
+    p-retry "4"
+    uuid "^10.0.0"
+    yaml "^2.2.1"
+    zod "^3.25.32"
+
 langsmith@^0.3.67:
   version "0.3.74"
   resolved "https://registry.npmjs.org/langsmith/-/langsmith-0.3.74.tgz#014d31a9ff7530b54f0d797502abd512ce8fb6fb"
@@ -23934,11 +23972,21 @@ open@^9.1.0:
     is-inside-container "^1.0.0"
     is-wsl "^2.2.0"

+openai@5.12.2:
+  version "5.12.2"
+  resolved "https://registry.yarnpkg.com/openai/-/openai-5.12.2.tgz#512ab6b80eb8414837436e208f1b951442b97761"
+  integrity sha512-xqzHHQch5Tws5PcKR2xsZGX9xtch+JQFz5zb14dGqlshmmDAFBFEWmeIpf7wVqWV+w7Emj7jRgkNJakyKE0tYQ==
+
 openai@5.18.1:
   version "5.18.1"
   resolved "https://registry.yarnpkg.com/openai/-/openai-5.18.1.tgz#1c4884aefcada7ec684771e03c860c381f1902c1"
   integrity sha512-iXSOfLlOL+jgnFr5CGrB2SEZw5C92o1nrFW2SasoAXj4QxGhfeJPgg8zkX+vaCfX80cT6CWjgaGnq7z9XzbyRw==

+openapi-types@^12.1.3:
+  version "12.1.3"
+  resolved "https://registry.yarnpkg.com/openapi-types/-/openapi-types-12.1.3.tgz#471995eb26c4b97b7bd356aacf7b91b73e777dd3"
+  integrity sha512-N4YtSYJqghVu4iek2ZUvcN/0aqH1kRDuNqzcycDxhOUpg7GdvLa2F3DgS6yBNhInhv2r/6I0Flkn7CqL8+nIcw==
+
 opener@^1.5.2:
   version "1.5.2"
   resolved "https://registry.yarnpkg.com/opener/-/opener-1.5.2.tgz#5d37e1f35077b9dcac4301372271afdeb2a13598"
@@ -31796,6 +31844,11 @@ yaml@^1.10.0, yaml@^1.10.2:
   resolved "https://registry.yarnpkg.com/yaml/-/yaml-1.10.2.tgz#2301c5ffbf12b467de8da2333a459e29e7920e4b"
   integrity sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg==

+yaml@^2.2.1:
+  version "2.8.1"
+  resolved "https://registry.yarnpkg.com/yaml/-/yaml-2.8.1.tgz#1870aa02b631f7e8328b93f8bc574fac5d6c4d79"
+  integrity sha512-lcYcMxX2PO9XMGvAJkJ3OsNMw+/7FKes7/hgerGUYWIoWu5j/+YQqcZr5JnPZWzOsEBgMbSbiSTn/dv/69Mkpw==
+
 yaml@^2.5.0:
   version "2.5.1"
   resolved "https://registry.yarnpkg.com/yaml/-/yaml-2.5.1.tgz#c9772aacf62cb7494a95b0c4f1fb065b563db130"

From 48ca001e8a7f1fedbc4458602ddd17fc10b9f777 Mon Sep 17 00:00:00 2001
From: Nicolas Hrubec
Date: Wed, 12 Nov 2025 11:15:44 +0100
Subject: [PATCH 4/4] Try to trigger gen_ai.invoke_agent spans but doesn't work so far

---
 .../suites/tracing/langchain/scenario.mjs | 22 ++++++++++++++++++-
 .../suites/tracing/langchain/test.ts      |  4 ++++
 2 files changed, 25 insertions(+), 1 deletion(-)

diff --git a/dev-packages/node-integration-tests/suites/tracing/langchain/scenario.mjs b/dev-packages/node-integration-tests/suites/tracing/langchain/scenario.mjs
index 8078e0046aad..9a58f4cf93e0 100644
--- a/dev-packages/node-integration-tests/suites/tracing/langchain/scenario.mjs
+++ b/dev-packages/node-integration-tests/suites/tracing/langchain/scenario.mjs
@@ -1,5 +1,6 @@
 import { ChatAnthropic } from '@langchain/anthropic';
-import * as langchain from 'langchain';
+import { LLMChain } from 'langchain/chains';
+import { PromptTemplate } from '@langchain/core/prompts';
 import * as Sentry from '@sentry/node';
 import express from 'express';

@@ -101,6 +102,25 @@ async function run() {
     } catch {
       // Expected error
     }
+
+    // Test 4: LLMChain
+    const chainModel = new ChatAnthropic({
+      model: 'claude-3-5-sonnet-20241022',
+      temperature: 0,
+      apiKey: 'mock-api-key',
+      clientOptions: {
+        baseURL: baseUrl,
+      },
+    });
+
+    const prompt = PromptTemplate.fromTemplate('Answer the following question: {question}');
+
+    const chain = new LLMChain({
+      llm: chainModel,
+      prompt: prompt,
+    });
+
+    await chain.call({ question: 'What is 2+2?' });
   });

   await Sentry.flush(2000);
diff --git a/dev-packages/node-integration-tests/suites/tracing/langchain/test.ts b/dev-packages/node-integration-tests/suites/tracing/langchain/test.ts
index 11522241c678..7eeb18826342 100644
--- a/dev-packages/node-integration-tests/suites/tracing/langchain/test.ts
+++ b/dev-packages/node-integration-tests/suites/tracing/langchain/test.ts
@@ -68,6 +68,10 @@ describe('LangChain integration', () => {
           origin: 'auto.ai.langchain',
           status: 'internal_error',
         }),
+        // Fourth span - agent executor
+        expect.objectContaining({
+          op: 'gen_ai.invoke_agent',
+        }),
       ]),
     };