Skip to content

Commit 5525329

Browse files
authored
fix(ai-sdk): model provider attribute transformation for openai (#609)
1 parent 288d5c8 commit 5525329

File tree

3 files changed

+638
-60
lines changed

3 files changed

+638
-60
lines changed
Lines changed: 113 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,113 @@
1+
import { ReadableSpan } from "@opentelemetry/sdk-trace-node";
2+
import { SpanAttributes } from "@traceloop/ai-semantic-conventions";
3+
4+
// Raw span names emitted by the Vercel AI SDK for text-generation calls;
// these get renamed to friendlier forms by transformAiSdkSpanName below.
const AI_GENERATE_TEXT_DO_GENERATE = "ai.generateText.doGenerate";
const AI_STREAM_TEXT_DO_STREAM = "ai.streamText.doStream";
// Vercel AI SDK attribute keys that the transforms below rewrite onto the
// Traceloop/OpenLLMetry semantic-convention keys from SpanAttributes.
const AI_RESPONSE_TEXT = "ai.response.text";
const AI_PROMPT_MESSAGES = "ai.prompt.messages";
const AI_USAGE_PROMPT_TOKENS = "ai.usage.promptTokens";
const AI_USAGE_COMPLETION_TOKENS = "ai.usage.completionTokens";
const AI_MODEL_PROVIDER = "ai.model.provider";
12+
export const transformAiSdkSpanName = (span: ReadableSpan): void => {
13+
const nameMap: Record<string, string> = {
14+
[AI_GENERATE_TEXT_DO_GENERATE]: "ai.generateText.generate",
15+
[AI_STREAM_TEXT_DO_STREAM]: "ai.streamText.stream",
16+
};
17+
18+
if (span.name in nameMap) {
19+
// Unfortunately, the span name is not writable as this is not the intended behavior
20+
// but it is a workaround to set the correct span name
21+
(span as any).name = nameMap[span.name];
22+
}
23+
};
24+
25+
export const transformResponseText = (
26+
attributes: Record<string, any>,
27+
): void => {
28+
if (AI_RESPONSE_TEXT in attributes) {
29+
attributes[`${SpanAttributes.LLM_COMPLETIONS}.0.content`] =
30+
attributes[AI_RESPONSE_TEXT];
31+
attributes[`${SpanAttributes.LLM_COMPLETIONS}.0.role`] = "assistant";
32+
delete attributes[AI_RESPONSE_TEXT];
33+
}
34+
};
35+
36+
export const transformPromptMessages = (
37+
attributes: Record<string, any>,
38+
): void => {
39+
if (AI_PROMPT_MESSAGES in attributes) {
40+
try {
41+
const messages = JSON.parse(attributes[AI_PROMPT_MESSAGES] as string);
42+
messages.forEach((msg: { role: string; content: any }, index: number) => {
43+
attributes[`${SpanAttributes.LLM_PROMPTS}.${index}.content`] =
44+
typeof msg.content === "string"
45+
? msg.content
46+
: JSON.stringify(msg.content);
47+
attributes[`${SpanAttributes.LLM_PROMPTS}.${index}.role`] = msg.role;
48+
});
49+
delete attributes[AI_PROMPT_MESSAGES];
50+
} catch {
51+
// Skip if JSON parsing fails
52+
}
53+
}
54+
};
55+
56+
export const transformPromptTokens = (
57+
attributes: Record<string, any>,
58+
): void => {
59+
if (AI_USAGE_PROMPT_TOKENS in attributes) {
60+
attributes[`${SpanAttributes.LLM_USAGE_PROMPT_TOKENS}`] =
61+
attributes[AI_USAGE_PROMPT_TOKENS];
62+
delete attributes[AI_USAGE_PROMPT_TOKENS];
63+
}
64+
};
65+
66+
export const transformCompletionTokens = (
67+
attributes: Record<string, any>,
68+
): void => {
69+
if (AI_USAGE_COMPLETION_TOKENS in attributes) {
70+
attributes[`${SpanAttributes.LLM_USAGE_COMPLETION_TOKENS}`] =
71+
attributes[AI_USAGE_COMPLETION_TOKENS];
72+
delete attributes[AI_USAGE_COMPLETION_TOKENS];
73+
}
74+
};
75+
76+
export const calculateTotalTokens = (attributes: Record<string, any>): void => {
77+
const promptTokens = attributes[`${SpanAttributes.LLM_USAGE_PROMPT_TOKENS}`];
78+
const completionTokens =
79+
attributes[`${SpanAttributes.LLM_USAGE_COMPLETION_TOKENS}`];
80+
81+
if (promptTokens && completionTokens) {
82+
attributes[`${SpanAttributes.LLM_USAGE_TOTAL_TOKENS}`] =
83+
Number(promptTokens) + Number(completionTokens);
84+
}
85+
};
86+
87+
export const transformVendor = (attributes: Record<string, any>): void => {
88+
if (AI_MODEL_PROVIDER in attributes) {
89+
const vendor = attributes[AI_MODEL_PROVIDER];
90+
if (vendor && vendor.startsWith("openai")) {
91+
attributes[SpanAttributes.LLM_SYSTEM] = "OpenAI";
92+
} else {
93+
attributes[SpanAttributes.LLM_SYSTEM] = vendor;
94+
}
95+
delete attributes[AI_MODEL_PROVIDER];
96+
}
97+
};
98+
99+
export const transformAiSdkAttributes = (
100+
attributes: Record<string, any>,
101+
): void => {
102+
transformResponseText(attributes);
103+
transformPromptMessages(attributes);
104+
transformPromptTokens(attributes);
105+
transformCompletionTokens(attributes);
106+
calculateTotalTokens(attributes);
107+
transformVendor(attributes);
108+
};
109+
110+
export const transformAiSdkSpan = (span: ReadableSpan): void => {
111+
transformAiSdkSpanName(span);
112+
transformAiSdkAttributes(span.attributes);
113+
};

packages/traceloop-sdk/src/lib/tracing/span-processor.ts

Lines changed: 3 additions & 60 deletions
Original file line numberDiff line numberDiff line change
@@ -14,6 +14,7 @@ import {
1414
WORKFLOW_NAME_KEY,
1515
} from "./tracing";
1616
import { SpanAttributes } from "@traceloop/ai-semantic-conventions";
17+
import { transformAiSdkSpan } from "./ai-sdk-transformations";
1718

1819
export const ALL_INSTRUMENTATION_LIBRARIES = "all" as const;
1920
type AllInstrumentationLibraries = typeof ALL_INSTRUMENTATION_LIBRARIES;
@@ -172,66 +173,8 @@ const onSpanEnd = (
172173
return;
173174
}
174175

175-
// Vercel AI Adapters
176-
const attributes = span.attributes;
177-
178-
// Adapt span names
179-
const nameMap: Record<string, string> = {
180-
"ai.generateText.doGenerate": "ai.generateText.generate",
181-
"ai.streamText.doStream": "ai.streamText.stream",
182-
};
183-
if (span.name in nameMap) {
184-
// Unfortunately, the span name is not writable as this is not the intended behavior
185-
// but it is a workaround to set the correct span name
186-
(span as any).name = nameMap[span.name];
187-
}
188-
189-
if ("ai.response.text" in attributes) {
190-
attributes[`${SpanAttributes.LLM_COMPLETIONS}.0.content`] =
191-
attributes["ai.response.text"];
192-
attributes[`${SpanAttributes.LLM_COMPLETIONS}.0.role`] = "assistant";
193-
delete attributes["ai.response.text"];
194-
}
195-
196-
if ("ai.prompt.messages" in attributes) {
197-
try {
198-
const messages = JSON.parse(attributes["ai.prompt.messages"] as string);
199-
messages.forEach(
200-
(msg: { role: string; content: any }, index: number) => {
201-
attributes[`${SpanAttributes.LLM_PROMPTS}.${index}.content`] =
202-
typeof msg.content === "string"
203-
? msg.content
204-
: JSON.stringify(msg.content);
205-
attributes[`${SpanAttributes.LLM_PROMPTS}.${index}.role`] =
206-
msg.role;
207-
},
208-
);
209-
delete attributes["ai.prompt.messages"];
210-
} catch {
211-
//Skip if JSON parsing fails
212-
}
213-
}
214-
215-
if ("ai.usage.promptTokens" in attributes) {
216-
attributes[`${SpanAttributes.LLM_USAGE_PROMPT_TOKENS}`] =
217-
attributes["ai.usage.promptTokens"];
218-
delete attributes["ai.usage.promptTokens"];
219-
}
220-
221-
if ("ai.usage.completionTokens" in attributes) {
222-
attributes[`${SpanAttributes.LLM_USAGE_COMPLETION_TOKENS}`] =
223-
attributes["ai.usage.completionTokens"];
224-
delete attributes["ai.usage.completionTokens"];
225-
}
226-
227-
if (
228-
attributes[`${SpanAttributes.LLM_USAGE_PROMPT_TOKENS}`] &&
229-
attributes[`${SpanAttributes.LLM_USAGE_COMPLETION_TOKENS}`]
230-
) {
231-
attributes[`${SpanAttributes.LLM_USAGE_TOTAL_TOKENS}`] =
232-
Number(attributes[`${SpanAttributes.LLM_USAGE_PROMPT_TOKENS}`]) +
233-
Number(attributes[`${SpanAttributes.LLM_USAGE_COMPLETION_TOKENS}`]);
234-
}
176+
// Normalize Vercel AI SDK span names and attributes (no-op for non-AI-SDK spans)
177+
transformAiSdkSpan(span);
235178

236179
originalOnEnd(span);
237180
};

0 commit comments

Comments
 (0)