
Commit d794cfb

feat(core): Prepend vercel ai attributes with vercel.ai.X
1 parent 6179229 · commit d794cfb

3 files changed: +136 −129 lines changed


dev-packages/e2e-tests/test-applications/nextjs-15/tests/ai-test.test.ts

Lines changed: 10 additions & 10 deletions
@@ -31,34 +31,34 @@ test('should create AI spans with correct attributes', async ({ page }) => {
 
   // First AI call - should have telemetry enabled and record inputs/outputs (sendDefaultPii: true)
   /* const firstPipelineSpan = aiPipelineSpans[0];
-  expect(firstPipelineSpan?.data?.['ai.model.id']).toBe('mock-model-id');
-  expect(firstPipelineSpan?.data?.['ai.model.provider']).toBe('mock-provider');
-  expect(firstPipelineSpan?.data?.['ai.prompt']).toContain('Where is the first span?');
+  expect(firstPipelineSpan?.data?.['vercel.ai.model.id']).toBe('mock-model-id');
+  expect(firstPipelineSpan?.data?.['vercel.ai.model.provider']).toBe('mock-provider');
+  expect(firstPipelineSpan?.data?.['vercel.ai.prompt']).toContain('Where is the first span?');
   expect(firstPipelineSpan?.data?.['gen_ai.response.text']).toBe('First span here!');
   expect(firstPipelineSpan?.data?.['gen_ai.usage.input_tokens']).toBe(10);
   expect(firstPipelineSpan?.data?.['gen_ai.usage.output_tokens']).toBe(20); */
 
   // Second AI call - explicitly enabled telemetry
   const secondPipelineSpan = aiPipelineSpans[0];
-  expect(secondPipelineSpan?.data?.['ai.prompt']).toContain('Where is the second span?');
+  expect(secondPipelineSpan?.data?.['vercel.ai.prompt']).toContain('Where is the second span?');
   expect(secondPipelineSpan?.data?.['gen_ai.response.text']).toContain('Second span here!');
 
   // Third AI call - with tool calls
   /* const thirdPipelineSpan = aiPipelineSpans[2];
-  expect(thirdPipelineSpan?.data?.['ai.response.finishReason']).toBe('tool-calls');
+  expect(thirdPipelineSpan?.data?.['vercel.ai.response.finishReason']).toBe('tool-calls');
   expect(thirdPipelineSpan?.data?.['gen_ai.usage.input_tokens']).toBe(15);
   expect(thirdPipelineSpan?.data?.['gen_ai.usage.output_tokens']).toBe(25); */
 
   // Tool call span
   /* const toolSpan = toolCallSpans[0];
-  expect(toolSpan?.data?.['ai.toolCall.name']).toBe('getWeather');
-  expect(toolSpan?.data?.['ai.toolCall.id']).toBe('call-1');
-  expect(toolSpan?.data?.['ai.toolCall.args']).toContain('San Francisco');
-  expect(toolSpan?.data?.['ai.toolCall.result']).toContain('Sunny, 72°F'); */
+  expect(toolSpan?.data?.['vercel.ai.toolCall.name']).toBe('getWeather');
+  expect(toolSpan?.data?.['vercel.ai.toolCall.id']).toBe('call-1');
+  expect(toolSpan?.data?.['vercel.ai.toolCall.args']).toContain('San Francisco');
+  expect(toolSpan?.data?.['vercel.ai.toolCall.result']).toContain('Sunny, 72°F'); */
 
   // Verify the fourth call was not captured (telemetry disabled)
   const promptsInSpans = spans
-    .map(span => span.data?.['ai.prompt'])
+    .map(span => span.data?.['vercel.ai.prompt'])
     .filter((prompt): prompt is string => prompt !== undefined);
   const hasDisabledPrompt = promptsInSpans.some(prompt => prompt.includes('Where is the third span?'));
   expect(hasDisabledPrompt).toBe(false);
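
Note: the diff above only updates test assertions. For readers unfamiliar with the change, the sketch below illustrates the kind of key remapping the commit title describes: prefixing Vercel AI SDK attributes such as 'ai.prompt' with 'vercel.' so they are reported as 'vercel.ai.prompt'. This is a minimal, assumed illustration, not the SDK's actual implementation; the helper name renameVercelAiAttributes is hypothetical.

// Sketch only (assumption, not SDK source): prefix Vercel AI SDK span
// attribute keys ('ai.*') with 'vercel.', leaving OpenTelemetry GenAI
// keys ('gen_ai.*') untouched.
type SpanAttributes = Record<string, unknown>;

function renameVercelAiAttributes(attributes: SpanAttributes): SpanAttributes {
  const renamed: SpanAttributes = {};
  for (const [key, value] of Object.entries(attributes)) {
    renamed[key.startsWith('ai.') ? `vercel.${key}` : key] = value;
  }
  return renamed;
}

// Example: 'ai.prompt' becomes 'vercel.ai.prompt'; 'gen_ai.usage.input_tokens' stays as-is.
console.log(
  renameVercelAiAttributes({
    'ai.prompt': 'Where is the second span?',
    'gen_ai.usage.input_tokens': 10,
  }),
);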
