Skip to content

Commit 7250b4f

Browse files
committed
fix: Support previous v4 of Vercel AI SDK
fix: Fix metric tracking for v5 responses
1 parent ba26071 commit 7250b4f

File tree

4 files changed

+68
-25
lines changed

4 files changed

+68
-25
lines changed

packages/ai-providers/server-ai-vercel/__tests__/VercelProvider.test.ts

Lines changed: 46 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -68,6 +68,52 @@ describe('VercelProvider', () => {
6868
},
6969
});
7070
});
71+
72+
it('supports v5 field names (inputTokens, outputTokens)', () => {
73+
const mockResponse = {
74+
usage: {
75+
inputTokens: 40,
76+
outputTokens: 60,
77+
totalTokens: 100,
78+
},
79+
};
80+
81+
const result = VercelProvider.createAIMetrics(mockResponse);
82+
83+
expect(result).toEqual({
84+
success: true,
85+
usage: {
86+
total: 100,
87+
input: 40,
88+
output: 60,
89+
},
90+
});
91+
});
92+
93+
it('prefers v5 field names over v4 when both are present', () => {
94+
const mockResponse = {
95+
usage: {
96+
// v4 field names
97+
promptTokens: 10,
98+
completionTokens: 20,
99+
// v5 field names (should be preferred)
100+
inputTokens: 40,
101+
outputTokens: 60,
102+
totalTokens: 100,
103+
},
104+
};
105+
106+
const result = VercelProvider.createAIMetrics(mockResponse);
107+
108+
expect(result).toEqual({
109+
success: true,
110+
usage: {
111+
total: 100,
112+
input: 40, // inputTokens preferred over promptTokens
113+
output: 60, // outputTokens preferred over completionTokens
114+
},
115+
});
116+
});
71117
});
72118

73119
describe('invokeModel', () => {

packages/ai-providers/server-ai-vercel/package.json

Lines changed: 3 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -27,16 +27,7 @@
2727
"author": "LaunchDarkly",
2828
"license": "Apache-2.0",
2929
"dependencies": {
30-
"@ai-sdk/provider": "^2.0.0",
31-
"@launchdarkly/server-sdk-ai": "^0.12.1",
32-
"ai": "^5.0.0"
33-
},
34-
"optionalDependencies": {
35-
"@ai-sdk/anthropic": "^2.0.0",
36-
"@ai-sdk/cohere": "^2.0.0",
37-
"@ai-sdk/google": "^2.0.0",
38-
"@ai-sdk/mistral": "^2.0.0",
39-
"@ai-sdk/openai": "^2.0.0"
30+
"@launchdarkly/server-sdk-ai": "^0.12.1"
4031
},
4132
"devDependencies": {
4233
"@ai-sdk/anthropic": "^2.0.0",
@@ -49,6 +40,7 @@
4940
"@types/jest": "^29.5.3",
5041
"@typescript-eslint/eslint-plugin": "^6.20.0",
5142
"@typescript-eslint/parser": "^6.20.0",
43+
"ai": "^5.0.0",
5244
"eslint": "^8.45.0",
5345
"eslint-config-airbnb-base": "^15.0.0",
5446
"eslint-config-airbnb-typescript": "^17.1.0",
@@ -62,6 +54,6 @@
6254
"typescript": "5.1.6"
6355
},
6456
"peerDependencies": {
65-
"@launchdarkly/js-server-sdk-common": "2.x"
57+
"ai": "^4.0.0 || ^5.0.0"
6658
}
6759
}

packages/ai-providers/server-ai-vercel/src/VercelProvider.ts

Lines changed: 17 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -1,12 +1,11 @@
1-
import { LanguageModelV2 } from '@ai-sdk/provider';
2-
import { generateText } from 'ai';
1+
import { generateText, LanguageModel } from 'ai';
32

4-
import { LDLogger } from '@launchdarkly/js-server-sdk-common';
53
import {
64
AIProvider,
75
ChatResponse,
86
LDAIConfig,
97
LDAIMetrics,
8+
LDLogger,
109
LDMessage,
1110
LDTokenUsage,
1211
} from '@launchdarkly/server-sdk-ai';
@@ -16,10 +15,10 @@ import {
1615
* This provider integrates Vercel AI SDK with LaunchDarkly's tracking capabilities.
1716
*/
1817
export class VercelProvider extends AIProvider {
19-
private _model: LanguageModelV2;
18+
private _model: LanguageModel;
2019
private _parameters: Record<string, unknown>;
2120

22-
constructor(model: LanguageModelV2, parameters: Record<string, unknown>, logger?: LDLogger) {
21+
constructor(model: LanguageModel, parameters: Record<string, unknown>, logger?: LDLogger) {
2322
super(logger);
2423
this._model = model;
2524
this._parameters = parameters;
@@ -47,6 +46,8 @@ export class VercelProvider extends AIProvider {
4746
*/
4847
async invokeModel(messages: LDMessage[]): Promise<ChatResponse> {
4948
// Call Vercel AI generateText
49+
    // The model is typed as the SDK's own LanguageModel, so generateText accepts it directly —
50+
    // no type assertion is needed; this works for both v4 and v5 per the "ai" peer dependency range.
5051
const result = await generateText({
5152
model: this._model,
5253
messages,
@@ -71,7 +72,7 @@ export class VercelProvider extends AIProvider {
7172
/**
7273
* Get the underlying Vercel AI model instance.
7374
*/
74-
getModel(): LanguageModelV2 {
75+
getModel(): LanguageModel {
7576
return this._model;
7677
}
7778

@@ -98,16 +99,18 @@ export class VercelProvider extends AIProvider {
9899
* Create AI metrics information from a Vercel AI response.
99100
* This method extracts token usage information and success status from Vercel AI responses
100101
* and returns a LaunchDarkly AIMetrics object.
102+
* Supports both v4 and v5 field names for backward compatibility.
101103
*/
102104
static createAIMetrics(vercelResponse: any): LDAIMetrics {
103105
// Extract token usage if available
104106
let usage: LDTokenUsage | undefined;
105107
if (vercelResponse?.usage) {
106-
const { promptTokens, completionTokens, totalTokens } = vercelResponse.usage;
108+
const { totalTokens, inputTokens, promptTokens, outputTokens, completionTokens } =
109+
vercelResponse.usage;
107110
usage = {
108111
total: totalTokens || 0,
109-
input: promptTokens || 0,
110-
output: completionTokens || 0,
112+
input: inputTokens || promptTokens || 0,
113+
output: outputTokens || completionTokens || 0,
111114
};
112115
}
113116

@@ -125,7 +128,7 @@ export class VercelProvider extends AIProvider {
125128
* @param aiConfig The LaunchDarkly AI configuration
126129
* @returns A Promise that resolves to a configured Vercel AI model
127130
*/
128-
static async createVercelModel(aiConfig: LDAIConfig): Promise<LanguageModelV2> {
131+
static async createVercelModel(aiConfig: LDAIConfig): Promise<LanguageModel> {
129132
const providerName = VercelProvider.mapProvider(aiConfig.provider?.name || '');
130133
const modelName = aiConfig.model?.name || '';
131134
// Parameters are not used in model creation but kept for future use
@@ -143,28 +146,28 @@ export class VercelProvider extends AIProvider {
143146
}
144147
case 'anthropic':
145148
try {
146-
const { anthropic } = await import('@ai-sdk/anthropic' as any);
149+
const { anthropic } = await import('@ai-sdk/anthropic');
147150
return anthropic(modelName);
148151
} catch (error) {
149152
throw new Error(`Failed to load @ai-sdk/anthropic: ${error}`);
150153
}
151154
case 'google':
152155
try {
153-
const { google } = await import('@ai-sdk/google' as any);
156+
const { google } = await import('@ai-sdk/google');
154157
return google(modelName);
155158
} catch (error) {
156159
throw new Error(`Failed to load @ai-sdk/google: ${error}`);
157160
}
158161
case 'cohere':
159162
try {
160-
const { cohere } = await import('@ai-sdk/cohere' as any);
163+
const { cohere } = await import('@ai-sdk/cohere');
161164
return cohere(modelName);
162165
} catch (error) {
163166
throw new Error(`Failed to load @ai-sdk/cohere: ${error}`);
164167
}
165168
case 'mistral':
166169
try {
167-
const { mistral } = await import('@ai-sdk/mistral' as any);
170+
const { mistral } = await import('@ai-sdk/mistral');
168171
return mistral(modelName);
169172
} catch (error) {
170173
throw new Error(`Failed to load @ai-sdk/mistral: ${error}`);

packages/sdk/server-ai/src/index.ts

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -19,4 +19,6 @@ export function initAi(ldClient: LDClientMin): LDAIClient {
1919
return new LDAIClientImpl(ldClient);
2020
}
2121

22+
export { LDLogger } from '@launchdarkly/js-server-sdk-common';
23+
2224
export * from './api';

0 commit comments

Comments
 (0)