Skip to content
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -68,6 +68,52 @@ describe('VercelProvider', () => {
},
});
});

it('supports v5 field names (inputTokens, outputTokens)', () => {
  // v5 responses report usage via inputTokens/outputTokens rather than
  // the v4 promptTokens/completionTokens names.
  const v5Response = {
    usage: {
      inputTokens: 40,
      outputTokens: 60,
      totalTokens: 100,
    },
  };

  const metrics = VercelProvider.createAIMetrics(v5Response);

  // The v5 names must map straight through to LaunchDarkly's usage shape.
  expect(metrics).toEqual({
    success: true,
    usage: { total: 100, input: 40, output: 60 },
  });
});

it('prefers v5 field names over v4 when both are present', () => {
  // A response could carry both naming schemes (e.g. a compatibility shim);
  // the v5 names must win so metrics reflect the newer SDK's accounting.
  const mockResponse = {
    usage: {
      // v4 field names
      promptTokens: 10,
      completionTokens: 20,
      // v5 field names (should be preferred)
      inputTokens: 40,
      outputTokens: 60,
      totalTokens: 100,
    },
  };

  const result = VercelProvider.createAIMetrics(mockResponse);

  expect(result).toEqual({
    success: true,
    usage: {
      total: 100,
      input: 40, // inputTokens preferred over promptTokens
      output: 60, // outputTokens preferred over completionTokens
    },
  });
});
});

describe('invokeModel', () => {
Expand Down
14 changes: 3 additions & 11 deletions packages/ai-providers/server-ai-vercel/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -27,16 +27,7 @@
"author": "LaunchDarkly",
"license": "Apache-2.0",
"dependencies": {
"@ai-sdk/provider": "^2.0.0",
"@launchdarkly/server-sdk-ai": "^0.12.1",
"ai": "^5.0.0"
},
"optionalDependencies": {
"@ai-sdk/anthropic": "^2.0.0",
"@ai-sdk/cohere": "^2.0.0",
"@ai-sdk/google": "^2.0.0",
"@ai-sdk/mistral": "^2.0.0",
"@ai-sdk/openai": "^2.0.0"
"@launchdarkly/server-sdk-ai": "^0.12.1"
},
"devDependencies": {
"@ai-sdk/anthropic": "^2.0.0",
Expand All @@ -49,6 +40,7 @@
"@types/jest": "^29.5.3",
"@typescript-eslint/eslint-plugin": "^6.20.0",
"@typescript-eslint/parser": "^6.20.0",
"ai": "^5.0.0",
"eslint": "^8.45.0",
"eslint-config-airbnb-base": "^15.0.0",
"eslint-config-airbnb-typescript": "^17.1.0",
Expand All @@ -62,6 +54,6 @@
"typescript": "5.1.6"
},
"peerDependencies": {
"@launchdarkly/js-server-sdk-common": "2.x"
"ai": "^4.0.0 || ^5.0.0"
}
}
31 changes: 17 additions & 14 deletions packages/ai-providers/server-ai-vercel/src/VercelProvider.ts
Original file line number Diff line number Diff line change
@@ -1,12 +1,11 @@
import { LanguageModelV2 } from '@ai-sdk/provider';
import { generateText } from 'ai';
import { generateText, LanguageModel } from 'ai';

import { LDLogger } from '@launchdarkly/js-server-sdk-common';
import {
AIProvider,
ChatResponse,
LDAIConfig,
LDAIMetrics,
LDLogger,
LDMessage,
LDTokenUsage,
} from '@launchdarkly/server-sdk-ai';
Expand All @@ -16,10 +15,10 @@
* This provider integrates Vercel AI SDK with LaunchDarkly's tracking capabilities.
*/
export class VercelProvider extends AIProvider {
private _model: LanguageModelV2;
private _model: LanguageModel;
private _parameters: Record<string, unknown>;

constructor(model: LanguageModelV2, parameters: Record<string, unknown>, logger?: LDLogger) {
constructor(model: LanguageModel, parameters: Record<string, unknown>, logger?: LDLogger) {
super(logger);
this._model = model;
this._parameters = parameters;
Expand Down Expand Up @@ -47,6 +46,8 @@
*/
async invokeModel(messages: LDMessage[]): Promise<ChatResponse> {
// Call Vercel AI generateText
// Type assertion: our MinLanguageModel is compatible with the expected LanguageModel interface
// The generateText function will work with any object that has the required properties
const result = await generateText({
model: this._model,
messages,
Expand All @@ -71,7 +72,7 @@
/**
* Get the underlying Vercel AI model instance.
*/
getModel(): LanguageModelV2 {
getModel(): LanguageModel {
return this._model;
}

Expand All @@ -98,16 +99,18 @@
* Create AI metrics information from a Vercel AI response.
* This method extracts token usage information and success status from Vercel AI responses
* and returns a LaunchDarkly AIMetrics object.
* Supports both v4 and v5 field names for backward compatibility.
*/
static createAIMetrics(vercelResponse: any): LDAIMetrics {
  // Extract token usage if available. Prefer the v5 field names
  // (inputTokens/outputTokens) and fall back to the v4 names
  // (promptTokens/completionTokens) for backward compatibility.
  //
  // Use nullish coalescing (??) rather than || so that a legitimate 0
  // token count in a v5 field is honored instead of incorrectly falling
  // back to the v4 field's value (|| treats 0 as "missing").
  let usage: LDTokenUsage | undefined;
  if (vercelResponse?.usage) {
    const { totalTokens, inputTokens, promptTokens, outputTokens, completionTokens } =
      vercelResponse.usage;
    usage = {
      total: totalTokens ?? 0,
      input: inputTokens ?? promptTokens ?? 0,
      output: outputTokens ?? completionTokens ?? 0,
    };
  }

Expand All @@ -125,7 +128,7 @@
* @param aiConfig The LaunchDarkly AI configuration
* @returns A Promise that resolves to a configured Vercel AI model
*/
static async createVercelModel(aiConfig: LDAIConfig): Promise<LanguageModelV2> {
static async createVercelModel(aiConfig: LDAIConfig): Promise<LanguageModel> {
const providerName = VercelProvider.mapProvider(aiConfig.provider?.name || '');
const modelName = aiConfig.model?.name || '';
// Parameters are not used in model creation but kept for future use
Expand All @@ -136,35 +139,35 @@
switch (providerName) {
case 'openai':
try {
const { openai } = await import('@ai-sdk/openai');

Check failure on line 142 in packages/ai-providers/server-ai-vercel/src/VercelProvider.ts

View workflow job for this annotation

GitHub Actions / build-test-vercel-provider

'@ai-sdk/openai' should be listed in the project's dependencies, not devDependencies
return openai(modelName);
} catch (error) {
throw new Error(`Failed to load @ai-sdk/openai: ${error}`);
}
case 'anthropic':
try {
const { anthropic } = await import('@ai-sdk/anthropic' as any);
const { anthropic } = await import('@ai-sdk/anthropic');

Check failure on line 149 in packages/ai-providers/server-ai-vercel/src/VercelProvider.ts

View workflow job for this annotation

GitHub Actions / build-test-vercel-provider

'@ai-sdk/anthropic' should be listed in the project's dependencies, not devDependencies
return anthropic(modelName);
} catch (error) {
throw new Error(`Failed to load @ai-sdk/anthropic: ${error}`);
}
case 'google':
try {
const { google } = await import('@ai-sdk/google' as any);
const { google } = await import('@ai-sdk/google');

Check failure on line 156 in packages/ai-providers/server-ai-vercel/src/VercelProvider.ts

View workflow job for this annotation

GitHub Actions / build-test-vercel-provider

'@ai-sdk/google' should be listed in the project's dependencies, not devDependencies
return google(modelName);
} catch (error) {
throw new Error(`Failed to load @ai-sdk/google: ${error}`);
}
case 'cohere':
try {
const { cohere } = await import('@ai-sdk/cohere' as any);
const { cohere } = await import('@ai-sdk/cohere');

Check failure on line 163 in packages/ai-providers/server-ai-vercel/src/VercelProvider.ts

View workflow job for this annotation

GitHub Actions / build-test-vercel-provider

'@ai-sdk/cohere' should be listed in the project's dependencies, not devDependencies
return cohere(modelName);
} catch (error) {
throw new Error(`Failed to load @ai-sdk/cohere: ${error}`);
}
case 'mistral':
try {
const { mistral } = await import('@ai-sdk/mistral' as any);
const { mistral } = await import('@ai-sdk/mistral');

Check failure on line 170 in packages/ai-providers/server-ai-vercel/src/VercelProvider.ts

View workflow job for this annotation

GitHub Actions / build-test-vercel-provider

'@ai-sdk/mistral' should be listed in the project's dependencies, not devDependencies
return mistral(modelName);
} catch (error) {
throw new Error(`Failed to load @ai-sdk/mistral: ${error}`);
Expand Down
2 changes: 2 additions & 0 deletions packages/sdk/server-ai/src/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -19,4 +19,6 @@ export function initAi(ldClient: LDClientMin): LDAIClient {
return new LDAIClientImpl(ldClient);
}

export { LDLogger } from '@launchdarkly/js-server-sdk-common';

export * from './api';
Loading