diff --git a/packages/ai-providers/server-ai-vercel/__tests__/VercelProvider.test.ts b/packages/ai-providers/server-ai-vercel/__tests__/VercelProvider.test.ts index 65a423a161..5dc97bb706 100644 --- a/packages/ai-providers/server-ai-vercel/__tests__/VercelProvider.test.ts +++ b/packages/ai-providers/server-ai-vercel/__tests__/VercelProvider.test.ts @@ -68,6 +68,52 @@ describe('VercelProvider', () => { }, }); }); + + it('supports v5 field names (inputTokens, outputTokens)', () => { + const mockResponse = { + usage: { + inputTokens: 40, + outputTokens: 60, + totalTokens: 100, + }, + }; + + const result = VercelProvider.createAIMetrics(mockResponse); + + expect(result).toEqual({ + success: true, + usage: { + total: 100, + input: 40, + output: 60, + }, + }); + }); + + it('prefers v5 field names over v4 when both are present', () => { + const mockResponse = { + usage: { + // v4 field names + promptTokens: 10, + completionTokens: 20, + // v5 field names (should be preferred) + inputTokens: 40, + outputTokens: 60, + totalTokens: 100, + }, + }; + + const result = VercelProvider.createAIMetrics(mockResponse); + + expect(result).toEqual({ + success: true, + usage: { + total: 100, + input: 40, // inputTokens preferred over promptTokens + output: 60, // outputTokens preferred over completionTokens + }, + }); + }); }); describe('invokeModel', () => { diff --git a/packages/ai-providers/server-ai-vercel/package.json b/packages/ai-providers/server-ai-vercel/package.json index 4d9e954991..4ba0efb36e 100644 --- a/packages/ai-providers/server-ai-vercel/package.json +++ b/packages/ai-providers/server-ai-vercel/package.json @@ -32,7 +32,6 @@ "@ai-sdk/google": "^2.0.0", "@ai-sdk/mistral": "^2.0.0", "@ai-sdk/openai": "^2.0.0", - "@ai-sdk/provider": "^2.0.0", "@launchdarkly/server-sdk-ai": "^0.12.2", "@trivago/prettier-plugin-sort-imports": "^4.1.1", "@types/jest": "^29.5.3", @@ -57,7 +56,6 @@ "@ai-sdk/google": "^2.0.0", "@ai-sdk/mistral": "^2.0.0", "@ai-sdk/openai": "^2.0.0", - 
"@ai-sdk/provider": "^2.0.0", "@launchdarkly/server-sdk-ai": "^0.12.2", "ai": "^4.0.0 || ^5.0.0" }, diff --git a/packages/ai-providers/server-ai-vercel/src/VercelProvider.ts b/packages/ai-providers/server-ai-vercel/src/VercelProvider.ts index d006b62ab7..fc13ae701a 100644 --- a/packages/ai-providers/server-ai-vercel/src/VercelProvider.ts +++ b/packages/ai-providers/server-ai-vercel/src/VercelProvider.ts @@ -1,5 +1,4 @@ -import { LanguageModelV2 } from '@ai-sdk/provider'; -import { generateText } from 'ai'; +import { generateText, LanguageModel } from 'ai'; import { AIProvider } from '@launchdarkly/server-sdk-ai'; import type { @@ -16,10 +15,10 @@ import type { * This provider integrates Vercel AI SDK with LaunchDarkly's tracking capabilities. */ export class VercelProvider extends AIProvider { - private _model: LanguageModelV2; + private _model: LanguageModel; private _parameters: Record; - constructor(model: LanguageModelV2, parameters: Record, logger?: LDLogger) { + constructor(model: LanguageModel, parameters: Record, logger?: LDLogger) { super(logger); this._model = model; this._parameters = parameters; @@ -47,6 +46,8 @@ export class VercelProvider extends AIProvider { */ async invokeModel(messages: LDMessage[]): Promise { // Call Vercel AI generateText + // LanguageModel (imported from 'ai') is the SDK's public model type covering both v4 and v5, + // so the model instance is passed to generateText directly — no type assertion is needed. const result = await generateText({ model: this._model, messages, @@ -71,7 +72,7 @@ export class VercelProvider extends AIProvider { /** * Get the underlying Vercel AI model instance. */ - getModel(): LanguageModelV2 { + getModel(): LanguageModel { return this._model; } @@ -98,16 +99,18 @@ export class VercelProvider extends AIProvider { * Create AI metrics information from a Vercel AI response.
* This method extracts token usage information and success status from Vercel AI responses * and returns a LaunchDarkly AIMetrics object. + * Supports both v4 and v5 field names for backward compatibility. */ static createAIMetrics(vercelResponse: any): LDAIMetrics { // Extract token usage if available let usage: LDTokenUsage | undefined; if (vercelResponse?.usage) { - const { promptTokens, completionTokens, totalTokens } = vercelResponse.usage; + const { totalTokens, inputTokens, promptTokens, outputTokens, completionTokens } = + vercelResponse.usage; usage = { - total: totalTokens || 0, - input: promptTokens || 0, - output: completionTokens || 0, + total: totalTokens ?? 0, + input: inputTokens ?? promptTokens ?? 0, + output: outputTokens ?? completionTokens ?? 0, }; } @@ -125,7 +128,7 @@ export class VercelProvider extends AIProvider { * @param aiConfig The LaunchDarkly AI configuration * @returns A Promise that resolves to a configured Vercel AI model */ - static async createVercelModel(aiConfig: LDAIConfig): Promise { + static async createVercelModel(aiConfig: LDAIConfig): Promise { const providerName = VercelProvider.mapProvider(aiConfig.provider?.name || ''); const modelName = aiConfig.model?.name || ''; // Parameters are not used in model creation but kept for future use @@ -143,28 +146,28 @@ export class VercelProvider extends AIProvider { } case 'anthropic': try { - const { anthropic } = await import('@ai-sdk/anthropic' as any); + const { anthropic } = await import('@ai-sdk/anthropic'); return anthropic(modelName); } catch (error) { throw new Error(`Failed to load @ai-sdk/anthropic: ${error}`); } case 'google': try { - const { google } = await import('@ai-sdk/google' as any); + const { google } = await import('@ai-sdk/google'); return google(modelName); } catch (error) { throw new Error(`Failed to load @ai-sdk/google: ${error}`); } case 'cohere': try { - const { cohere } = await import('@ai-sdk/cohere' as any); + const { cohere } = await 
import('@ai-sdk/cohere'); return cohere(modelName); } catch (error) { throw new Error(`Failed to load @ai-sdk/cohere: ${error}`); } case 'mistral': try { - const { mistral } = await import('@ai-sdk/mistral' as any); + const { mistral } = await import('@ai-sdk/mistral'); return mistral(modelName); } catch (error) { throw new Error(`Failed to load @ai-sdk/mistral: ${error}`);