1- import { LanguageModelV2 } from '@ai-sdk/provider' ;
2- import { generateText } from 'ai' ;
1+ import { generateText , LanguageModel } from 'ai' ;
32
4- import { LDLogger } from '@launchdarkly/js-server-sdk-common' ;
53import {
64 AIProvider ,
75 ChatResponse ,
86 LDAIConfig ,
97 LDAIMetrics ,
8+ LDLogger ,
109 LDMessage ,
1110 LDTokenUsage ,
1211} from '@launchdarkly/server-sdk-ai' ;
@@ -16,10 +15,10 @@ import {
1615 * This provider integrates Vercel AI SDK with LaunchDarkly's tracking capabilities.
1716 */
1817export class VercelProvider extends AIProvider {
19- private _model : LanguageModelV2 ;
18+ private _model : LanguageModel ;
2019 private _parameters : Record < string , unknown > ;
2120
22- constructor ( model : LanguageModelV2 , parameters : Record < string , unknown > , logger ?: LDLogger ) {
21+ constructor ( model : LanguageModel , parameters : Record < string , unknown > , logger ?: LDLogger ) {
2322 super ( logger ) ;
2423 this . _model = model ;
2524 this . _parameters = parameters ;
@@ -47,6 +46,8 @@ export class VercelProvider extends AIProvider {
4746 */
4847 async invokeModel ( messages : LDMessage [ ] ) : Promise < ChatResponse > {
4948 // Call Vercel AI generateText
49+ // No type assertion needed: this._model is typed as the 'ai' package's LanguageModel,
50+ // which is exactly the type generateText expects for its 'model' option.
5051 const result = await generateText ( {
5152 model : this . _model ,
5253 messages,
@@ -71,7 +72,7 @@ export class VercelProvider extends AIProvider {
7172 /**
7273 * Get the underlying Vercel AI model instance.
7374 */
74- getModel ( ) : LanguageModelV2 {
75+ getModel ( ) : LanguageModel {
7576 return this . _model ;
7677 }
7778
@@ -98,16 +99,18 @@ export class VercelProvider extends AIProvider {
9899 * Create AI metrics information from a Vercel AI response.
99100 * This method extracts token usage information and success status from Vercel AI responses
100101 * and returns a LaunchDarkly AIMetrics object.
102+ * Supports both v4 and v5 field names for backward compatibility.
101103 */
102104 static createAIMetrics ( vercelResponse : any ) : LDAIMetrics {
103105 // Extract token usage if available
104106 let usage : LDTokenUsage | undefined ;
105107 if ( vercelResponse ?. usage ) {
106- const { promptTokens, completionTokens, totalTokens } = vercelResponse . usage ;
108+ const { totalTokens, inputTokens, promptTokens, outputTokens, completionTokens } =
109+ vercelResponse . usage ;
107110 usage = {
108111 total : totalTokens || 0 ,
109- input : promptTokens || 0 ,
110- output : completionTokens || 0 ,
112+ input : inputTokens || promptTokens || 0 ,
113+ output : outputTokens || completionTokens || 0 ,
111114 } ;
112115 }
113116
@@ -125,7 +128,7 @@ export class VercelProvider extends AIProvider {
125128 * @param aiConfig The LaunchDarkly AI configuration
126129 * @returns A Promise that resolves to a configured Vercel AI model
127130 */
128- static async createVercelModel ( aiConfig : LDAIConfig ) : Promise < LanguageModelV2 > {
131+ static async createVercelModel ( aiConfig : LDAIConfig ) : Promise < LanguageModel > {
129132 const providerName = VercelProvider . mapProvider ( aiConfig . provider ?. name || '' ) ;
130133 const modelName = aiConfig . model ?. name || '' ;
131134 // Parameters are not used in model creation but kept for future use
@@ -143,28 +146,28 @@ export class VercelProvider extends AIProvider {
143146 }
144147 case 'anthropic' :
145148 try {
146- const { anthropic } = await import ( '@ai-sdk/anthropic' as any ) ;
149+ const { anthropic } = await import ( '@ai-sdk/anthropic' ) ;
147150 return anthropic ( modelName ) ;
148151 } catch ( error ) {
149152 throw new Error ( `Failed to load @ai-sdk/anthropic: ${ error } ` ) ;
150153 }
151154 case 'google' :
152155 try {
153- const { google } = await import ( '@ai-sdk/google' as any ) ;
156+ const { google } = await import ( '@ai-sdk/google' ) ;
154157 return google ( modelName ) ;
155158 } catch ( error ) {
156159 throw new Error ( `Failed to load @ai-sdk/google: ${ error } ` ) ;
157160 }
158161 case 'cohere' :
159162 try {
160- const { cohere } = await import ( '@ai-sdk/cohere' as any ) ;
163+ const { cohere } = await import ( '@ai-sdk/cohere' ) ;
161164 return cohere ( modelName ) ;
162165 } catch ( error ) {
163166 throw new Error ( `Failed to load @ai-sdk/cohere: ${ error } ` ) ;
164167 }
165168 case 'mistral' :
166169 try {
167- const { mistral } = await import ( '@ai-sdk/mistral' as any ) ;
170+ const { mistral } = await import ( '@ai-sdk/mistral' ) ;
168171 return mistral ( modelName ) ;
169172 } catch ( error ) {
170173 throw new Error ( `Failed to load @ai-sdk/mistral: ${ error } ` ) ;
0 commit comments