Skip to content

Commit 47a0875

Browse files
committed
favor totalUsage over usage metrics
1 parent 15e3774 commit 47a0875

File tree

1 file changed

+55
-23
lines changed

1 file changed

+55
-23
lines changed

packages/ai-providers/server-ai-vercel/src/VercelProvider.ts

Lines changed: 55 additions & 23 deletions
Original file line numberDiff line numberDiff line change
@@ -113,57 +113,89 @@ export class VercelProvider extends AIProvider {
113113
return mapping[lowercasedName] || lowercasedName;
114114
}
115115

116+
/**
117+
* Map Vercel AI SDK usage data to LaunchDarkly token usage.
118+
* Supports both v4 and v5 field names for backward compatibility.
119+
*
120+
* @param usageData Usage data from Vercel AI SDK (may be from usage or totalUsage)
121+
* @returns LDTokenUsage or undefined if no usage data provided
122+
*/
123+
static mapUsageDataToLDTokenUsage(usageData: any): LDTokenUsage | undefined {
124+
if (!usageData) {
125+
return undefined;
126+
}
127+
128+
const { totalTokens, inputTokens, promptTokens, outputTokens, completionTokens } = usageData;
129+
return {
130+
total: totalTokens ?? 0,
131+
input: inputTokens ?? promptTokens ?? 0,
132+
output: outputTokens ?? completionTokens ?? 0,
133+
};
134+
}
135+
116136
/**
 * Create AI metrics information from a Vercel AI response.
 * This method extracts token usage information and success status from Vercel AI responses
 * and returns a LaunchDarkly AIMetrics object.
 * Supports both v4 and v5 field names for backward compatibility.
 */
static createAIMetrics(vercelResponse: any): LDAIMetrics {
  const finishReason = vercelResponse?.finishReason ?? 'unknown';

  // Favor totalUsage over usage: totalUsage is cumulative across all steps.
  // Both are plain objects when present, so a truthiness fallback mirrors the
  // explicit if/else branch.
  const usageData: any = vercelResponse?.totalUsage || vercelResponse?.usage;

  return {
    // A finishReason of 'error' marks the response as unsuccessful.
    success: finishReason !== 'error',
    usage: VercelProvider.mapUsageDataToLDTokenUsage(usageData),
  };
}
141161

142162
/**
143-
* Create a metrics extractor for Vercel AI SDK streaming results.
163+
* Create AI metrics from a Vercel AI SDK streaming result.
144164
* Use this with tracker.trackStreamMetricsOf() for streaming operations like streamText.
145165
*
146-
* The extractor waits for the stream's response promise to resolve, then extracts
147-
* metrics from the completed response.
166+
* This method waits for the stream to complete, then extracts metrics using totalUsage
167+
* (preferred for cumulative usage across all steps) or usage if totalUsage is unavailable.
148168
*
149-
* @returns A metrics extractor function for streaming results
169+
* @param stream The stream result from streamText()
170+
* @returns A Promise that resolves to LDAIMetrics
150171
*
151172
* @example
152173
* const stream = aiConfig.tracker.trackStreamMetricsOf(
153174
* () => streamText(vercelConfig),
154-
* VercelProvider.createStreamMetricsExtractor()
175+
* VercelProvider.createStreamMetrics
155176
* );
156177
*
157178
* for await (const chunk of stream.textStream) {
158179
* process.stdout.write(chunk);
159180
* }
160181
*/
161-
static createStreamMetricsExtractor() {
162-
return async (stream: any): Promise<LDAIMetrics> => {
163-
// Wait for stream to complete
164-
const result = await stream.response;
165-
// Extract metrics from completed response
166-
return VercelProvider.createAIMetrics(result);
182+
static async createStreamMetrics(stream: any): Promise<LDAIMetrics> {
183+
const finishReason = (await stream.finishReason?.catch(() => 'error')) ?? 'unknown';
184+
185+
// favor totalUsage over usage for cumulative usage across all steps
186+
let usageData: any;
187+
if (stream.totalUsage) {
188+
usageData = await stream.totalUsage;
189+
} else if (stream.usage) {
190+
usageData = await stream.usage;
191+
}
192+
193+
const usage = VercelProvider.mapUsageDataToLDTokenUsage(usageData);
194+
const success = finishReason !== 'error';
195+
196+
return {
197+
success,
198+
usage,
167199
};
168200
}
169201

0 commit comments

Comments
 (0)