Skip to content

Commit 03002ce

Browse files
committed
chore: Update VercelAI example to use Vercel AI Provider methods
1 parent 8d57904 commit 03002ce

File tree

2 files changed

+43
-8
lines changed

2 files changed

+43
-8
lines changed

packages/sdk/server-ai/examples/vercel-ai/package.json

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -24,6 +24,7 @@
2424
"@ai-sdk/openai": "2.0.30",
2525
"@launchdarkly/node-server-sdk": "9.7.1",
2626
"@launchdarkly/server-sdk-ai": "0.12.3",
27+
"@launchdarkly/server-sdk-ai-vercel": "^0.2.0",
2728
"ai": "5.0.0",
2829
"zod": "^3.23.8"
2930
},

packages/sdk/server-ai/examples/vercel-ai/src/index.ts

Lines changed: 42 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -4,6 +4,7 @@ import { generateText, streamText } from 'ai';
44

55
import { init, type LDClient, type LDContext } from '@launchdarkly/node-server-sdk';
66
import { initAi } from '@launchdarkly/server-sdk-ai';
7+
import { VercelProvider } from '@launchdarkly/server-sdk-ai-vercel';
78

89
// Environment variables
910
const sdkKey = process.env.LAUNCHDARKLY_SDK_KEY ?? '';
@@ -41,31 +42,64 @@ async function main() {
4142
// Get AI configuration from LaunchDarkly
4243
const aiConfig = await aiClient.config(aiConfigKey, context, { model: { name: 'gpt-4' } });
4344

44-
if (!aiConfig.enabled) {
45+
if (!aiConfig.enabled || !aiConfig.tracker) {
4546
console.log('*** AI configuration is not enabled');
4647
process.exit(0);
4748
}
4849

4950
console.log('Using model:', aiConfig.model?.name);
5051

51-
// Example of using generateText (non-streaming)
52-
console.log('\n*** Generating text:');
5352
try {
5453
const userMessage = {
5554
role: 'user' as const,
5655
content: 'What can you help me with?',
5756
};
5857

59-
const result = await aiConfig.tracker.trackVercelAISDKGenerateTextMetrics(() =>
60-
generateText(aiConfig.toVercelAISDK(openai, { nonInterpolatedMessages: [userMessage] })),
58+
// Example of using generateText (non-streaming)
59+
console.log('\n*** Generating text:');
60+
61+
// Convert config to Vercel AI SDK format
62+
const vercelConfig = VercelProvider.toVercelAISDK(
63+
aiConfig,
64+
openai,
65+
{ nonInterpolatedMessages: [userMessage] },
66+
);
67+
68+
// Track metrics using trackMetricsOf with VercelProvider.createAIMetrics
69+
const result = await aiConfig.tracker.trackMetricsOf(
70+
VercelProvider.createAIMetrics,
71+
() => generateText(vercelConfig),
6172
);
73+
6274
console.log('Response:', result.text);
75+
} catch (err) {
76+
console.error('Error:', err);
77+
}
6378

64-
process.stdout.write('Streaming Response: ');
65-
const streamResult = aiConfig.tracker.trackVercelAISDKStreamTextMetrics(() =>
66-
streamText(aiConfig.toVercelAISDK(openai, { nonInterpolatedMessages: [userMessage] })),
79+
// Example 2: Using streamText with trackStreamMetricsOf (streaming)
80+
try {
81+
const userMessage = {
82+
role: 'user' as const,
83+
content: 'Count from 1 to 5.',
84+
};
85+
86+
// Example of using streamText (streaming)
87+
console.log('\n*** Streaming text:');
88+
// Convert config to Vercel AI SDK format
89+
const vercelConfig = VercelProvider.toVercelAISDK(
90+
aiConfig,
91+
openai,
92+
{ nonInterpolatedMessages: [userMessage] },
93+
);
94+
95+
// Track streaming metrics using trackStreamMetricsOf with provider's extractor
96+
// Stream is returned immediately (synchronously), metrics tracked in background
97+
const streamResult = aiConfig.tracker.trackStreamMetricsOf(
98+
() => streamText(vercelConfig),
99+
VercelProvider.createStreamMetricsExtractor(),
67100
);
68101

102+
// Consume the stream immediately - no await needed before this!
69103
// eslint-disable-next-line no-restricted-syntax
70104
for await (const textPart of streamResult.textStream) {
71105
process.stdout.write(textPart);

0 commit comments

Comments
 (0)