
Commit dc07917

chore: Update VercelAI example to use Vercel AI Provider methods (#973)
1 parent 28d3650 commit dc07917

File tree: 2 files changed, +41 / -11 lines

packages/sdk/server-ai/examples/vercel-ai/package.json

Lines changed: 4 additions & 3 deletions
@@ -21,9 +21,10 @@
   "author": "LaunchDarkly",
   "license": "Apache-2.0",
   "dependencies": {
-    "@ai-sdk/openai": "2.0.30",
-    "@launchdarkly/node-server-sdk": "9.7.1",
-    "@launchdarkly/server-sdk-ai": "0.12.3",
+    "@ai-sdk/openai": "^2.0.30",
+    "@launchdarkly/node-server-sdk": "^9.7.1",
+    "@launchdarkly/server-sdk-ai": "^0.12.3",
+    "@launchdarkly/server-sdk-ai-vercel": "^0.1.2",
     "ai": "5.0.0",
     "zod": "^3.23.8"
   },
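
The only new dependency is @launchdarkly/server-sdk-ai-vercel, which supplies the VercelProvider helpers that the updated example calls. A minimal sketch of what it is imported for; the helper descriptions are paraphrased from the usage in src/index.ts below, not from the package's documentation:

// The updated example imports the provider like this (see the next file in this commit):
import { VercelProvider } from '@launchdarkly/server-sdk-ai-vercel';

// Helpers the example relies on:
// - VercelProvider.toVercelAISDK(aiConfig, openai, { nonInterpolatedMessages }) converts an
//   AI Config into call settings for the Vercel AI SDK's generateText/streamText.
// - VercelProvider.getAIMetricsFromResponse extracts metrics from a generateText result.
// - VercelProvider.getAIMetricsFromStream extracts metrics from a streamText result.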

packages/sdk/server-ai/examples/vercel-ai/src/index.ts

Lines changed: 37 additions & 8 deletions
@@ -4,6 +4,7 @@ import { generateText, streamText } from 'ai';

 import { init, type LDClient, type LDContext } from '@launchdarkly/node-server-sdk';
 import { initAi } from '@launchdarkly/server-sdk-ai';
+import { VercelProvider } from '@launchdarkly/server-sdk-ai-vercel';

 // Environment variables
 const sdkKey = process.env.LAUNCHDARKLY_SDK_KEY ?? '';
@@ -41,31 +42,59 @@ async function main() {
   // Get AI configuration from LaunchDarkly
   const aiConfig = await aiClient.config(aiConfigKey, context, { model: { name: 'gpt-4' } });

-  if (!aiConfig.enabled) {
+  if (!aiConfig.enabled || !aiConfig.tracker) {
     console.log('*** AI configuration is not enabled');
     process.exit(0);
   }

   console.log('Using model:', aiConfig.model?.name);

-  // Example of using generateText (non-streaming)
-  console.log('\n*** Generating text:');
   try {
     const userMessage = {
       role: 'user' as const,
       content: 'What can you help me with?',
     };

-    const result = await aiConfig.tracker.trackVercelAISDKGenerateTextMetrics(() =>
-      generateText(aiConfig.toVercelAISDK(openai, { nonInterpolatedMessages: [userMessage] })),
+    // Example of using generateText (non-streaming)
+    console.log('\n*** Generating text:');
+
+    // Convert config to Vercel AI SDK format
+    const vercelConfig = VercelProvider.toVercelAISDK(aiConfig, openai, {
+      nonInterpolatedMessages: [userMessage],
+    });
+
+    // Call the model and track metrics for the AI config
+    const result = await aiConfig.tracker.trackMetricsOf(
+      VercelProvider.getAIMetricsFromResponse,
+      () => generateText(vercelConfig),
     );
+
     console.log('Response:', result.text);
+  } catch (err) {
+    console.error('Error:', err);
+  }
+
+  // Example 2: Using streamText with trackStreamMetricsOf (streaming)
+  try {
+    const userMessage = {
+      role: 'user' as const,
+      content: 'Count from 1 to 5.',
+    };

-    process.stdout.write('Streaming Response: ');
-    const streamResult = aiConfig.tracker.trackVercelAISDKStreamTextMetrics(() =>
-      streamText(aiConfig.toVercelAISDK(openai, { nonInterpolatedMessages: [userMessage] })),
+    // Example of using streamText (streaming)
+    console.log('\n*** Streaming text:');
+    // Convert config to Vercel AI SDK format
+    const vercelConfig = VercelProvider.toVercelAISDK(aiConfig, openai, {
+      nonInterpolatedMessages: [userMessage],
+    });
+
+    // Stream is returned immediately (synchronously), metrics tracked in background
+    const streamResult = aiConfig.tracker.trackStreamMetricsOf(
+      () => streamText(vercelConfig),
+      VercelProvider.getAIMetricsFromStream,
     );

+    // Consume the stream immediately - no await needed before this!
     // eslint-disable-next-line no-restricted-syntax
     for await (const textPart of streamResult.textStream) {
       process.stdout.write(textPart);
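
Read together, the two hunks give the example its new end-to-end shape. The condensed sketch below is assembled from the diff and its context lines; the LDContext value, the config key, the waitForInitialization call, and the openai import are not shown in the hunks above and are assumptions, so treat this as illustrative rather than a copy of the updated file:

import { openai } from '@ai-sdk/openai'; // assumed: the provider import sits outside the shown hunks
import { generateText, streamText } from 'ai';
import { init, type LDContext } from '@launchdarkly/node-server-sdk';
import { initAi } from '@launchdarkly/server-sdk-ai';
import { VercelProvider } from '@launchdarkly/server-sdk-ai-vercel';

async function main() {
  const ldClient = init(process.env.LAUNCHDARKLY_SDK_KEY ?? '');
  await ldClient.waitForInitialization({ timeout: 10 }); // assumed; not part of the shown hunks
  const aiClient = initAi(ldClient);

  // Hypothetical context and config key; the real example builds these from environment variables.
  const context: LDContext = { kind: 'user', key: 'example-user-key' };
  const aiConfig = await aiClient.config('sample-ai-config', context, { model: { name: 'gpt-4' } });
  if (!aiConfig.enabled || !aiConfig.tracker) {
    ldClient.close();
    return;
  }

  const userMessage = { role: 'user' as const, content: 'What can you help me with?' };

  // Map the LaunchDarkly AI Config onto Vercel AI SDK call settings.
  const vercelConfig = VercelProvider.toVercelAISDK(aiConfig, openai, {
    nonInterpolatedMessages: [userMessage],
  });

  // Non-streaming: metrics are extracted from the completed generateText result.
  const result = await aiConfig.tracker.trackMetricsOf(
    VercelProvider.getAIMetricsFromResponse,
    () => generateText(vercelConfig),
  );
  console.log('Response:', result.text);

  // Streaming: the stream is returned synchronously and metrics are tracked as it completes.
  const streamResult = aiConfig.tracker.trackStreamMetricsOf(
    () => streamText(vercelConfig),
    VercelProvider.getAIMetricsFromStream,
  );
  for await (const textPart of streamResult.textStream) {
    process.stdout.write(textPart);
  }

  ldClient.close();
}

main().catch((err) => console.error(err));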

0 commit comments
