Commit fa245b2

feat: Add vercel example for AI SDK
1 parent 9d4e225 commit fa245b2

5 files changed: +200 -0 lines changed


package.json

Lines changed: 1 addition & 0 deletions
@@ -33,6 +33,7 @@
     "packages/sdk/server-ai",
     "packages/sdk/server-ai/examples/bedrock",
     "packages/sdk/server-ai/examples/openai",
+    "packages/sdk/server-ai/examples/vercel-ai",
     "packages/telemetry/browser-telemetry",
     "contract-tests",
     "packages/sdk/combined-browser"
Lines changed: 49 additions & 0 deletions
@@ -0,0 +1,49 @@
# LaunchDarkly AI SDK for Vercel AI Example

This package demonstrates the integration of LaunchDarkly's AI SDK with OpenAI via the Vercel AI SDK, allowing you to leverage LaunchDarkly's AI Config capabilities in AI-powered applications built with the Vercel AI SDK.

## Installation and Build

When running as part of the js-core monorepo, the project uses local dependencies. As such, those dependencies need to be built first.

In the root of the repository run:

```bash
yarn
```

And then:

```bash
yarn build
```

## Configuration

Before running the example, set the following environment variables (an example follows the list):

- `LAUNCHDARKLY_SDK_KEY`: Your LaunchDarkly SDK key
- `LAUNCHDARKLY_AI_CONFIG_KEY`: Your LaunchDarkly AI Config key (defaults to 'sample-ai-config' if not set)
- `OPENAI_API_KEY`: Your OpenAI API key
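
For example, you might export them in your shell before running the example. The values shown here are placeholders, not real credentials:

```bash
export LAUNCHDARKLY_SDK_KEY="sdk-your-launchdarkly-sdk-key"
export LAUNCHDARKLY_AI_CONFIG_KEY="sample-ai-config"
export OPENAI_API_KEY="sk-your-openai-api-key"
```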

## Usage

The main script (`index.ts`) demonstrates how to (see the sketch after this list):

1. Initialize the LaunchDarkly SDK
2. Set up a user context
3. Initialize the LaunchDarkly AI client
4. Retrieve an AI model configuration
5. Send a prompt to an OpenAI model through the Vercel AI SDK
6. Track token usage
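
In outline, the core calls look like this. The snippet is condensed from the full `index.ts` added in this commit; the `run()` wrapper is only for illustration, and error handling and graceful client shutdown are omitted:

```typescript
import { openai } from '@ai-sdk/openai';
import { generateText } from 'ai';
import { init, type LDContext } from '@launchdarkly/node-server-sdk';
import { initAi } from '@launchdarkly/server-sdk-ai';

async function run() {
  // 1-2: initialize the LaunchDarkly SDK and set up a user context
  const client = init(process.env.LAUNCHDARKLY_SDK_KEY ?? '');
  await client.waitForInitialization({ timeout: 10 });
  const context: LDContext = { kind: 'user', key: 'example-user-key', name: 'Sandy' };

  // 3-4: initialize the AI client and retrieve the AI Config (with a fallback model)
  const aiClient = initAi(client);
  const aiConfig = await aiClient.config('sample-ai-config', context, { model: { name: 'gpt-4' } });

  // 5-6: send the configured prompt through the Vercel AI SDK and track token usage
  const result = await aiConfig.tracker.trackVercelAISDKGenerateTextMetrics(() =>
    generateText(aiConfig.toVercelAISDK(openai)),
  );
  console.log(result.text);
  client.close();
}

run().catch(console.error);
```

The full `index.ts` in this commit also shows the streaming variant with `streamText` and `trackVercelAISDKStreamTextMetrics`, plus error handling and client flush/close on exit.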

To run the example (in the vercel-ai directory):

```bash
yarn start
```

## Note

This example uses OpenAI's chat completions API. Make sure your LaunchDarkly AI Config is set up correctly to work with OpenAI's models and API structure.
Lines changed: 46 additions & 0 deletions
@@ -0,0 +1,46 @@
{
  "name": "@launchdarkly/hello-vercel-ai",
  "version": "0.1.0",
  "description": "LaunchDarkly AI SDK for Node.js with Vercel AI",
  "private": true,
  "main": "dist/index.js",
  "types": "dist/index.d.ts",
  "scripts": {
    "build": "tsc",
    "start": "yarn build && node ./dist/index.js",
    "lint": "npx eslint . --ext .ts",
    "prettier": "prettier --write '**/*.@(js|ts|tsx|json|css)' --ignore-path ../../../.prettierignore",
    "lint:fix": "yarn run lint --fix",
    "check": "yarn prettier && yarn lint && yarn build && yarn test"
  },
  "keywords": [
    "launchdarkly",
    "ai",
    "llm"
  ],
  "author": "LaunchDarkly",
  "license": "Apache-2.0",
  "dependencies": {
    "@ai-sdk/openai": "2.0.30",
    "@launchdarkly/node-server-sdk": "9.7.1",
    "@launchdarkly/server-sdk-ai": "0.11.3",
    "ai": "5.0.0",
    "dotenv": "16.5.0",
    "lodash": "4.17.21",
    "openai": "4.96.2",
    "zod": "^4.1.8"
  },
  "devDependencies": {
    "@tsconfig/node20": "20.1.4",
    "@types/lodash": "4.17.16",
    "@types/node": "22.15.3",
    "typescript": "5.8.3"
  },
  "directories": {
    "example": "example"
  },
  "repository": {
    "type": "git",
    "url": "github.com/launchdarkly/js-core"
  }
}
Lines changed: 82 additions & 0 deletions
@@ -0,0 +1,82 @@
/* eslint-disable no-console */
import { openai } from '@ai-sdk/openai';
import { generateText, streamText } from 'ai';
import { init, type LDClient, type LDContext } from '@launchdarkly/node-server-sdk';
import { initAi } from '@launchdarkly/server-sdk-ai';

// Environment variables
const sdkKey = process.env.LAUNCHDARKLY_SDK_KEY ?? '';
const aiConfigKey = process.env.LAUNCHDARKLY_AI_CONFIG_KEY || 'sample-ai-config';

// Validate required environment variables
if (!sdkKey) {
  console.error('*** Please set the LAUNCHDARKLY_SDK_KEY env first');
  process.exit(1);
}

if (!aiConfigKey) {
  console.error('*** Please set the LAUNCHDARKLY_AI_CONFIG_KEY env first');
  process.exit(1);
}

let client: LDClient | undefined;

async function main() {
  // Initialize LaunchDarkly client
  client = init(sdkKey);

  // Set up the context properties. This context should appear on your LaunchDarkly contexts dashboard
  const context: LDContext = {
    kind: 'user',
    key: 'example-user-key',
    name: 'Sandy',
  };

  try {
    await client.waitForInitialization({ timeout: 10 });
    console.log('*** SDK successfully initialized');
  } catch (error) {
    console.log(`*** SDK failed to initialize: ${error}`);
    process.exit(1);
  }

  const aiClient = initAi(client);

  // Get AI configuration from LaunchDarkly
  const aiConfig = await aiClient.config(aiConfigKey, context, { model: { name: 'gpt-4' } });

  if (!aiConfig.enabled) {
    console.log('*** AI configuration is not enabled');
    process.exit(0);
  }

  console.log('Using model:', aiConfig.model?.name);

  // Example of using generateText (non-streaming)
  console.log('\n*** Generating text:');
  try {
    const result = await aiConfig.tracker.trackVercelAISDKGenerateTextMetrics(() =>
      generateText(aiConfig.toVercelAISDK(openai)),
    );
    console.log('Response:', result.text);

    process.stdout.write('Streaming Response: ');
    const streamResult = aiConfig.tracker.trackVercelAISDKStreamTextMetrics(() =>
      streamText(aiConfig.toVercelAISDK(openai)),
    );
    for await (const textPart of streamResult.textStream) {
      process.stdout.write(textPart);
    }

    console.log('\nSuccess.');
  } catch (err) {
    console.error('Error:', err);
  }
}

main()
  .catch((e) => console.error(e))
  .finally(async () => {
    await client?.flush();
    client?.close();
  });
Lines changed: 22 additions & 0 deletions
@@ -0,0 +1,22 @@
{
  "extends": "@tsconfig/node20/tsconfig.json",
  "compilerOptions": {
    "noEmit": false,
    "outDir": "dist",
    "baseUrl": ".",
    "allowUnusedLabels": false,
    "allowUnreachableCode": false,
    "noFallthroughCasesInSwitch": true,
    "noUncheckedIndexedAccess": true,
    "noUnusedLocals": true,
    "noUnusedParameters": true,
    "forceConsistentCasingInFileNames": true,
    "declaration": true,
    "sourceMap": true,
    "resolveJsonModule": true,
    "module": "CommonJS",
    "moduleResolution": "Node"
  },
  "include": ["src"],
  "exclude": ["dist", "node_modules"]
}
