Skip to content

Commit 1db731b

Browse files
authored
feat: Add VercelAI Provider for AI SDK (#948)
Release-as: 0.1.0
1 parent 17d595a commit 1db731b

File tree

17 files changed

+647
-1
lines changed

17 files changed

+647
-1
lines changed

.github/workflows/manual-publish.yml

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -35,6 +35,7 @@ on:
3535
- packages/tooling/jest
3636
- packages/sdk/browser
3737
- packages/sdk/server-ai
38+
- packages/ai-providers/server-ai-vercel
3839
- packages/ai-providers/server-ai-langchain
3940
- packages/telemetry/browser-telemetry
4041
- packages/sdk/combined-browser
Lines changed: 27 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,27 @@
1+
name: ai-providers/server-ai-vercel
2+
3+
on:
4+
push:
5+
branches: [main, 'feat/**']
6+
paths-ignore:
7+
- '**.md' # Do not need to run CI for markdown changes.
8+
pull_request:
9+
branches: [main, 'feat/**']
10+
paths-ignore:
11+
- '**.md'
12+
13+
jobs:
14+
build-test-vercel-provider:
15+
runs-on: ubuntu-latest
16+
steps:
17+
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
18+
- uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0
19+
with:
20+
node-version: 22.x
21+
registry-url: 'https://registry.npmjs.org'
22+
- id: shared
23+
name: Shared CI Steps
24+
uses: ./actions/ci
25+
with:
26+
workspace_name: '@launchdarkly/server-sdk-ai-vercel'
27+
workspace_path: packages/ai-providers/server-ai-vercel

.release-please-manifest.json

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,6 @@
11
{
22
"packages/ai-providers/server-ai-langchain": "0.1.0",
3+
"packages/ai-providers/server-ai-vercel": "0.0.0",
34
"packages/sdk/akamai-base": "3.0.10",
45
"packages/sdk/akamai-edgekv": "1.4.12",
56
"packages/sdk/browser": "0.8.1",

README.md

Lines changed: 8 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -38,6 +38,7 @@ This includes shared libraries, used by SDKs and other tools, as well as SDKs.
3838
| AI Providers | npm | issues | tests |
3939
| ------------------------------------------------------------------------------------------ | ------------------------------------------------------------- | ------------------------------------------- | ------------------------------------------------------------------- |
4040
| [@launchdarkly/server-sdk-ai-langchain](packages/ai-providers/server-ai-langchain/README.md) | [![NPM][server-ai-langchain-npm-badge]][server-ai-langchain-npm-link] | [server-ai-langchain][package-ai-providers-server-ai-langchain-issues] | [![Actions Status][server-ai-langchain-ci-badge]][server-ai-langchain-ci] |
41+
| [@launchdarkly/server-sdk-ai-vercel](packages/ai-providers/server-ai-vercel/README.md) | [![NPM][server-ai-vercel-npm-badge]][server-ai-vercel-npm-link] | [server-ai-vercel][package-ai-providers-server-ai-vercel-issues] | [![Actions Status][server-ai-vercel-ci-badge]][server-ai-vercel-ci] |
4142

4243
## Organization
4344

@@ -229,4 +230,10 @@ We encourage pull requests and other contributions from the community. Check out
229230
[server-ai-langchain-ci]: https://github.com/launchdarkly/js-core/actions/workflows/server-ai-langchain.yml
230231
[server-ai-langchain-npm-badge]: https://img.shields.io/npm/v/@launchdarkly/server-sdk-ai-langchain.svg?style=flat-square
231232
[server-ai-langchain-npm-link]: https://www.npmjs.com/package/@launchdarkly/server-sdk-ai-langchain
232-
[package-ai-providers-server-ai-langchain-issues]: https://github.com/launchdarkly/js-core/issues?q=is%3Aissue+is%3Aopen+label%3A%22package%3A+ai-providers%2Fserver-ai-langchain%22+
233+
[package-ai-providers-server-ai-langchain-issues]: https://github.com/launchdarkly/js-core/issues?q=is%3Aissue+is%3Aopen+label%3A%22package%3A+ai-providers%2Fserver-ai-langchain%22+
234+
[//]: # 'ai-providers/server-ai-vercel'
235+
[server-ai-vercel-ci-badge]: https://github.com/launchdarkly/js-core/actions/workflows/server-ai-vercel.yml/badge.svg
236+
[server-ai-vercel-ci]: https://github.com/launchdarkly/js-core/actions/workflows/server-ai-vercel.yml
237+
[server-ai-vercel-npm-badge]: https://img.shields.io/npm/v/@launchdarkly/server-sdk-ai-vercel.svg?style=flat-square
238+
[server-ai-vercel-npm-link]: https://www.npmjs.com/package/@launchdarkly/server-sdk-ai-vercel
239+
[package-ai-providers-server-ai-vercel-issues]: https://github.com/launchdarkly/js-core/issues?q=is%3Aissue+is%3Aopen+label%3A%22package%3A+ai-providers%2Fserver-ai-vercel%22+

package.json

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -2,6 +2,7 @@
22
"name": "@launchdarkly/js-core",
33
"workspaces": [
44
"packages/ai-providers/server-ai-langchain",
5+
"packages/ai-providers/server-ai-vercel",
56
"packages/shared/common",
67
"packages/shared/sdk-client",
78
"packages/shared/sdk-server",
Lines changed: 111 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,111 @@
1+
# LaunchDarkly AI SDK Vercel Provider for Server-Side JavaScript
2+
3+
[![NPM][server-ai-vercel-npm-badge]][server-ai-vercel-npm-link]
4+
[![Actions Status][server-ai-vercel-ci-badge]][server-ai-vercel-ci]
5+
[![Documentation][server-ai-vercel-ghp-badge]][server-ai-vercel-ghp-link]
6+
[![NPM][server-ai-vercel-dm-badge]][server-ai-vercel-npm-link]
7+
[![NPM][server-ai-vercel-dt-badge]][server-ai-vercel-npm-link]
8+
9+
# ⛔️⛔️⛔️⛔️
10+
11+
> [!CAUTION]
12+
> This library is an alpha version and should not be considered ready for production use while this message is visible.
13+
14+
# ☝️☝️☝️☝️☝️☝️
15+
16+
## LaunchDarkly overview
17+
18+
[LaunchDarkly](https://www.launchdarkly.com) is a feature management platform that serves over 100 billion feature flags daily to help teams build better software, faster. [Get started](https://docs.launchdarkly.com/home/getting-started) using LaunchDarkly today!
19+
20+
[![Twitter Follow](https://img.shields.io/twitter/follow/launchdarkly.svg?style=social&label=Follow&maxAge=2592000)](https://twitter.com/intent/follow?screen_name=launchdarkly)
21+
22+
## Quick Setup
23+
24+
This package provides Vercel AI SDK integration for the LaunchDarkly AI SDK. The simplest way to use it is with the LaunchDarkly AI SDK's `initChat` method:
25+
26+
1. Install the required packages:
27+
28+
```shell
29+
npm install @launchdarkly/server-sdk-ai @launchdarkly/server-sdk-ai-vercel --save
30+
# or
31+
yarn add @launchdarkly/server-sdk-ai @launchdarkly/server-sdk-ai-vercel
32+
```
33+
34+
2. Create a chat session and use it:
35+
36+
```typescript
37+
import { init } from '@launchdarkly/node-server-sdk';
38+
import { initAi } from '@launchdarkly/server-sdk-ai';
39+
40+
// Initialize LaunchDarkly client
41+
const ldClient = init(sdkKey);
42+
const aiClient = initAi(ldClient);
43+
44+
// Create a chat session
45+
const defaultConfig = {
46+
enabled: true,
47+
model: { name: 'gpt-4' },
48+
provider: { name: 'openai' }
49+
};
50+
const chat = await aiClient.initChat('my-chat-config', context, defaultConfig);
51+
52+
if (chat) {
53+
const response = await chat.invoke('What is the capital of France?');
54+
console.log(response.message.content);
55+
}
56+
```
57+
58+
For more information about using the LaunchDarkly AI SDK, see the [LaunchDarkly AI SDK documentation](https://github.com/launchdarkly/js-core/tree/main/packages/sdk/server-ai/README.md).
59+
60+
## Advanced Usage
61+
62+
For more control, you can use the Vercel AI provider package directly with LaunchDarkly configurations:
63+
64+
```typescript
65+
import { VercelProvider } from '@launchdarkly/server-sdk-ai-vercel';
66+
import { generateText } from 'ai';
67+
68+
// Create a Vercel AI model from LaunchDarkly configuration
69+
const model = await VercelProvider.createVercelModel(aiConfig);
70+
71+
// Convert LaunchDarkly messages and add user message
72+
const configMessages = aiConfig.messages || [];
73+
const userMessage = { role: 'user', content: 'What is the capital of France?' };
74+
const allMessages = [...configMessages, userMessage];
75+
76+
// Track the model call with LaunchDarkly tracking
77+
const response = await aiConfig.tracker.trackMetricsOf(
78+
(result) => VercelProvider.createAIMetrics(result),
79+
() => generateText({ model, messages: allMessages })
80+
);
81+
82+
console.log('AI Response:', response.text);
83+
```
84+
85+
## Contributing
86+
87+
We encourage pull requests and other contributions from the community. Check out our [contributing guidelines](CONTRIBUTING.md) for instructions on how to contribute to this SDK.
88+
89+
## About LaunchDarkly
90+
91+
- LaunchDarkly is a continuous delivery platform that provides feature flags as a service and allows developers to iterate quickly and safely. We allow you to easily flag your features and manage them from the LaunchDarkly dashboard. With LaunchDarkly, you can:
92+
- Roll out a new feature to a subset of your users (like a group of users who opt-in to a beta tester group), gathering feedback and bug reports from real-world use cases.
93+
- Gradually roll out a feature to an increasing percentage of users, and track the effect that the feature has on key metrics (for instance, how likely is a user to complete a purchase if they have feature A versus feature B?).
94+
- Turn off a feature that you realize is causing performance problems in production, without needing to re-deploy, or even restart the application with a changed configuration file.
95+
- Grant access to certain features based on user attributes, like payment plan (e.g., users on the 'gold' plan get access to more features than users on the 'silver' plan).
96+
- Disable parts of your application to facilitate maintenance, without taking everything offline.
97+
- LaunchDarkly provides feature flag SDKs for a wide variety of languages and technologies. Check out [our documentation](https://docs.launchdarkly.com/sdk) for a complete list.
98+
- Explore LaunchDarkly
99+
- [launchdarkly.com](https://www.launchdarkly.com/ 'LaunchDarkly Main Website') for more information
100+
- [docs.launchdarkly.com](https://docs.launchdarkly.com/ 'LaunchDarkly Documentation') for our documentation and SDK reference guides
101+
- [apidocs.launchdarkly.com](https://apidocs.launchdarkly.com/ 'LaunchDarkly API Documentation') for our API documentation
102+
- [blog.launchdarkly.com](https://blog.launchdarkly.com/ 'LaunchDarkly Blog Documentation') for the latest product updates
103+
104+
[server-ai-vercel-ci-badge]: https://github.com/launchdarkly/js-core/actions/workflows/server-ai-vercel.yml/badge.svg
105+
[server-ai-vercel-ci]: https://github.com/launchdarkly/js-core/actions/workflows/server-ai-vercel.yml
106+
[server-ai-vercel-npm-badge]: https://img.shields.io/npm/v/@launchdarkly/server-sdk-ai-vercel.svg?style=flat-square
107+
[server-ai-vercel-npm-link]: https://www.npmjs.com/package/@launchdarkly/server-sdk-ai-vercel
108+
[server-ai-vercel-ghp-badge]: https://img.shields.io/static/v1?label=GitHub+Pages&message=API+reference&color=00add8
109+
[server-ai-vercel-ghp-link]: https://launchdarkly.github.io/js-core/packages/ai-providers/server-ai-vercel/docs/
110+
[server-ai-vercel-dm-badge]: https://img.shields.io/npm/dm/@launchdarkly/server-sdk-ai-vercel.svg?style=flat-square
111+
[server-ai-vercel-dt-badge]: https://img.shields.io/npm/dt/@launchdarkly/server-sdk-ai-vercel.svg?style=flat-square
Lines changed: 203 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,203 @@
1+
import { generateText } from 'ai';
2+
3+
import { VercelProvider } from '../src/VercelProvider';
4+
5+
// Mock Vercel AI SDK
6+
jest.mock('ai', () => ({
7+
generateText: jest.fn(),
8+
}));
9+
10+
describe('VercelProvider', () => {
11+
let mockModel: any;
12+
let provider: VercelProvider;
13+
14+
beforeEach(() => {
15+
mockModel = { name: 'test-model' };
16+
provider = new VercelProvider(mockModel, {});
17+
});
18+
19+
describe('createAIMetrics', () => {
20+
it('creates metrics with success=true and token usage', () => {
21+
const mockResponse = {
22+
usage: {
23+
promptTokens: 50,
24+
completionTokens: 50,
25+
totalTokens: 100,
26+
},
27+
};
28+
29+
const result = VercelProvider.createAIMetrics(mockResponse);
30+
31+
expect(result).toEqual({
32+
success: true,
33+
usage: {
34+
total: 100,
35+
input: 50,
36+
output: 50,
37+
},
38+
});
39+
});
40+
41+
it('creates metrics with success=true and no usage when usage is missing', () => {
42+
const mockResponse = {};
43+
44+
const result = VercelProvider.createAIMetrics(mockResponse);
45+
46+
expect(result).toEqual({
47+
success: true,
48+
usage: undefined,
49+
});
50+
});
51+
52+
it('handles partial usage data', () => {
53+
const mockResponse = {
54+
usage: {
55+
promptTokens: 30,
56+
// completionTokens and totalTokens missing
57+
},
58+
};
59+
60+
const result = VercelProvider.createAIMetrics(mockResponse);
61+
62+
expect(result).toEqual({
63+
success: true,
64+
usage: {
65+
total: 0,
66+
input: 30,
67+
output: 0,
68+
},
69+
});
70+
});
71+
});
72+
73+
describe('invokeModel', () => {
74+
it('invokes Vercel AI generateText and returns response', async () => {
75+
const mockResponse = {
76+
text: 'Hello! How can I help you today?',
77+
usage: {
78+
promptTokens: 10,
79+
completionTokens: 15,
80+
totalTokens: 25,
81+
},
82+
};
83+
84+
(generateText as jest.Mock).mockResolvedValue(mockResponse);
85+
86+
const messages = [{ role: 'user' as const, content: 'Hello!' }];
87+
88+
const result = await provider.invokeModel(messages);
89+
90+
expect(generateText).toHaveBeenCalledWith({
91+
model: mockModel,
92+
messages: [{ role: 'user', content: 'Hello!' }],
93+
});
94+
95+
expect(result).toEqual({
96+
message: {
97+
role: 'assistant',
98+
content: 'Hello! How can I help you today?',
99+
},
100+
metrics: {
101+
success: true,
102+
usage: {
103+
total: 25,
104+
input: 10,
105+
output: 15,
106+
},
107+
},
108+
});
109+
});
110+
111+
it('handles response without usage data', async () => {
112+
const mockResponse = {
113+
text: 'Hello! How can I help you today?',
114+
};
115+
116+
(generateText as jest.Mock).mockResolvedValue(mockResponse);
117+
118+
const messages = [{ role: 'user' as const, content: 'Hello!' }];
119+
120+
const result = await provider.invokeModel(messages);
121+
122+
expect(result).toEqual({
123+
message: {
124+
role: 'assistant',
125+
content: 'Hello! How can I help you today?',
126+
},
127+
metrics: {
128+
success: true,
129+
usage: undefined,
130+
},
131+
});
132+
});
133+
});
134+
135+
describe('getModel', () => {
136+
it('returns the underlying Vercel AI model', () => {
137+
const model = provider.getModel();
138+
expect(model).toBe(mockModel);
139+
});
140+
});
141+
142+
describe('createVercelModel', () => {
143+
it('creates OpenAI model for openai provider', async () => {
144+
const mockAiConfig = {
145+
model: { name: 'gpt-4', parameters: {} },
146+
provider: { name: 'openai' },
147+
enabled: true,
148+
tracker: {} as any,
149+
toVercelAISDK: jest.fn(),
150+
};
151+
152+
// Mock the dynamic import
153+
jest.doMock('@ai-sdk/openai', () => ({
154+
openai: jest.fn().mockReturnValue(mockModel),
155+
}));
156+
157+
const result = await VercelProvider.createVercelModel(mockAiConfig);
158+
expect(result).toBe(mockModel);
159+
});
160+
161+
it('throws error for unsupported provider', async () => {
162+
const mockAiConfig = {
163+
model: { name: 'test-model', parameters: {} },
164+
provider: { name: 'unsupported' },
165+
enabled: true,
166+
tracker: {} as any,
167+
toVercelAISDK: jest.fn(),
168+
};
169+
170+
await expect(VercelProvider.createVercelModel(mockAiConfig)).rejects.toThrow(
171+
'Unsupported Vercel AI provider: unsupported',
172+
);
173+
});
174+
});
175+
176+
describe('create', () => {
177+
it('creates VercelProvider with correct model and parameters', async () => {
178+
const mockAiConfig = {
179+
model: {
180+
name: 'gpt-4',
181+
parameters: {
182+
temperature: 0.7,
183+
maxTokens: 1000,
184+
},
185+
},
186+
provider: { name: 'openai' },
187+
enabled: true,
188+
tracker: {} as any,
189+
toVercelAISDK: jest.fn(),
190+
};
191+
192+
// Mock the dynamic import
193+
jest.doMock('@ai-sdk/openai', () => ({
194+
openai: jest.fn().mockReturnValue(mockModel),
195+
}));
196+
197+
const result = await VercelProvider.create(mockAiConfig);
198+
199+
expect(result).toBeInstanceOf(VercelProvider);
200+
expect(result.getModel()).toBeDefined();
201+
});
202+
});
203+
});
Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,7 @@
1+
module.exports = {
2+
transform: { '^.+\\.ts?$': 'ts-jest' },
3+
testMatch: ['**/__tests__/**/*test.ts?(x)'],
4+
testEnvironment: 'node',
5+
moduleFileExtensions: ['ts', 'tsx', 'js', 'jsx', 'json', 'node'],
6+
collectCoverageFrom: ['src/**/*.ts'],
7+
};

0 commit comments

Comments
 (0)