21 changes: 21 additions & 0 deletions .github/workflows/release-please.yml
@@ -28,6 +28,7 @@ jobs:
      package-browser-released: ${{ steps.release.outputs['packages/sdk/browser--release_created'] }}
      package-server-ai-released: ${{ steps.release.outputs['packages/sdk/server-ai--release_created'] }}
      package-server-ai-langchain-released: ${{ steps.release.outputs['packages/ai-providers/server-ai-langchain--release_created'] }}
      package-server-ai-openai-released: ${{ steps.release.outputs['packages/ai-providers/server-ai-openai--release_created'] }}
      package-browser-telemetry-released: ${{ steps.release.outputs['packages/telemetry/browser-telemetry--release_created'] }}
      package-combined-browser-released: ${{ steps.release.outputs['packages/sdk/combined-browser--release_created'] }}
    steps:
@@ -481,3 +482,23 @@ jobs:
        with:
          workspace_path: packages/ai-providers/server-ai-langchain
          aws_assume_role: ${{ vars.AWS_ROLE_ARN }}

  release-server-ai-openai:
    runs-on: ubuntu-latest
    needs: ['release-please', 'release-server-ai']
    permissions:
      id-token: write
      contents: write
    if: ${{ always() && !failure() && !cancelled() && needs.release-please.outputs.package-server-ai-openai-released == 'true' }}
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-node@v4
        with:
          node-version: 22.x
          registry-url: 'https://registry.npmjs.org'
      - id: release-server-ai-openai
        name: Full release of packages/ai-providers/server-ai-openai
        uses: ./actions/full-release
        with:
          workspace_path: packages/ai-providers/server-ai-openai
          aws_assume_role: ${{ vars.AWS_ROLE_ARN }}
1 change: 1 addition & 0 deletions .release-please-manifest.json
@@ -1,5 +1,6 @@
{
"packages/ai-providers/server-ai-langchain": "0.1.0",
"packages/ai-providers/server-ai-openai": "0.0.0",
"packages/sdk/akamai-base": "3.0.10",
"packages/sdk/akamai-edgekv": "1.4.12",
"packages/sdk/browser": "0.8.1",
1 change: 1 addition & 0 deletions package.json
@@ -2,6 +2,7 @@
  "name": "@launchdarkly/js-core",
  "workspaces": [
    "packages/ai-providers/server-ai-langchain",
    "packages/ai-providers/server-ai-openai",
    "packages/shared/common",
    "packages/shared/sdk-client",
    "packages/shared/sdk-server",
231 changes: 231 additions & 0 deletions packages/ai-providers/server-ai-openai/__tests__/OpenAIProvider.test.ts
@@ -0,0 +1,231 @@
import { OpenAI } from 'openai';

import { OpenAIProvider } from '../src/OpenAIProvider';

// Mock OpenAI
jest.mock('openai', () => ({
  OpenAI: jest.fn().mockImplementation(() => ({
    chat: {
      completions: {
        create: jest.fn().mockResolvedValue({
          choices: [{ message: { content: 'Test response' } }],
          usage: { prompt_tokens: 10, completion_tokens: 5, total_tokens: 15 },
        }),
      },
    },
  })),
}));

describe('OpenAIProvider', () => {
  let mockOpenAI: jest.Mocked<OpenAI>;
  let provider: OpenAIProvider;

  beforeEach(() => {
    mockOpenAI = new OpenAI() as jest.Mocked<OpenAI>;
    provider = new OpenAIProvider(mockOpenAI, 'gpt-3.5-turbo', {});
  });

  describe('createAIMetrics', () => {
    it('creates metrics with success=true and token usage', () => {
      const mockResponse = {
        usage: {
          prompt_tokens: 50,
          completion_tokens: 50,
          total_tokens: 100,
        },
      };

      const result = OpenAIProvider.createAIMetrics(mockResponse);

      expect(result).toEqual({
        success: true,
        usage: {
          total: 100,
          input: 50,
          output: 50,
        },
      });
    });

    it('creates metrics with success=true and no usage when usage is missing', () => {
      const mockResponse = {};

      const result = OpenAIProvider.createAIMetrics(mockResponse);

      expect(result).toEqual({
        success: true,
        usage: undefined,
      });
    });

    it('handles partial usage data', () => {
      const mockResponse = {
        usage: {
          prompt_tokens: 30,
          // completion_tokens and total_tokens missing
        },
      };

      const result = OpenAIProvider.createAIMetrics(mockResponse);

      expect(result).toEqual({
        success: true,
        usage: {
          total: 0,
          input: 30,
          output: 0,
        },
      });
    });
  });

  describe('invokeModel', () => {
    it('invokes OpenAI chat completions and returns response', async () => {
      const mockResponse = {
        choices: [
          {
            message: {
              content: 'Hello! How can I help you today?',
            },
          },
        ],
        usage: {
          prompt_tokens: 10,
          completion_tokens: 15,
          total_tokens: 25,
        },
      };

      (mockOpenAI.chat.completions.create as jest.Mock).mockResolvedValue(mockResponse as any);

      const messages = [{ role: 'user' as const, content: 'Hello!' }];

      const result = await provider.invokeModel(messages);

      expect(mockOpenAI.chat.completions.create).toHaveBeenCalledWith({
        model: 'gpt-3.5-turbo',
        messages: [{ role: 'user', content: 'Hello!' }],
      });

      expect(result).toEqual({
        message: {
          role: 'assistant',
          content: 'Hello! How can I help you today?',
        },
        metrics: {
          success: true,
          usage: {
            total: 25,
            input: 10,
            output: 15,
          },
        },
      });
    });

    it('returns unsuccessful response when no content in response', async () => {
      const mockResponse = {
        choices: [
          {
            message: {
              // content is missing
            },
          },
        ],
      };

      (mockOpenAI.chat.completions.create as jest.Mock).mockResolvedValue(mockResponse as any);

      const messages = [{ role: 'user' as const, content: 'Hello!' }];

      const result = await provider.invokeModel(messages);

      expect(result).toEqual({
        message: {
          role: 'assistant',
          content: '',
        },
        metrics: {
          success: false,
          usage: undefined,
        },
      });
    });

    it('returns unsuccessful response when choices array is empty', async () => {
      const mockResponse = {
        choices: [],
      };

      (mockOpenAI.chat.completions.create as jest.Mock).mockResolvedValue(mockResponse as any);

      const messages = [{ role: 'user' as const, content: 'Hello!' }];

      const result = await provider.invokeModel(messages);

      expect(result).toEqual({
        message: {
          role: 'assistant',
          content: '',
        },
        metrics: {
          success: false,
          usage: undefined,
        },
      });
    });

    it('returns unsuccessful response when choices is undefined', async () => {
      const mockResponse = {
        // choices is missing entirely
      };

      (mockOpenAI.chat.completions.create as jest.Mock).mockResolvedValue(mockResponse as any);

      const messages = [{ role: 'user' as const, content: 'Hello!' }];

      const result = await provider.invokeModel(messages);

      expect(result).toEqual({
        message: {
          role: 'assistant',
          content: '',
        },
        metrics: {
          success: false,
          usage: undefined,
        },
      });
    });
  });

  describe('getClient', () => {
    it('returns the underlying OpenAI client', () => {
      const client = provider.getClient();
      expect(client).toBe(mockOpenAI);
    });
  });

  describe('create', () => {
    it('creates OpenAIProvider with correct model and parameters', async () => {
      const mockAiConfig = {
        model: {
          name: 'gpt-4',
          parameters: {
            temperature: 0.7,
            max_tokens: 1000,
          },
        },
        provider: { name: 'openai' },
        enabled: true,
        tracker: {} as any,
        toVercelAISDK: jest.fn(),
      };

      const result = await OpenAIProvider.create(mockAiConfig);

      expect(result).toBeInstanceOf(OpenAIProvider);
      expect(result.getClient()).toBeDefined();
    });
  });
});
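
The implementation under test, src/OpenAIProvider.ts, is not part of the hunks shown here. As orientation only, the following is a minimal sketch of the shape these assertions imply; the ProviderMessage/ProviderMetrics names, the handling of the constructor parameters, and the body of create() are assumptions rather than the PR's actual code.

```typescript
import { OpenAI } from 'openai';

// Hypothetical sketch inferred from the tests above, not the real src/OpenAIProvider.ts.
interface ProviderMessage {
  role: 'system' | 'user' | 'assistant';
  content: string;
}

interface ProviderMetrics {
  success: boolean;
  usage?: { total: number; input: number; output: number };
}

export class OpenAIProvider {
  private _client: OpenAI;

  private _modelName: string;

  private _parameters: Record<string, unknown>;

  constructor(client: OpenAI, modelName: string, parameters: Record<string, unknown>) {
    this._client = client;
    this._modelName = modelName;
    this._parameters = parameters;
  }

  // Maps an OpenAI chat completion response onto the metrics shape asserted in the
  // tests: prompt_tokens -> input, completion_tokens -> output, missing counters
  // default to 0, and a missing usage block becomes undefined.
  static createAIMetrics(response: any): ProviderMetrics {
    const usage = response?.usage
      ? {
          total: response.usage.total_tokens ?? 0,
          input: response.usage.prompt_tokens ?? 0,
          output: response.usage.completion_tokens ?? 0,
        }
      : undefined;
    return { success: true, usage };
  }

  async invokeModel(messages: ProviderMessage[]) {
    // The tests only pin { model, messages }; whether _parameters are merged into
    // the request is not confirmed by this diff.
    const response = await this._client.chat.completions.create({
      model: this._modelName,
      messages: messages as any,
    });

    const content = response.choices?.[0]?.message?.content;
    if (!content) {
      // Missing choices or empty content is reported as an unsuccessful call.
      return {
        message: { role: 'assistant' as const, content: '' },
        metrics: { success: false, usage: undefined },
      };
    }

    return {
      message: { role: 'assistant' as const, content },
      metrics: OpenAIProvider.createAIMetrics(response),
    };
  }

  getClient(): OpenAI {
    return this._client;
  }

  static async create(aiConfig: {
    model?: { name: string; parameters?: Record<string, unknown> };
  }): Promise<OpenAIProvider> {
    // Assumption: the OpenAI client picks up OPENAI_API_KEY from the environment.
    return new OpenAIProvider(
      new OpenAI(),
      aiConfig.model?.name ?? '',
      aiConfig.model?.parameters ?? {},
    );
  }
}
```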
14 changes: 14 additions & 0 deletions packages/ai-providers/server-ai-openai/jest.config.js
@@ -0,0 +1,14 @@
module.exports = {
  preset: 'ts-jest',
  testEnvironment: 'node',
  roots: ['<rootDir>'],
  testMatch: ['**/__tests__/**/*.test.ts'],
  collectCoverageFrom: [
    'src/**/*.ts',
    '!src/**/*.d.ts',
    '!src/**/*.test.ts',
    '!src/**/*.spec.ts',
  ],
  coverageDirectory: 'coverage',
  coverageReporters: ['text', 'lcov', 'html'],
};
54 changes: 54 additions & 0 deletions packages/ai-providers/server-ai-openai/package.json
@@ -0,0 +1,54 @@
{
  "name": "@launchdarkly/server-sdk-ai-openai",
  "version": "0.0.0",
  "description": "LaunchDarkly AI SDK OpenAI Provider for Server-Side JavaScript",
  "homepage": "https://github.com/launchdarkly/js-core/tree/main/packages/ai-providers/server-ai-openai",
  "repository": {
    "type": "git",
    "url": "https://github.com/launchdarkly/js-core.git"
  },
  "main": "dist/index.js",
  "types": "dist/index.d.ts",
  "type": "commonjs",
  "scripts": {
    "build": "npx tsc",
    "lint": "npx eslint . --ext .ts",
    "prettier": "prettier --write '**/*.@(js|ts|tsx|json|css)' --ignore-path ../../../.prettierignore",
    "lint:fix": "yarn run lint --fix",
    "check": "yarn prettier && yarn lint && yarn build && yarn test",
    "test": "jest"
  },
  "keywords": [
    "launchdarkly",
    "ai",
    "llm",
    "openai"
  ],
  "author": "LaunchDarkly",
  "license": "Apache-2.0",
  "dependencies": {
    "@launchdarkly/server-sdk-ai": "^0.12.0",
    "openai": "^4.0.0"
  },
  "devDependencies": {
    "@launchdarkly/js-server-sdk-common": "2.16.2",
    "@trivago/prettier-plugin-sort-imports": "^4.1.1",
    "@types/jest": "^29.5.3",
    "@typescript-eslint/eslint-plugin": "^6.20.0",
    "@typescript-eslint/parser": "^6.20.0",
    "eslint": "^8.45.0",
    "eslint-config-airbnb-base": "^15.0.0",
    "eslint-config-airbnb-typescript": "^17.1.0",
    "eslint-config-prettier": "^8.8.0",
    "eslint-plugin-import": "^2.27.5",
    "eslint-plugin-jest": "^27.6.3",
    "eslint-plugin-prettier": "^5.0.0",
    "jest": "^29.6.1",
    "prettier": "^3.0.0",
    "ts-jest": "^29.1.1",
    "typescript": "5.1.6"
  },
  "peerDependencies": {
    "@launchdarkly/js-server-sdk-common": "2.x"
  }
}
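
For orientation, a hedged consumer-side example of the surface the tests pin down, assuming the package entry point (dist/index.js) re-exports OpenAIProvider and that OPENAI_API_KEY is set in the environment; the prompt text is illustrative and the model name is reused from the tests.

```typescript
import { OpenAI } from 'openai';

// Assumes dist/index.js re-exports OpenAIProvider; only members exercised by the
// tests (constructor, invokeModel, metrics shape) are relied on here.
import { OpenAIProvider } from '@launchdarkly/server-sdk-ai-openai';

async function main() {
  // The OpenAI client reads OPENAI_API_KEY from the environment by default.
  const provider = new OpenAIProvider(new OpenAI(), 'gpt-3.5-turbo', {});

  const result = await provider.invokeModel([{ role: 'user' as const, content: 'Hello!' }]);

  // metrics.usage maps OpenAI's prompt/completion/total token counts onto
  // input/output/total, as asserted in the tests.
  console.log(result.message.content);
  console.log(result.metrics);
}

main().catch((err) => {
  console.error(err);
  process.exit(1);
});
```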