diff --git a/.release-please-manifest.json b/.release-please-manifest.json
index d5001d1e2f..7d51191fcb 100644
--- a/.release-please-manifest.json
+++ b/.release-please-manifest.json
@@ -1,22 +1,25 @@
 {
-  "packages/shared/common": "2.19.0",
-  "packages/shared/sdk-server": "2.16.2",
-  "packages/sdk/server-node": "9.10.2",
+  "packages/ai-providers/server-ai-langchain": "0.1.0-alpha.0",
+  "packages/sdk/akamai-base": "3.0.10",
+  "packages/sdk/akamai-edgekv": "1.4.12",
+  "packages/sdk/browser": "0.8.1",
   "packages/sdk/cloudflare": "2.7.10",
+  "packages/sdk/combined-browser": "0.0.0",
   "packages/sdk/fastly": "0.2.1",
-  "packages/shared/sdk-server-edge": "2.6.9",
+  "packages/sdk/react-native": "10.11.0",
+  "packages/sdk/react-universal": "0.0.1",
+  "packages/sdk/server-ai": "0.11.4",
+  "packages/sdk/server-node": "9.10.2",
+  "packages/sdk/svelte": "0.1.0",
   "packages/sdk/vercel": "1.3.34",
-  "packages/sdk/akamai-base": "3.0.10",
-  "packages/sdk/akamai-edgekv": "1.4.12",
   "packages/shared/akamai-edgeworker-sdk": "2.0.10",
+  "packages/shared/common": "2.19.0",
+  "packages/shared/sdk-client": "1.15.1",
+  "packages/shared/sdk-server": "2.16.2",
+  "packages/shared/sdk-server-edge": "2.6.9",
   "packages/store/node-server-sdk-dynamodb": "6.2.14",
   "packages/store/node-server-sdk-redis": "4.2.14",
-  "packages/shared/sdk-client": "1.15.1",
-  "packages/sdk/react-native": "10.11.0",
-  "packages/telemetry/node-server-sdk-otel": "1.3.2",
-  "packages/sdk/browser": "0.8.1",
-  "packages/sdk/server-ai": "0.11.4",
   "packages/telemetry/browser-telemetry": "1.0.11",
-  "packages/tooling/jest": "0.1.11",
-  "packages/sdk/combined-browser": "0.0.0"
+  "packages/telemetry/node-server-sdk-otel": "1.3.2",
+  "packages/tooling/jest": "0.1.11"
 }
diff --git a/package.json b/package.json
index 313d3cf351..e05ef41e6b 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,7 @@
 {
   "name": "@launchdarkly/js-core",
   "workspaces": [
+    "packages/ai-providers/server-ai-langchain",
     "packages/shared/common",
"packages/shared/sdk-client", "packages/shared/sdk-server", diff --git a/packages/ai-providers/server-ai-langchain/__tests__/LangChainProvider.test.ts b/packages/ai-providers/server-ai-langchain/__tests__/LangChainProvider.test.ts new file mode 100644 index 0000000000..0c1cd38f23 --- /dev/null +++ b/packages/ai-providers/server-ai-langchain/__tests__/LangChainProvider.test.ts @@ -0,0 +1,62 @@ +import { AIMessage, HumanMessage, SystemMessage } from 'langchain/schema'; + +import { LangChainProvider } from '../src/LangChainProvider'; + +describe('LangChainProvider', () => { + describe('convertMessagesToLangChain', () => { + it('converts system messages to SystemMessage', () => { + const messages = [{ role: 'system' as const, content: 'You are a helpful assistant.' }]; + const result = LangChainProvider.convertMessagesToLangChain(messages); + + expect(result).toHaveLength(1); + expect(result[0]).toBeInstanceOf(SystemMessage); + expect(result[0].content).toBe('You are a helpful assistant.'); + }); + + it('converts user messages to HumanMessage', () => { + const messages = [{ role: 'user' as const, content: 'Hello, how are you?' }]; + const result = LangChainProvider.convertMessagesToLangChain(messages); + + expect(result).toHaveLength(1); + expect(result[0]).toBeInstanceOf(HumanMessage); + expect(result[0].content).toBe('Hello, how are you?'); + }); + + it('converts assistant messages to AIMessage', () => { + const messages = [{ role: 'assistant' as const, content: 'I am doing well, thank you!' }]; + const result = LangChainProvider.convertMessagesToLangChain(messages); + + expect(result).toHaveLength(1); + expect(result[0]).toBeInstanceOf(AIMessage); + expect(result[0].content).toBe('I am doing well, thank you!'); + }); + + it('converts multiple messages in order', () => { + const messages = [ + { role: 'system' as const, content: 'You are a helpful assistant.' }, + { role: 'user' as const, content: 'What is the weather like?' 
}, + { role: 'assistant' as const, content: 'I cannot check the weather.' }, + ]; + const result = LangChainProvider.convertMessagesToLangChain(messages); + + expect(result).toHaveLength(3); + expect(result[0]).toBeInstanceOf(SystemMessage); + expect(result[1]).toBeInstanceOf(HumanMessage); + expect(result[2]).toBeInstanceOf(AIMessage); + }); + + it('throws error for unsupported message role', () => { + const messages = [{ role: 'unknown' as any, content: 'Test message' }]; + + expect(() => LangChainProvider.convertMessagesToLangChain(messages)).toThrow( + 'Unsupported message role: unknown' + ); + }); + + it('handles empty message array', () => { + const result = LangChainProvider.convertMessagesToLangChain([]); + + expect(result).toHaveLength(0); + }); + }); +}); diff --git a/packages/ai-providers/server-ai-langchain/jest.config.js b/packages/ai-providers/server-ai-langchain/jest.config.js new file mode 100644 index 0000000000..9e3ea08f04 --- /dev/null +++ b/packages/ai-providers/server-ai-langchain/jest.config.js @@ -0,0 +1,9 @@ +module.exports = { + preset: 'ts-jest', + testEnvironment: 'node', + roots: ['/src'], + testMatch: ['**/__tests__/**/*.test.ts'], + collectCoverageFrom: ['src/**/*.ts', '!src/**/*.d.ts'], + coverageDirectory: 'coverage', + coverageReporters: ['text', 'lcov', 'html'], +}; diff --git a/packages/ai-providers/server-ai-langchain/package.json b/packages/ai-providers/server-ai-langchain/package.json new file mode 100644 index 0000000000..a97c2dd95a --- /dev/null +++ b/packages/ai-providers/server-ai-langchain/package.json @@ -0,0 +1,55 @@ +{ + "name": "@launchdarkly/server-sdk-ai-langchain", + "version": "0.1.0-alpha.0", + "description": "LaunchDarkly AI SDK LangChain Provider for Server-Side JavaScript", + "homepage": "https://github.com/launchdarkly/js-core/tree/main/packages/ai-providers/server-ai-langchain", + "repository": { + "type": "git", + "url": "https://github.com/launchdarkly/js-core.git" + }, + "main": "dist/index.js", + "types": 
"dist/index.d.ts", + "type": "commonjs", + "scripts": { + "build": "npx tsc", + "lint": "npx eslint . --ext .ts", + "prettier": "prettier --write '**/*.@(js|ts|tsx|json|css)' --ignore-path ../../../.prettierignore", + "lint:fix": "yarn run lint --fix", + "check": "yarn prettier && yarn lint && yarn build && yarn test", + "test": "jest" + }, + "keywords": [ + "launchdarkly", + "ai", + "llm", + "langchain" + ], + "author": "LaunchDarkly", + "license": "Apache-2.0", + "dependencies": { + "@langchain/core": ">=0.2.21 <0.3.0", + "@launchdarkly/server-sdk-ai": "0.11.4", + "langchain": "^0.2.11" + }, + "devDependencies": { + "@launchdarkly/js-server-sdk-common": "2.16.2", + "@trivago/prettier-plugin-sort-imports": "^4.1.1", + "@types/jest": "^29.5.3", + "@typescript-eslint/eslint-plugin": "^6.20.0", + "@typescript-eslint/parser": "^6.20.0", + "eslint": "^8.45.0", + "eslint-config-airbnb-base": "^15.0.0", + "eslint-config-airbnb-typescript": "^17.1.0", + "eslint-config-prettier": "^8.8.0", + "eslint-plugin-import": "^2.27.5", + "eslint-plugin-jest": "^27.6.3", + "eslint-plugin-prettier": "^5.0.0", + "jest": "^29.6.1", + "prettier": "^3.0.0", + "ts-jest": "^29.1.1", + "typescript": "5.1.6" + }, + "peerDependencies": { + "@launchdarkly/js-server-sdk-common": "2.x" + } +} diff --git a/packages/ai-providers/server-ai-langchain/src/LangChainProvider.ts b/packages/ai-providers/server-ai-langchain/src/LangChainProvider.ts new file mode 100644 index 0000000000..b80134dd7c --- /dev/null +++ b/packages/ai-providers/server-ai-langchain/src/LangChainProvider.ts @@ -0,0 +1,123 @@ +import { BaseChatModel } from '@langchain/core/language_models/chat_models'; +import { AIMessage, HumanMessage, SystemMessage } from '@langchain/core/messages'; +import { initChatModel } from 'langchain/chat_models/universal'; + +import { + LDAIConfig, + LDAIConfigTracker, + LDMessage, + LDTokenUsage, +} from '@launchdarkly/server-sdk-ai'; + +/** + * LangChain provider utilities and helper functions. 
+ */
+export class LangChainProvider {
+  /**
+   * Map LaunchDarkly provider names to LangChain provider names.
+   * This method enables seamless integration between LaunchDarkly's standardized
+   * provider naming and LangChain's naming conventions.
+   */
+  static mapProvider(ldProviderName: string): string {
+    const lowercasedName = ldProviderName.toLowerCase();
+
+    const mapping: Record<string, string> = {
+      gemini: 'google-genai',
+    };
+
+    return mapping[lowercasedName] || lowercasedName;
+  }
+
+  /**
+   * Create token usage information from a LangChain provider response.
+   * This method extracts token usage information from LangChain responses
+   * and returns a LaunchDarkly TokenUsage object.
+   */
+  static createTokenUsage(langChainResponse: AIMessage): LDTokenUsage | undefined {
+    if (!langChainResponse?.response_metadata?.tokenUsage) {
+      return undefined;
+    }
+
+    const { tokenUsage } = langChainResponse.response_metadata;
+
+    return {
+      total: tokenUsage.totalTokens || 0,
+      input: tokenUsage.promptTokens || 0,
+      output: tokenUsage.completionTokens || 0,
+    };
+  }
+
+  /**
+   * Convert LaunchDarkly messages to LangChain messages.
+   * This helper method enables developers to work directly with LangChain message types
+   * while maintaining compatibility with LaunchDarkly's standardized message format.
+   */
+  static convertMessagesToLangChain(
+    messages: LDMessage[],
+  ): (HumanMessage | SystemMessage | AIMessage)[] {
+    return messages.map((msg) => {
+      switch (msg.role) {
+        case 'system':
+          return new SystemMessage(msg.content);
+        case 'user':
+          return new HumanMessage(msg.content);
+        case 'assistant':
+          return new AIMessage(msg.content);
+        default:
+          throw new Error(`Unsupported message role: ${msg.role}`);
+      }
+    });
+  }
+
+  /**
+   * Track metrics for a LangChain callable execution.
+   * This helper method enables developers to work directly with LangChain callables
+   * while ensuring consistent tracking behavior.
+   */
+  static async trackMetricsOf(
+    tracker: LDAIConfigTracker,
+    callable: () => Promise<AIMessage>,
+  ): Promise<AIMessage> {
+    return tracker.trackDurationOf(async () => {
+      try {
+        const result = await callable();
+
+        // Extract and track token usage if available
+        const tokenUsage = this.createTokenUsage(result);
+        if (tokenUsage) {
+          tracker.trackTokens({
+            total: tokenUsage.total,
+            input: tokenUsage.input,
+            output: tokenUsage.output,
+          });
+        }
+
+        tracker.trackSuccess();
+        return result;
+      } catch (error) {
+        tracker.trackError();
+        throw error;
+      }
+    });
+  }
+
+  /**
+   * Create a LangChain model from an AI configuration.
+   * This public helper method enables developers to initialize their own LangChain models
+   * using LaunchDarkly AI configurations.
+   *
+   * @param aiConfig The LaunchDarkly AI configuration
+   * @returns A Promise that resolves to a configured LangChain BaseChatModel
+   */
+  static async createLangChainModel(aiConfig: LDAIConfig): Promise<BaseChatModel> {
+    const modelName = aiConfig.model?.name || '';
+    const provider = aiConfig.provider?.name || '';
+    const parameters = aiConfig.model?.parameters || {};
+
+    // Use LangChain's universal initChatModel to support multiple providers
+    return initChatModel(modelName, {
+      modelProvider: this.mapProvider(provider),
+      ...parameters,
+    });
+  }
+}
diff --git a/packages/ai-providers/server-ai-langchain/src/LangChainTrackedChat.ts b/packages/ai-providers/server-ai-langchain/src/LangChainTrackedChat.ts
new file mode 100644
index 0000000000..b0dd093dfb
--- /dev/null
+++ b/packages/ai-providers/server-ai-langchain/src/LangChainTrackedChat.ts
@@ -0,0 +1,86 @@
+import { BaseChatModel } from '@langchain/core/language_models/chat_models';
+import { AIMessage, HumanMessage, SystemMessage } from '@langchain/core/messages';
+
+import {
+  BaseTrackedChat,
+  ChatResponse,
+  LDAIConfig,
+  LDAIConfigTracker,
+  LDMessage,
+} from '@launchdarkly/server-sdk-ai';
+
+import { LangChainProvider } from './LangChainProvider';
+
+/**
+ * LangChain-specific implementation of TrackedChat.
+ * This implementation integrates LangChain models with LaunchDarkly's tracking capabilities.
+ */
+export class LangChainTrackedChat extends BaseTrackedChat {
+  private _llm: BaseChatModel;
+
+  constructor(aiConfig: LDAIConfig, tracker: LDAIConfigTracker, llm: BaseChatModel) {
+    super(aiConfig, tracker);
+    this._llm = llm;
+  }
+
+  /**
+   * Provider-specific implementation that converts LDMessage[] to LangChain format,
+   * invokes the model, and returns a ChatResponse.
+   */
+  protected async invokeModel(messages: LDMessage[]): Promise<ChatResponse> {
+    // Convert LDMessage[] to LangChain messages
+    const langchainMessages = LangChainProvider.convertMessagesToLangChain(messages);
+
+    // Get the LangChain response
+    const response = await this._llm.invoke(langchainMessages);
+
+    // Extract token usage if available using the helper method
+    const usage = LangChainProvider.createTokenUsage(response);
+
+    // Handle different content types from LangChain
+    let content: string;
+    if (typeof response.content === 'string') {
+      content = response.content;
+    } else if (Array.isArray(response.content)) {
+      // Handle complex content (e.g., with images)
+      content = response.content
+        .map((item: any) => {
+          if (typeof item === 'string') return item;
+          if (item.type === 'text') return item.text;
+          return '';
+        })
+        .join('');
+    } else {
+      content = String(response.content);
+    }
+
+    // Create the assistant message
+    const assistantMessage: LDMessage = {
+      role: 'assistant',
+      content,
+    };
+
+    return {
+      message: assistantMessage,
+      usage,
+    };
+  }
+
+  /**
+   * LangChain-specific invoke method that accepts LangChain-native message types.
+   * This is the main implementation that does all the tracking and LangChain logic.
+   */
+  async trackLangChainInvoke(
+    messages: (HumanMessage | SystemMessage | AIMessage)[],
+  ): Promise<AIMessage> {
+    // Use the trackMetricsOf helper to handle all tracking automatically
+    return LangChainProvider.trackMetricsOf(this.tracker, () => this._llm.invoke(messages));
+  }
+
+  /**
+   * Get the underlying LangChain model instance.
+   */
+  async getChatModel(): Promise<BaseChatModel> {
+    return this._llm;
+  }
+}
diff --git a/packages/ai-providers/server-ai-langchain/src/index.ts b/packages/ai-providers/server-ai-langchain/src/index.ts
new file mode 100644
index 0000000000..ea8dcd6bf4
--- /dev/null
+++ b/packages/ai-providers/server-ai-langchain/src/index.ts
@@ -0,0 +1,11 @@
+/**
+ * This is the API reference for the LaunchDarkly AI SDK LangChain Provider for Server-Side JavaScript.
+ *
+ * This package provides LangChain integration for the LaunchDarkly AI SDK, allowing you to use
+ * LangChain models and chains with LaunchDarkly's tracking and configuration capabilities.
+ *
+ * @packageDocumentation
+ */
+
+export * from './LangChainTrackedChat';
+export * from './LangChainProvider';
diff --git a/packages/ai-providers/server-ai-langchain/tsconfig.eslint.json b/packages/ai-providers/server-ai-langchain/tsconfig.eslint.json
new file mode 100644
index 0000000000..67f3670709
--- /dev/null
+++ b/packages/ai-providers/server-ai-langchain/tsconfig.eslint.json
@@ -0,0 +1,4 @@
+{
+  "extends": "./tsconfig.json",
+  "include": ["src/**/*", "**/*.test.ts", "**/*.spec.ts"]
+}
diff --git a/packages/ai-providers/server-ai-langchain/tsconfig.json b/packages/ai-providers/server-ai-langchain/tsconfig.json
new file mode 100644
index 0000000000..6238d6a0f5
--- /dev/null
+++ b/packages/ai-providers/server-ai-langchain/tsconfig.json
@@ -0,0 +1,20 @@
+{
+  "compilerOptions": {
+    "target": "ES2020",
+    "module": "CommonJS",
+    "lib": ["ES2020"],
+    "moduleResolution": "node",
+    "esModuleInterop": true,
+    "allowSyntheticDefaultImports": true,
+    "strict": true,
+    "skipLibCheck": true,
"forceConsistentCasingInFileNames": true, + "outDir": "./dist", + "rootDir": "./src", + "declaration": true, + "declarationMap": true, + "sourceMap": true + }, + "include": ["src/**/*"], + "exclude": ["dist", "node_modules", "**/*.test.ts", "**/*.spec.ts"] +} diff --git a/release-please-config.json b/release-please-config.json index 9fc35f4bed..69da9f84fb 100644 --- a/release-please-config.json +++ b/release-please-config.json @@ -1,5 +1,10 @@ { "packages": { + "packages/ai-providers/server-ai-langchain": { + "bump-minor-pre-major": true, + "release-as": "0.1.0-alpha.0", + "prerelease": true + }, "packages/shared/common": {}, "packages/shared/sdk-client": {}, "packages/shared/sdk-server": {},