diff --git a/packages/sdk/server-ai/src/LDAIClientImpl.ts b/packages/sdk/server-ai/src/LDAIClientImpl.ts
index da50b04399..4ad0021565 100644
--- a/packages/sdk/server-ai/src/LDAIClientImpl.ts
+++ b/packages/sdk/server-ai/src/LDAIClientImpl.ts
@@ -3,6 +3,7 @@ import * as Mustache from 'mustache';
 import { LDContext } from '@launchdarkly/js-server-sdk-common';
 
 import { LDAIAgent, LDAIAgentConfig, LDAIAgentDefaults } from './api/agents';
+import { BaseTrackedChat, TrackedChatFactory } from './api/chat';
 import {
   LDAIConfig,
   LDAIConfigTracker,
@@ -222,4 +223,26 @@ export class LDAIClientImpl implements LDAIClient {
 
     return agents;
   }
+
+  async initChat(
+    key: string,
+    context: LDContext,
+    defaultValue: LDAIDefaults,
+    variables?: Record<string, unknown>,
+  ): Promise<BaseTrackedChat | undefined> {
+    // Track chat initialization
+    this._ldClient.track('$ld:ai:config:function:initChat', context, key, 1);
+
+    const config = await this.config(key, context, defaultValue, variables);
+
+    // Return undefined if the configuration is disabled
+    if (!config.enabled) {
+      return undefined;
+    }
+
+    // Create the TrackedChat instance based on the provider
+    const chat = await TrackedChatFactory.create(config, config.tracker);
+
+    return chat;
+  }
 }
diff --git a/packages/sdk/server-ai/src/api/LDAIClient.ts b/packages/sdk/server-ai/src/api/LDAIClient.ts
index be02e887d1..03d17c1d13 100644
--- a/packages/sdk/server-ai/src/api/LDAIClient.ts
+++ b/packages/sdk/server-ai/src/api/LDAIClient.ts
@@ -1,6 +1,7 @@
 import { LDContext } from '@launchdarkly/js-server-sdk-common';
 
 import { LDAIAgent, LDAIAgentConfig, LDAIAgentDefaults } from './agents';
+import { BaseTrackedChat } from './chat';
 import { LDAIConfig, LDAIDefaults } from './config/LDAIConfig';
 
 /**
@@ -143,4 +144,47 @@ export interface LDAIClient {
     agentConfigs: T,
     context: LDContext,
   ): Promise<Record<T[number]['key'], LDAIAgent>>;
+
+  /**
+   * Initializes and returns a new TrackedChat instance for chat interactions.
+   * This method serves as the primary entry point for creating TrackedChat instances from configuration.
+   *
+   * @param key The key identifying the AI chat configuration to use.
+   * @param context The standard LDContext used when evaluating flags.
+   * @param defaultValue A default value representing a standard AI chat config result.
+   * @param variables Dictionary of values for instruction interpolation.
+   * @returns A promise that resolves to the TrackedChat instance, or undefined if the configuration is disabled.
+   *
+   * @example
+   * ```
+   * const key = "customer_support_chat";
+   * const context = {...};
+   * const defaultValue = {
+   *   config: {
+   *     enabled: false,
+   *     model: { name: "gpt-4" },
+   *     messages: [
+   *       { role: "system", content: "You are a helpful customer support agent." }
+   *     ]
+   *   }
+   * };
+   * const variables = { customerName: 'John' };
+   *
+   * const chat = await client.initChat(key, context, defaultValue, variables);
+   * if (chat) {
+   *   const response = await chat.invoke("I need help with my order");
+   *   console.log(response.message.content);
+   *
+   *   // Access configuration and tracker if needed
+   *   console.log('Model:', chat.getConfig().model?.name);
+   *   chat.getTracker().trackSuccess();
+   * }
+   * ```
+   */
+  initChat(
+    key: string,
+    context: LDContext,
+    defaultValue: LDAIDefaults,
+    variables?: Record<string, unknown>,
+  ): Promise<BaseTrackedChat | undefined>;
 }
diff --git a/packages/sdk/server-ai/src/api/chat/BaseTrackedChat.ts b/packages/sdk/server-ai/src/api/chat/BaseTrackedChat.ts
new file mode 100644
index 0000000000..e30c4bb1fe
--- /dev/null
+++ b/packages/sdk/server-ai/src/api/chat/BaseTrackedChat.ts
@@ -0,0 +1,82 @@
+import { LDAIConfig, LDMessage } from '../config/LDAIConfig';
+import { LDAIConfigTracker } from '../config/LDAIConfigTracker';
+import { ChatResponse } from './TrackedChat';
+
+/**
+ * Base implementation of TrackedChat that provides common functionality.
+ * This can be extended by provider-specific implementations.
+ */
+export abstract class BaseTrackedChat {
+  protected messages: LDMessage[];
+
+  constructor(
+    protected readonly aiConfig: LDAIConfig,
+    protected readonly tracker: LDAIConfigTracker,
+  ) {
+    this.messages = aiConfig.messages || [];
+  }
+
+  /**
+   * Invoke the chat model with a prompt string.
+   * This method handles conversation management and tracking, delegating to the provider's invokeModel method.
+   */
+  async invoke(prompt: string): Promise<ChatResponse> {
+    // Convert the prompt string to an LDMessage with role 'user' and add it to the conversation history
+    const userMessage: LDMessage = {
+      role: 'user',
+      content: prompt,
+    };
+    this.messages.push(userMessage);
+
+    // Delegate to the provider-specific implementation with tracking
+    const response = await this.trackMetricsOf(() => this.invokeModel(this.messages));
+
+    // Add the assistant response to the conversation history
+    this.messages.push(response.message);
+
+    return response;
+  }
+
+  /**
+   * Abstract method that providers must implement to handle the actual model invocation.
+   * This method should convert messages to provider format, invoke the model, and return a ChatResponse.
+   */
+  protected abstract invokeModel(messages: LDMessage[]): Promise<ChatResponse>;
+
+  /**
+   * Track metrics for a ChatResponse execution.
+   * This method handles duration tracking, token usage tracking, and success/error tracking.
+   */
+  protected async trackMetricsOf(callable: () => Promise<ChatResponse>): Promise<ChatResponse> {
+    return this.tracker.trackDurationOf(async () => {
+      try {
+        const result = await callable();
+
+        // Track token usage if available
+        if (result.usage) {
+          this.tracker.trackTokens(result.usage);
+        }
+
+        this.tracker.trackSuccess();
+        return result;
+      } catch (error) {
+        this.tracker.trackError();
+        throw error;
+      }
+    });
+  }
+
+  /**
+   * Get the underlying AI configuration used to initialize this TrackedChat.
+   */
+  getConfig(): LDAIConfig {
+    return this.aiConfig;
+  }
+
+  /**
+   * Get the underlying AI configuration tracker used to initialize this TrackedChat.
+   */
+  getTracker(): LDAIConfigTracker {
+    return this.tracker;
+  }
+}
diff --git a/packages/sdk/server-ai/src/api/chat/TrackedChat.ts b/packages/sdk/server-ai/src/api/chat/TrackedChat.ts
new file mode 100644
index 0000000000..f09a92f99c
--- /dev/null
+++ b/packages/sdk/server-ai/src/api/chat/TrackedChat.ts
@@ -0,0 +1,60 @@
+import { LDAIConfig, LDMessage } from '../config/LDAIConfig';
+import { LDAIConfigTracker } from '../config/LDAIConfigTracker';
+import { LDTokenUsage } from '../metrics/LDTokenUsage';
+
+/**
+ * Chat response structure.
+ */
+export interface ChatResponse {
+  /**
+   * The response message from the AI.
+   */
+  message: LDMessage;
+
+  /**
+   * Token usage information.
+   */
+  usage?: LDTokenUsage;
+
+  /**
+   * Additional metadata from the provider.
+   */
+  metadata?: Record<string, unknown>;
+}
+
+/**
+ * Interface for provider-specific tracked chat implementations.
+ */
+export interface ProviderTrackedChat {
+  /**
+   * Invoke the chat model with the provided prompt.
+   * This method provides a consistent interface for chat model execution while integrating
+   * LaunchDarkly-specific functionality.
+   *
+   * @param prompt A prompt string that will be converted to a user message and added to the conversation history.
+   * @returns A promise that resolves to the chat response.
+   */
+  invoke(prompt: string): Promise<ChatResponse>;
+
+  /**
+   * Get the underlying AI configuration used to initialize this TrackedChat.
+   *
+   * @returns The AI configuration.
+   */
+  getConfig(): LDAIConfig;
+
+  /**
+   * Get the underlying AI configuration tracker used to initialize this TrackedChat.
+   *
+   * @returns The AI configuration tracker.
+   */
+  getTracker(): LDAIConfigTracker;
+
+  /**
+   * Get the underlying provider-specific chat model instance.
+   * This provides direct access to the underlying provider chat model for advanced use cases.
+   *
+   * @returns The configured provider-specific chat model instance.
+   */
+  getChatModel(): unknown;
+}
diff --git a/packages/sdk/server-ai/src/api/chat/TrackedChatFactory.ts b/packages/sdk/server-ai/src/api/chat/TrackedChatFactory.ts
new file mode 100644
index 0000000000..d6e3e17682
--- /dev/null
+++ b/packages/sdk/server-ai/src/api/chat/TrackedChatFactory.ts
@@ -0,0 +1,68 @@
+import { LDAIConfig } from '../config/LDAIConfig';
+import { LDAIConfigTracker } from '../config/LDAIConfigTracker';
+import { BaseTrackedChat } from './BaseTrackedChat';
+
+/**
+ * Factory for creating TrackedChat instances based on the provider configuration.
+ */
+export class TrackedChatFactory {
+  /**
+   * Create a TrackedChat instance based on the AI configuration.
+   * This method attempts to load provider-specific implementations dynamically.
+   * Returns undefined if the provider is not supported.
+   */
+  static async create(
+    aiConfig: LDAIConfig,
+    tracker: LDAIConfigTracker,
+  ): Promise<BaseTrackedChat | undefined> {
+    const providerName = aiConfig.provider?.name?.toLowerCase();
+    let trackedChat: BaseTrackedChat | undefined;
+
+    // Try specific implementations for the provider
+    switch (providerName) {
+      case 'openai':
+        trackedChat = undefined;
+        break;
+      case 'bedrock':
+        trackedChat = undefined;
+        break;
+      default:
+        trackedChat = undefined;
+    }
+
+    // If no specific implementation worked, try LangChain as a fallback
+    if (!trackedChat) {
+      trackedChat = await this._createLangChainTrackedChat(aiConfig, tracker);
+    }
+
+    // If LangChain didn't work, try Vercel as a fallback
+    if (!trackedChat) {
+      // TODO: Return Vercel AI SDK implementation when available
+      // trackedChat = this._createVercelTrackedChat(aiConfig, tracker);
+    }
+
+    return trackedChat;
+  }
+
+  /**
+   * Create a LangChain TrackedChat instance if the LangChain provider is available.
+   */
+  private static async _createLangChainTrackedChat(
+    aiConfig: LDAIConfig,
+    tracker: LDAIConfigTracker,
+  ): Promise<BaseTrackedChat | undefined> {
+    try {
+      // Try to dynamically import the LangChain provider.
+      // This will work if @launchdarkly/server-sdk-ai-langchain is installed.
+      // eslint-disable-next-line @typescript-eslint/no-require-imports, import/no-extraneous-dependencies
+      const { LangChainTrackedChat, LangChainProvider } = require('@launchdarkly/server-sdk-ai-langchain');
+
+      // Build the LLM during factory creation to catch errors early
+      const llm = await LangChainProvider.createLangChainModel(aiConfig);
+      return new LangChainTrackedChat(aiConfig, tracker, llm);
+    } catch (error) {
+      // If the LangChain provider is not available or LLM creation fails, return undefined
+      return undefined;
+    }
+  }
+}
diff --git a/packages/sdk/server-ai/src/api/chat/index.ts b/packages/sdk/server-ai/src/api/chat/index.ts
new file mode 100644
index 0000000000..b24488a858
--- /dev/null
+++ b/packages/sdk/server-ai/src/api/chat/index.ts
@@ -0,0 +1,3 @@
+export * from './BaseTrackedChat';
+export * from './TrackedChat';
+export * from './TrackedChatFactory';
diff --git a/packages/sdk/server-ai/src/api/index.ts b/packages/sdk/server-ai/src/api/index.ts
index cd6333b027..1581770896 100644
--- a/packages/sdk/server-ai/src/api/index.ts
+++ b/packages/sdk/server-ai/src/api/index.ts
@@ -1,4 +1,5 @@
 export * from './config';
 export * from './agents';
+export * from './chat';
 export * from './metrics';
 export * from './LDAIClient';
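For illustration, a provider package would integrate with this change by subclassing `BaseTrackedChat` and implementing `invokeModel`. The sketch below is not part of the diff: it assumes the chat types are re-exported from the `@launchdarkly/server-sdk-ai` package root, and `ExampleProviderClient` with its `chat()` method is a hypothetical stand-in for a real provider SDK.

```ts
import {
  BaseTrackedChat,
  ChatResponse,
  LDAIConfig,
  LDAIConfigTracker,
  LDMessage,
} from '@launchdarkly/server-sdk-ai';

// Hypothetical provider SDK client; a real provider's chat API would be used here.
interface ExampleProviderClient {
  chat(args: { model: string; messages: LDMessage[] }): Promise<{
    content: string;
    inputTokens: number;
    outputTokens: number;
  }>;
}

class ExampleTrackedChat extends BaseTrackedChat {
  constructor(
    aiConfig: LDAIConfig,
    tracker: LDAIConfigTracker,
    private readonly _client: ExampleProviderClient,
  ) {
    super(aiConfig, tracker);
  }

  protected async invokeModel(messages: LDMessage[]): Promise<ChatResponse> {
    // Pass the accumulated conversation history to the provider model.
    const result = await this._client.chat({
      model: this.aiConfig.model?.name ?? 'default-model',
      messages,
    });

    // Map the provider response back into the SDK's ChatResponse shape so that
    // BaseTrackedChat can record token usage and append the assistant message.
    return {
      message: { role: 'assistant', content: result.content },
      usage: {
        input: result.inputTokens,
        output: result.outputTokens,
        total: result.inputTokens + result.outputTokens,
      },
    };
  }
}
```

Conversation management, duration, token, and success/error tracking all stay in `BaseTrackedChat.invoke`, so a subclass like this only has to translate between `LDMessage[]` and the provider's own request/response types.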