Skip to content

Commit 50aef7f

Browse files
committed
feat: Add support for TrackedChats in the AI SDK
1 parent f0b9b5d commit 50aef7f

File tree

7 files changed

+281
-0
lines changed

7 files changed

+281
-0
lines changed

packages/sdk/server-ai/src/LDAIClientImpl.ts

Lines changed: 23 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -3,6 +3,7 @@ import * as Mustache from 'mustache';
33
import { LDContext } from '@launchdarkly/js-server-sdk-common';
44

55
import { LDAIAgent, LDAIAgentConfig, LDAIAgentDefaults } from './api/agents';
6+
import { BaseTrackedChat, TrackedChatFactory } from './api/chat';
67
import {
78
LDAIConfig,
89
LDAIConfigTracker,
@@ -222,4 +223,26 @@ export class LDAIClientImpl implements LDAIClient {
222223

223224
return agents;
224225
}
226+
227+
async initChat(
228+
key: string,
229+
context: LDContext,
230+
defaultValue: LDAIDefaults,
231+
variables?: Record<string, unknown>,
232+
): Promise<BaseTrackedChat | undefined> {
233+
// Track chat initialization
234+
this._ldClient.track('$ld:ai:config:function:initChat', context, key, 1);
235+
236+
const config = await this.config(key, context, defaultValue, variables);
237+
238+
// Return null if the configuration is disabled
239+
if (!config.enabled) {
240+
return undefined;
241+
}
242+
243+
// Create the TrackedChat instance based on the provider
244+
const chat = await TrackedChatFactory.create(config, config.tracker);
245+
246+
return chat;
247+
}
225248
}

packages/sdk/server-ai/src/api/LDAIClient.ts

Lines changed: 44 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,7 @@
11
import { LDContext } from '@launchdarkly/js-server-sdk-common';
22

33
import { LDAIAgent, LDAIAgentConfig, LDAIAgentDefaults } from './agents';
4+
import { BaseTrackedChat } from './chat';
45
import { LDAIConfig, LDAIDefaults } from './config/LDAIConfig';
56

67
/**
@@ -143,4 +144,47 @@ export interface LDAIClient {
143144
agentConfigs: T,
144145
context: LDContext,
145146
): Promise<Record<T[number]['key'], LDAIAgent>>;
147+
148+
/**
 * Initializes and returns a new TrackedChat instance for chat interactions.
 * This method serves as the primary entry point for creating TrackedChat
 * instances from configuration.
 *
 * @param key The key identifying the AI chat configuration to use.
 * @param context The standard LDContext used when evaluating flags.
 * @param defaultValue A default value representing a standard AI chat config result.
 * @param variables Dictionary of values for instruction interpolation.
 * @returns A promise that resolves to the TrackedChat instance, or undefined
 *   if the configuration is disabled (the declared return type is
 *   `BaseTrackedChat | undefined`, not null).
 *
 * @example
 * ```
 * const key = "customer_support_chat";
 * const context = {...};
 * const defaultValue = {
 *   config: {
 *     enabled: false,
 *     model: { name: "gpt-4" },
 *     messages: [
 *       { role: "system", content: "You are a helpful customer support agent." }
 *     ]
 *   }
 * };
 * const variables = { customerName: 'John' };
 *
 * const chat = await client.initChat(key, context, defaultValue, variables);
 * if (chat) {
 *   const response = await chat.invoke("I need help with my order");
 *   console.log(response.message.content);
 *
 *   // Access configuration and tracker if needed
 *   console.log('Model:', chat.getConfig().model?.name);
 *   chat.getTracker().trackSuccess();
 * }
 * ```
 */
initChat(
  key: string,
  context: LDContext,
  defaultValue: LDAIDefaults,
  variables?: Record<string, unknown>,
): Promise<BaseTrackedChat | undefined>;
146190
}
Lines changed: 82 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,82 @@
1+
import { LDAIConfig, LDMessage } from '../config/LDAIConfig';
2+
import { LDAIConfigTracker } from '../config/LDAIConfigTracker';
3+
import { ChatResponse } from './TrackedChat';
4+
5+
/**
6+
* Base implementation of TrackedChat that provides common functionality.
7+
* This can be extended by provider-specific implementations.
8+
*/
9+
export abstract class BaseTrackedChat {
10+
protected messages: LDMessage[];
11+
12+
constructor(
13+
protected readonly aiConfig: LDAIConfig,
14+
protected readonly tracker: LDAIConfigTracker,
15+
) {
16+
this.messages = aiConfig.messages || [];
17+
}
18+
19+
/**
20+
* Invoke the chat model with a prompt string.
21+
* This method handles conversation management and tracking, delegating to the provider's invokeModel method.
22+
*/
23+
async invoke(prompt: string): Promise<ChatResponse> {
24+
// Convert prompt string to LDMessage with role 'user' and add to conversation history
25+
const userMessage: LDMessage = {
26+
role: 'user',
27+
content: prompt,
28+
};
29+
this.messages.push(userMessage);
30+
31+
// Delegate to provider-specific implementation with tracking
32+
const response = await this.trackMetricsOf(() => this.invokeModel(this.messages));
33+
34+
// Add the assistant response to the conversation history
35+
this.messages.push(response.message);
36+
37+
return response;
38+
}
39+
40+
/**
41+
* Abstract method that providers must implement to handle the actual model invocation.
42+
* This method should convert messages to provider format, invoke the model, and return a ChatResponse.
43+
*/
44+
protected abstract invokeModel(messages: LDMessage[]): Promise<ChatResponse>;
45+
46+
/**
47+
* Track metrics for a ChatResponse execution.
48+
* This method handles duration tracking, token usage tracking, and success/error tracking.
49+
*/
50+
protected async trackMetricsOf(callable: () => Promise<ChatResponse>): Promise<ChatResponse> {
51+
return this.tracker.trackDurationOf(async () => {
52+
try {
53+
const result = await callable();
54+
55+
// Track token usage if available
56+
if (result.usage) {
57+
this.tracker.trackTokens(result.usage);
58+
}
59+
60+
this.tracker.trackSuccess();
61+
return result;
62+
} catch (error) {
63+
this.tracker.trackError();
64+
throw error;
65+
}
66+
});
67+
}
68+
69+
/**
70+
* Get the underlying AI configuration used to initialize this TrackedChat.
71+
*/
72+
getConfig(): LDAIConfig {
73+
return this.aiConfig;
74+
}
75+
76+
/**
77+
* Get the underlying AI configuration tracker used to initialize this TrackedChat.
78+
*/
79+
getTracker(): LDAIConfigTracker {
80+
return this.tracker;
81+
}
82+
}
Lines changed: 60 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,60 @@
1+
import { LDAIConfig, LDMessage } from '../config/LDAIConfig';
2+
import { LDAIConfigTracker } from '../config/LDAIConfigTracker';
3+
import { LDTokenUsage } from '../metrics/LDTokenUsage';
4+
5+
/**
 * Chat response structure returned by a TrackedChat invocation.
 */
export interface ChatResponse {
  /**
   * The response message from the AI.
   */
  message: LDMessage;

  /**
   * Token usage information, when the provider reports it.
   */
  usage?: LDTokenUsage;

  /**
   * Additional provider-specific metadata.
   */
  metadata?: Record<string, unknown>;
}
24+
25+
/**
 * Interface for provider-specific tracked chat implementations.
 */
export interface ProviderTrackedChat {
  /**
   * Invoke the chat model with the provided messages.
   * This method provides a consistent interface for chat model execution
   * while integrating LaunchDarkly-specific functionality (tracking).
   *
   * @param prompt A prompt string that will be converted to a user message
   *   and added to the conversation history.
   * @returns A promise that resolves to the chat response.
   */
  invoke(prompt: string): Promise<ChatResponse>;

  /**
   * Get the underlying AI configuration used to initialize this TrackedChat.
   *
   * @returns The AI configuration.
   */
  getConfig(): LDAIConfig;

  /**
   * Get the underlying AI configuration tracker used to initialize this TrackedChat.
   *
   * @returns The AI configuration tracker.
   */
  getTracker(): LDAIConfigTracker;

  /**
   * Get the underlying provider-specific chat model instance.
   * This provides direct access to the underlying provider chat model for
   * advanced use cases; the static type is unknown because the concrete
   * model type depends on the provider package.
   *
   * @returns The configured provider-specific chat model instance.
   */
  getChatModel(): unknown;
}
Lines changed: 68 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,68 @@
1+
import { LDAIConfig } from '../config/LDAIConfig';
2+
import { LDAIConfigTracker } from '../config/LDAIConfigTracker';
3+
import { BaseTrackedChat } from './BaseTrackedChat';
4+
5+
/**
6+
* Factory for creating TrackedChat instances based on the provider configuration.
7+
*/
8+
export class TrackedChatFactory {
9+
/**
10+
* Create a TrackedChat instance based on the AI configuration.
11+
* This method attempts to load provider-specific implementations dynamically.
12+
* Returns undefined if the provider is not supported.
13+
*/
14+
static async create(
15+
aiConfig: LDAIConfig,
16+
tracker: LDAIConfigTracker,
17+
): Promise<BaseTrackedChat | undefined> {
18+
const providerName = aiConfig.provider?.name?.toLowerCase();
19+
let trackedChat: BaseTrackedChat | undefined;
20+
21+
// Try specific implementations for the provider
22+
switch (providerName) {
23+
case 'openai':
24+
trackedChat = undefined;
25+
break;
26+
case 'bedrock':
27+
trackedChat = undefined;
28+
break;
29+
default:
30+
trackedChat = undefined;
31+
}
32+
33+
// If no specific implementation worked, try LangChain as fallback
34+
if (!trackedChat) {
35+
trackedChat = await this._createLangChainTrackedChat(aiConfig, tracker);
36+
}
37+
38+
// If LangChain didn't work, try Vercel as fallback
39+
if (!trackedChat) {
40+
// TODO: Return Vercel AI SDK implementation when available
41+
// trackedChat = this._createVercelTrackedChat(aiConfig, tracker);
42+
}
43+
44+
return trackedChat;
45+
}
46+
47+
/**
48+
* Create a LangChain TrackedChat instance if the LangChain provider is available.
49+
*/
50+
private static async _createLangChainTrackedChat(
51+
aiConfig: LDAIConfig,
52+
tracker: LDAIConfigTracker,
53+
): Promise<BaseTrackedChat | undefined> {
54+
try {
55+
// Try to dynamically import the LangChain provider
56+
// This will work if @launchdarkly/server-sdk-ai-langchain is installed
57+
// eslint-disable-next-line @typescript-eslint/no-require-imports, import/no-extraneous-dependencies
58+
const { LangChainTrackedChat, LangChainProvider } = require('@launchdarkly/server-sdk-ai-langchain');
59+
60+
// Build the LLM during factory creation to catch errors early
61+
const llm = await LangChainProvider.createLangChainModel(aiConfig);
62+
return new LangChainTrackedChat(aiConfig, tracker, llm);
63+
} catch (error) {
64+
// If the LangChain provider is not available or LLM creation fails, return undefined
65+
return undefined;
66+
}
67+
}
68+
}
Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,3 @@
1+
export * from './BaseTrackedChat';
2+
export * from './TrackedChat';
3+
export * from './TrackedChatFactory';
Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,5 @@
11
export * from './config';
22
export * from './agents';
3+
export * from './chat';
34
export * from './metrics';
45
export * from './LDAIClient';

0 commit comments

Comments
 (0)