Skip to content

Commit 88d2cb0

Browse files
authored
Merge branch 'main' into jb/sdk-1522/structed-model-provider-langchain
2 parents 24d7520 + 6ecd9ab commit 88d2cb0

Some content is hidden

Large commits have some content hidden by default. Use the searchbox below for content that may be hidden.

48 files changed

+2560
-748
lines changed

.release-please-manifest.json

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,15 +1,15 @@
11
{
2-
"packages/ai-providers/server-ai-langchain": "0.1.3",
3-
"packages/ai-providers/server-ai-openai": "0.1.2",
4-
"packages/ai-providers/server-ai-vercel": "0.1.2",
2+
"packages/ai-providers/server-ai-langchain": "0.2.0",
3+
"packages/ai-providers/server-ai-openai": "0.2.0",
4+
"packages/ai-providers/server-ai-vercel": "0.2.0",
55
"packages/sdk/akamai-base": "3.0.10",
66
"packages/sdk/akamai-edgekv": "1.4.12",
77
"packages/sdk/browser": "0.8.1",
88
"packages/sdk/cloudflare": "2.7.10",
99
"packages/sdk/combined-browser": "0.0.0",
1010
"packages/sdk/fastly": "0.2.1",
1111
"packages/sdk/react-native": "10.12.0",
12-
"packages/sdk/server-ai": "0.12.3",
12+
"packages/sdk/server-ai": "0.13.0",
1313
"packages/sdk/server-node": "9.10.2",
1414
"packages/sdk/vercel": "1.3.34",
1515
"packages/shared/akamai-edgeworker-sdk": "2.0.10",

packages/ai-providers/server-ai-langchain/CHANGELOG.md

Lines changed: 14 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,19 @@
11
# Changelog
22

3+
## [0.2.0](https://github.com/launchdarkly/js-core/compare/server-sdk-ai-langchain-v0.1.3...server-sdk-ai-langchain-v0.2.0) (2025-11-04)
4+
5+
6+
### Features
7+
8+
* Renamed createAIMetrics to getAIMetricsFromResponse ([#977](https://github.com/launchdarkly/js-core/issues/977)) ([05b4667](https://github.com/launchdarkly/js-core/commit/05b4667fe6385672f89c84d49302ce40f99e28d5))
9+
10+
11+
### Dependencies
12+
13+
* The following workspace dependencies were updated
14+
* devDependencies
15+
* @launchdarkly/server-sdk-ai bumped from ^0.12.3 to ^0.13.0
16+
317
## [0.1.3](https://github.com/launchdarkly/js-core/compare/server-sdk-ai-langchain-v0.1.2...server-sdk-ai-langchain-v0.1.3) (2025-10-24)
418

519

packages/ai-providers/server-ai-langchain/README.md

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -94,7 +94,7 @@ const allMessages = [...LangChainProvider.convertMessagesToLangChain(configMessa
9494

9595
// Track the model call with LaunchDarkly tracking
9696
const response = await aiConfig.tracker.trackMetricsOf(
97-
(result) => LangChainProvider.createAIMetrics(result),
97+
LangChainProvider.getAIMetricsFromResponse,
9898
() => llm.invoke(allMessages)
9999
);
100100

packages/ai-providers/server-ai-langchain/__tests__/LangChainProvider.test.ts

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -73,7 +73,7 @@ describe('LangChainProvider', () => {
7373
});
7474
});
7575

76-
describe('createAIMetrics', () => {
76+
describe('getAIMetricsFromResponse', () => {
7777
it('creates metrics with success=true and token usage', () => {
7878
const mockResponse = new AIMessage('Test response');
7979
mockResponse.response_metadata = {
@@ -84,7 +84,7 @@ describe('LangChainProvider', () => {
8484
},
8585
};
8686

87-
const result = LangChainProvider.createAIMetrics(mockResponse);
87+
const result = LangChainProvider.getAIMetricsFromResponse(mockResponse);
8888

8989
expect(result).toEqual({
9090
success: true,
@@ -99,7 +99,7 @@ describe('LangChainProvider', () => {
9999
it('creates metrics with success=true and no usage when metadata is missing', () => {
100100
const mockResponse = new AIMessage('Test response');
101101

102-
const result = LangChainProvider.createAIMetrics(mockResponse);
102+
const result = LangChainProvider.getAIMetricsFromResponse(mockResponse);
103103

104104
expect(result).toEqual({
105105
success: true,

packages/ai-providers/server-ai-langchain/package.json

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
{
22
"name": "@launchdarkly/server-sdk-ai-langchain",
3-
"version": "0.1.3",
3+
"version": "0.2.0",
44
"description": "LaunchDarkly AI SDK LangChain Provider for Server-Side JavaScript",
55
"homepage": "https://github.com/launchdarkly/js-core/tree/main/packages/ai-providers/server-ai-langchain",
66
"repository": {
@@ -28,7 +28,7 @@
2828
"license": "Apache-2.0",
2929
"devDependencies": {
3030
"@langchain/core": "^0.3.0",
31-
"@launchdarkly/server-sdk-ai": "^0.12.0",
31+
"@launchdarkly/server-sdk-ai": "^0.13.0",
3232
"@trivago/prettier-plugin-sort-imports": "^4.1.1",
3333
"@types/jest": "^29.5.3",
3434
"@typescript-eslint/eslint-plugin": "^6.20.0",

packages/ai-providers/server-ai-langchain/src/LangChainProvider.ts

Lines changed: 22 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -53,7 +53,7 @@ export class LangChainProvider extends AIProvider {
5353
const response: AIMessage = await this._llm.invoke(langchainMessages);
5454

5555
// Generate metrics early (assumes success by default)
56-
const metrics = LangChainProvider.createAIMetrics(response);
56+
const metrics = LangChainProvider.getAIMetricsFromResponse(response);
5757

5858
// Extract text content from the response
5959
let content: string = '';
@@ -170,25 +170,27 @@ export class LangChainProvider extends AIProvider {
170170
}
171171

172172
/**
173-
* Create AI metrics information from a LangChain provider response.
173+
* Get AI metrics from a LangChain provider response.
174174
* This method extracts token usage information and success status from LangChain responses
175175
* and returns a LaunchDarkly AIMetrics object.
176176
*
177-
* @param langChainResponse The response from the LangChain model
177+
* @param response The response from the LangChain model
178+
* @returns LDAIMetrics with success status and token usage
179+
*
178180
* @example
179181
* ```typescript
180182
* // Use with tracker.trackMetricsOf for automatic tracking
181183
* const response = await tracker.trackMetricsOf(
182-
* (result: AIMessage) => LangChainProvider.createAIMetrics(result),
184+
* LangChainProvider.getAIMetricsFromResponse,
183185
* () => llm.invoke(messages)
184186
* );
185187
* ```
186188
*/
187-
static createAIMetrics(langChainResponse: AIMessage): LDAIMetrics {
189+
static getAIMetricsFromResponse(response: AIMessage): LDAIMetrics {
188190
// Extract token usage if available
189191
let usage: LDTokenUsage | undefined;
190-
if (langChainResponse?.response_metadata?.tokenUsage) {
191-
const { tokenUsage } = langChainResponse.response_metadata;
192+
if (response?.response_metadata?.tokenUsage) {
193+
const { tokenUsage } = response.response_metadata;
192194
usage = {
193195
total: tokenUsage.totalTokens || 0,
194196
input: tokenUsage.promptTokens || 0,
@@ -203,6 +205,19 @@ export class LangChainProvider extends AIProvider {
203205
};
204206
}
205207

208+
/**
209+
* Create AI metrics information from a LangChain provider response.
210+
* This method extracts token usage information and success status from LangChain responses
211+
* and returns a LaunchDarkly AIMetrics object.
212+
*
213+
* @deprecated Use `getAIMetricsFromResponse()` instead.
214+
* @param langChainResponse The response from the LangChain model
215+
* @returns LDAIMetrics with success status and token usage
216+
*/
217+
static createAIMetrics(langChainResponse: AIMessage): LDAIMetrics {
218+
return LangChainProvider.getAIMetricsFromResponse(langChainResponse);
219+
}
220+
206221
/**
207222
* Convert LaunchDarkly messages to LangChain messages.
208223
* This helper method enables developers to work directly with LangChain message types

packages/ai-providers/server-ai-openai/CHANGELOG.md

Lines changed: 14 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,19 @@
11
# Changelog
22

3+
## [0.2.0](https://github.com/launchdarkly/js-core/compare/server-sdk-ai-openai-v0.1.2...server-sdk-ai-openai-v0.2.0) (2025-11-04)
4+
5+
6+
### Features
7+
8+
* Renamed createAIMetrics to getAIMetricsFromResponse ([#977](https://github.com/launchdarkly/js-core/issues/977)) ([05b4667](https://github.com/launchdarkly/js-core/commit/05b4667fe6385672f89c84d49302ce40f99e28d5))
9+
10+
11+
### Dependencies
12+
13+
* The following workspace dependencies were updated
14+
* devDependencies
15+
* @launchdarkly/server-sdk-ai bumped from ^0.12.3 to ^0.13.0
16+
317
## [0.1.2](https://github.com/launchdarkly/js-core/compare/server-sdk-ai-openai-v0.1.1...server-sdk-ai-openai-v0.1.2) (2025-10-24)
418

519

packages/ai-providers/server-ai-openai/README.md

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -78,7 +78,7 @@ const allMessages = [...configMessages, userMessage];
7878

7979
// Track the model call with LaunchDarkly tracking
8080
const response = await aiConfig.tracker.trackMetricsOf(
81-
(result) => OpenAIProvider.createAIMetrics(result),
81+
OpenAIProvider.getAIMetricsFromResponse,
8282
() => client.chat.completions.create({
8383
model: 'gpt-4',
8484
messages: allMessages,

packages/ai-providers/server-ai-openai/__tests__/OpenAIProvider.test.ts

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -25,7 +25,7 @@ describe('OpenAIProvider', () => {
2525
provider = new OpenAIProvider(mockOpenAI, 'gpt-3.5-turbo', {});
2626
});
2727

28-
describe('createAIMetrics', () => {
28+
describe('getAIMetricsFromResponse', () => {
2929
it('creates metrics with success=true and token usage', () => {
3030
const mockResponse = {
3131
usage: {
@@ -35,7 +35,7 @@ describe('OpenAIProvider', () => {
3535
},
3636
};
3737

38-
const result = OpenAIProvider.createAIMetrics(mockResponse);
38+
const result = OpenAIProvider.getAIMetricsFromResponse(mockResponse);
3939

4040
expect(result).toEqual({
4141
success: true,
@@ -50,7 +50,7 @@ describe('OpenAIProvider', () => {
5050
it('creates metrics with success=true and no usage when usage is missing', () => {
5151
const mockResponse = {};
5252

53-
const result = OpenAIProvider.createAIMetrics(mockResponse);
53+
const result = OpenAIProvider.getAIMetricsFromResponse(mockResponse);
5454

5555
expect(result).toEqual({
5656
success: true,
@@ -66,7 +66,7 @@ describe('OpenAIProvider', () => {
6666
},
6767
};
6868

69-
const result = OpenAIProvider.createAIMetrics(mockResponse);
69+
const result = OpenAIProvider.getAIMetricsFromResponse(mockResponse);
7070

7171
expect(result).toEqual({
7272
success: true,

packages/ai-providers/server-ai-openai/package.json

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
{
22
"name": "@launchdarkly/server-sdk-ai-openai",
3-
"version": "0.1.2",
3+
"version": "0.2.0",
44
"description": "LaunchDarkly AI SDK OpenAI Provider for Server-Side JavaScript",
55
"homepage": "https://github.com/launchdarkly/js-core/tree/main/packages/ai-providers/server-ai-openai",
66
"repository": {
@@ -28,7 +28,7 @@
2828
"license": "Apache-2.0",
2929
"devDependencies": {
3030
"@launchdarkly/js-server-sdk-common": "2.16.2",
31-
"@launchdarkly/server-sdk-ai": "^0.12.3",
31+
"@launchdarkly/server-sdk-ai": "^0.13.0",
3232
"@trivago/prettier-plugin-sort-imports": "^4.1.1",
3333
"@types/jest": "^29.5.3",
3434
"@typescript-eslint/eslint-plugin": "^6.20.0",

0 commit comments

Comments (0)