
Commit fcad822

Merge branch 'dob/modelConfig' of github.com:launchdarkly/js-server-sdk-private into dob/modelConfig
2 parents: f95775b + 5ea8a83

18 files changed, +261 -164 lines changed

packages/sdk/ai/package.json

Lines changed: 3 additions & 3 deletions
@@ -5,9 +5,9 @@
   "main": "dist/index.js",
   "types": "dist/index.d.ts",
   "scripts": {
-    "build": "tsc",
-    "test": "jest",
-    "lint": "eslint . --ext .ts"
+    "build": "npx tsc",
+    "lint": "npx eslint . --ext .ts",
+    "lint:fix": "yarn run lint --fix"
   },
   "keywords": [
     "launchdarkly",

packages/sdk/ai/src/LDAIConfigTracker.ts

Lines changed: 0 additions & 77 deletions
This file was deleted.
Lines changed: 100 additions & 0 deletions
@@ -0,0 +1,100 @@
+import { LDClient, LDContext } from '@launchdarkly/node-server-sdk';
+
+import { LDAIConfigTracker } from './api/config';
+import { createBedrockTokenUsage, FeedbackKind, TokenUsage } from './api/metrics';
+import { createOpenAiUsage } from './api/metrics/OpenAiUsage';
+
+export class LDAIConfigTrackerImpl implements LDAIConfigTracker {
+  private _ldClient: LDClient;
+  private _variationId: string;
+  private _configKey: string;
+  private _context: LDContext;
+
+  constructor(ldClient: LDClient, configKey: string, versionId: string, context: LDContext) {
+    this._ldClient = ldClient;
+    this._variationId = versionId;
+    this._configKey = configKey;
+    this._context = context;
+  }
+
+  private _getTrackData(): { variationId: string; configKey: string } {
+    return {
+      variationId: this._variationId,
+      configKey: this._configKey,
+    };
+  }
+
+  trackDuration(duration: number): void {
+    this._ldClient.track('$ld:ai:duration:total', this._context, this._getTrackData(), duration);
+  }
+
+  async trackDurationOf(func: (...args: any[]) => Promise<any>, ...args: any[]): Promise<any> {
+    const startTime = Date.now();
+    const result = await func(...args);
+    const endTime = Date.now();
+    const duration = endTime - startTime; // duration in milliseconds
+    this.trackDuration(duration);
+    return result;
+  }
+
+  trackError(error: number): void {
+    this._ldClient.track('$ld:ai:error', this._context, this._getTrackData(), error);
+  }
+
+  trackFeedback(feedback: { kind: FeedbackKind }): void {
+    if (feedback.kind === FeedbackKind.Positive) {
+      this._ldClient.track('$ld:ai:feedback:user:positive', this._context, this._getTrackData(), 1);
+    } else if (feedback.kind === FeedbackKind.Negative) {
+      this._ldClient.track('$ld:ai:feedback:user:negative', this._context, this._getTrackData(), 1);
+    }
+  }
+
+  trackGeneration(generation: number): void {
+    this._ldClient.track('$ld:ai:generation', this._context, this._getTrackData(), generation);
+  }
+
+  async trackOpenAI(func: (...args: any[]) => Promise<any>, ...args: any[]): Promise<any> {
+    const result = await this.trackDurationOf(func, ...args);
+    this.trackGeneration(1);
+    if (result.usage) {
+      this.trackTokens(createOpenAiUsage(result.usage));
+    }
+    return result;
+  }
+
+  async trackBedrockConverse(res: {
+    $metadata?: { httpStatusCode: number };
+    metrics?: { latencyMs: number };
+    usage?: {
+      inputTokens: number;
+      outputTokens: number;
+      totalTokens: number;
+    };
+  }): Promise<any> {
+    if (res.$metadata?.httpStatusCode === 200) {
+      this.trackGeneration(1);
+    } else if (res.$metadata?.httpStatusCode && res.$metadata.httpStatusCode >= 400) {
+      this.trackError(res.$metadata.httpStatusCode);
+    }
+    if (res.metrics) {
+      this.trackDuration(res.metrics.latencyMs);
+    }
+    if (res.usage) {
+      this.trackTokens(createBedrockTokenUsage(res.usage));
+    }
+    return res;
+  }
+
+  trackTokens(tokens: TokenUsage): void {
+    const trackData = this._getTrackData();
+    if (tokens.total > 0) {
+      this._ldClient.track('$ld:ai:tokens:total', this._context, trackData, tokens.total);
+    }
+    if (tokens.input > 0) {
+      this._ldClient.track('$ld:ai:tokens:input', this._context, trackData, tokens.input);
+    }
+    if (tokens.output > 0) {
+      this._ldClient.track('$ld:ai:tokens:output', this._context, trackData, tokens.output);
+    }
+  }
+}
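
A minimal usage sketch (not part of this commit) of the new tracker wrapping an OpenAI chat completion; the SDK key, context, config key, variation ID, model name, and import path are placeholders:

import { init, LDContext } from '@launchdarkly/node-server-sdk';
import OpenAI from 'openai';

import { LDAIConfigTrackerImpl } from './LDAIConfigTrackerImpl'; // hypothetical path for the new file above

async function main() {
  const ldClient = init('sdk-key'); // placeholder SDK key
  const openai = new OpenAI(); // reads OPENAI_API_KEY from the environment
  const context: LDContext = { kind: 'user', key: 'example-user' };

  // 'my-ai-config' and 'variation-1' stand in for a real config key and variation ID.
  const tracker = new LDAIConfigTrackerImpl(ldClient, 'my-ai-config', 'variation-1', context);

  // trackOpenAI times the call, records one generation, and, when the response
  // includes a usage object, reports token counts via createOpenAiUsage.
  const completion = await tracker.trackOpenAI(() =>
    openai.chat.completions.create({
      model: 'gpt-4o-mini',
      messages: [{ role: 'user', content: 'Hello!' }],
    }),
  );

  console.log(completion.choices[0].message.content);
}

main();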

packages/sdk/ai/src/api/config/LDAIConfig.ts

Lines changed: 7 additions & 2 deletions
@@ -5,12 +5,17 @@ import { LDAIConfigTracker } from './LDAIConfigTracker';
  */
 export interface LDAIConfig {
   /**
-   * The result of the AI Config evaluation.
+   * The result of the AI Config customization.
    */
   config: unknown;

   /**
-   * A tracker which can be used to generate analytics for the migration.
+   * A tracker which can be used to generate analytics.
    */
   tracker: LDAIConfigTracker;
+
+  /**
+   * Whether the configuration is not found.
+   */
+  noConfiguration: boolean;
 }
Lines changed: 5 additions & 2 deletions
@@ -1,9 +1,12 @@
-import { BedrockTokenUsage, FeedbackKind, TokenUsage, UnderscoreTokenUsage } from '../metrics';
+import { FeedbackKind, TokenUsage } from '../metrics';

 export interface LDAIConfigTracker {
   trackDuration: (duration: number) => void;
-  trackTokens: (tokens: TokenUsage | UnderscoreTokenUsage | BedrockTokenUsage) => void;
+  trackTokens: (tokens: TokenUsage) => void;
   trackError: (error: number) => void;
   trackGeneration: (generation: number) => void;
   trackFeedback: (feedback: { kind: FeedbackKind }) => void;
+  trackDurationOf: (func: (...args: any[]) => Promise<any>, ...args: any[]) => Promise<any>;
+  trackOpenAI: (func: (...args: any[]) => Promise<any>, ...args: any[]) => any;
+  trackBedrockConverse: (res: any) => any;
 }
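
For illustration, a hedged sketch of the generic helpers added to this interface; `tracker` stands for any LDAIConfigTracker (such as the implementation above) and `callMyModel` is a hypothetical async model call:

import { LDAIConfigTracker } from './api/config';
import { FeedbackKind } from './api/metrics';

// Both declarations are stand-ins: `tracker` is whatever LDAIConfigTracker is in scope,
// and `callMyModel` represents an arbitrary async model invocation.
declare const tracker: LDAIConfigTracker;
declare function callMyModel(prompt: string): Promise<string>;

async function example(): Promise<string> {
  // trackDurationOf measures the wall-clock time of the wrapped call and reports it
  // through trackDuration before returning the call's result.
  const answer = await tracker.trackDurationOf((prompt: string) => callMyModel(prompt), 'Hello!');

  // Feedback is emitted as a count of 1 against the positive or negative event key.
  tracker.trackFeedback({ kind: FeedbackKind.Positive });

  return answer;
}
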
Lines changed: 1 addition & 1 deletion
@@ -1,2 +1,2 @@
 export * from './LDAIConfig';
-export * from './LDAIConfigTracker';
+export { LDAIConfigTracker } from './LDAIConfigTracker';

packages/sdk/ai/src/api/index.ts

Lines changed: 2 additions & 0 deletions
@@ -0,0 +1,2 @@
+export * from './config';
+export * from './metrics';
Lines changed: 10 additions & 2 deletions
@@ -1,5 +1,13 @@
-export interface BedrockTokenUsage {
+import { TokenUsage } from './TokenUsage';
+
+export function createBedrockTokenUsage(data: {
+  totalTokens: number;
   inputTokens: number;
   outputTokens: number;
-  totalTokens: number;
+}): TokenUsage {
+  return {
+    total: data.totalTokens || 0,
+    input: data.inputTokens || 0,
+    output: data.outputTokens || 0,
+  };
 }
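
A hedged sketch of how a Bedrock Converse response might flow into the tracker; the AWS client setup, region, and model ID are assumptions, and `tracker` is a stand-in for an LDAIConfigTracker instance:

import { BedrockRuntimeClient, ConverseCommand } from '@aws-sdk/client-bedrock-runtime';

import { LDAIConfigTracker } from './api/config';

// `tracker` is any LDAIConfigTracker (e.g. the LDAIConfigTrackerImpl built earlier);
// the region and model ID below are placeholders.
declare const tracker: LDAIConfigTracker;
const bedrock = new BedrockRuntimeClient({ region: 'us-east-1' });

async function converseExample() {
  const response = await bedrock.send(
    new ConverseCommand({
      modelId: 'anthropic.claude-3-haiku-20240307-v1:0',
      messages: [{ role: 'user', content: [{ text: 'Hello!' }] }],
    }),
  );

  // trackBedrockConverse reads $metadata.httpStatusCode (generation vs. error),
  // metrics.latencyMs (duration), and usage, which createBedrockTokenUsage maps from
  // inputTokens/outputTokens/totalTokens to the SDK's total/input/output shape.
  await tracker.trackBedrockConverse(response);
}
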
Lines changed: 19 additions & 0 deletions
@@ -0,0 +1,19 @@
+export class OpenAITokenUsage {
+  total_tokens: number;
+  prompt_tokens: number;
+  completion_tokens: number;
+
+  constructor(data: any) {
+    this.total_tokens = data.total_tokens;
+    this.prompt_tokens = data.prompt_tokens;
+    this.completion_tokens = data.completion_tokens;
+  }
+
+  toMetrics() {
+    return {
+      total: this.total_tokens,
+      input: this.prompt_tokens,
+      output: this.completion_tokens,
+    };
+  }
+}
Lines changed: 9 additions & 0 deletions
@@ -0,0 +1,9 @@
+import { TokenUsage } from './TokenUsage';
+
+export function createOpenAiUsage(data: any): TokenUsage {
+  return {
+    total: data.total_tokens ?? 0,
+    input: data.prompt_tokens ?? 0,
+    output: data.completion_tokens ?? 0,
+  };
+}
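
For reference, the mapping this helper performs on a typical OpenAI usage payload (values are illustrative; the import path assumes a sibling module in the metrics directory):

import { createOpenAiUsage } from './OpenAiUsage';

// Illustrative numbers only; field names follow the OpenAI usage object.
const usage = createOpenAiUsage({ total_tokens: 120, prompt_tokens: 80, completion_tokens: 40 });
// usage is { total: 120, input: 80, output: 40 }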
