6 changes: 3 additions & 3 deletions packages/sdk/ai/package.json
@@ -5,9 +5,9 @@
"main": "dist/index.js",
Contributor: I think the package name should follow our normal conventions, likely @launchdarkly/node-server-sdk-ai. I can see this might require some discussion.

"types": "dist/index.d.ts",
"scripts": {
"build": "tsc",
"test": "jest",
"lint": "eslint . --ext .ts"
"build": "npx tsc",
"lint": "npx eslint . --ext .ts",
"lint:fix": "yarn run lint --fix"
},
"keywords": [
"launchdarkly",
92 changes: 63 additions & 29 deletions packages/sdk/ai/src/LDAIConfigTracker.ts
@@ -1,36 +1,88 @@
import { LDClient, LDContext } from '@launchdarkly/node-server-sdk';

import { BedrockTokenUsage, FeedbackKind, TokenUsage, UnderscoreTokenUsage } from './api/metrics';
import { usageToTokenMetrics } from './trackUtils';
import {
BedrockTokenUsage,
FeedbackKind,
OpenAITokenUsage,
TokenUsage,
UnderscoreTokenUsage,
} from './api/metrics';

export class LDAIConfigTracker {
Member: I need to check the base PR, but this should be implementing an interface, and the interface is what the customer is aware of.

private ldClient: LDClient;
private configKey: string;
private variationId: string;
private configKey: string;
private context: LDContext;

constructor(ldClient: LDClient, configKey: string, variationId: string, context: LDContext) {
this.ldClient = ldClient;
this.configKey = configKey;
this.variationId = variationId;
this.configKey = configKey;
this.context = context;
}

getTrackData() {
private getTrackData() {
return {
configKey: this.configKey,
variationId: this.variationId,
configKey: this.configKey,
};
}

trackDuration(duration: number): void {
this.ldClient.track('$ld:ai:duration:total', this.context, this.variationId, duration);
this.ldClient.track('$ld:ai:duration:total', this.context, this.getTrackData(), duration);
}

async trackDurationOf(func: Function, ...args: any[]): Promise<any> {
const startTime = Date.now();
const result = await func(...args);
const endTime = Date.now();
const duration = endTime - startTime; // duration in milliseconds
this.trackDuration(duration);
return result;
}

trackError(error: number): void {
this.ldClient.track('$ld:ai:error', this.context, this.getTrackData(), error);
}

trackFeedback(feedback: { kind: FeedbackKind }): void {
if (feedback.kind === FeedbackKind.Positive) {
this.ldClient.track('$ld:ai:feedback:user:positive', this.context, this.getTrackData(), 1);
} else if (feedback.kind === FeedbackKind.Negative) {
this.ldClient.track('$ld:ai:feedback:user:negative', this.context, this.getTrackData(), 1);
}
}

trackGeneration(generation: number): void {
this.ldClient.track('$ld:ai:generation', this.context, this.getTrackData(), generation);
}

trackTokens(tokens: TokenUsage | UnderscoreTokenUsage | BedrockTokenUsage) {
console.log('tracking LLM tokens', tokens);
const tokenMetrics = usageToTokenMetrics(tokens);
console.log('token metrics', tokenMetrics);
async trackOpenAI(func: Function, ...args: any[]): Promise<any> {
const result = await this.trackDurationOf(func, ...args);
this.trackGeneration(1);
if (result.usage) {
this.trackTokens(new OpenAITokenUsage(result.usage));
}
return result;
}

async trackBedrockConverse(res: any): Promise<any> {
if (res.$metadata?.httpStatusCode === 200) {
this.trackGeneration(1);
} else if (res.$metadata?.httpStatusCode >= 400) {
this.trackError(res.$metadata.httpStatusCode);
}
if (res.metrics) {
this.trackDuration(res.metrics.latencyMs);
}
if (res.usage) {
this.trackTokens(new BedrockTokenUsage(res.usage));
}
return res;
}

trackTokens(tokens: TokenUsage | UnderscoreTokenUsage | BedrockTokenUsage): void {
const tokenMetrics = tokens.toMetrics();
if (tokenMetrics.total > 0) {
this.ldClient.track(
'$ld:ai:tokens:total',
@@ -40,7 +92,6 @@ export class LDAIConfigTracker {
);
}
if (tokenMetrics.input > 0) {
console.log('tracking input tokens', tokenMetrics.input);
this.ldClient.track(
'$ld:ai:tokens:input',
this.context,
@@ -49,7 +100,6 @@
);
}
if (tokenMetrics.output > 0) {
console.log('tracking output tokens', tokenMetrics.output);
this.ldClient.track(
'$ld:ai:tokens:output',
this.context,
@@ -58,20 +108,4 @@
);
}
}

trackError(error: number) {
this.ldClient.track('$ld:ai:error', this.context, this.getTrackData(), error);
}

trackGeneration(generation: number) {
this.ldClient.track('$ld:ai:generation', this.context, this.getTrackData(), generation);
}

trackFeedback(feedback: { kind: FeedbackKind }) {
if (feedback.kind === FeedbackKind.Positive) {
this.ldClient.track('$ld:ai:feedback:user:positive', this.context, this.getTrackData(), 1);
} else if (feedback.kind === FeedbackKind.Negative) {
this.ldClient.track('$ld:ai:feedback:user:negative', this.context, this.getTrackData(), 1);
}
}
}
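For orientation, here is a minimal consumer-side sketch of the reworked tracker, assuming a `tracker` obtained from this package's AI client and an `openai` instance of the official OpenAI Node SDK; the setup and imports are not part of this diff.

```ts
// Sketch only. `tracker` is an LDAIConfigTracker from this package and `openai`
// is an OpenAI Node SDK client; imports are assumed, since the published
// package name is still under discussion (see the review comment above).
const completion = await tracker.trackOpenAI(async () =>
  // trackOpenAI times the call via trackDurationOf, records one generation,
  // and, if the response includes a `usage` object, tracks its token counts.
  openai.chat.completions.create({
    model: 'gpt-4o',
    messages: [{ role: 'user', content: 'Hello!' }],
  }),
);

// Feedback and errors are reported explicitly by the caller.
tracker.trackFeedback({ kind: FeedbackKind.Positive });
```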
9 changes: 7 additions & 2 deletions packages/sdk/ai/src/api/config/LDAIConfig.ts
@@ -5,12 +5,17 @@ import { LDAIConfigTracker } from './LDAIConfigTracker';
*/
export interface LDAIConfig {
/**
* The result of the AI Config evaluation.
* The result of the AI Config customization.
*/
config: unknown;

/**
* A tracker which can be used to generate analytics for the migration.
* A tracker which can be used to generate analytics.
*/
tracker: LDAIConfigTracker;

/**
* Whether the configuration is not found.
*/
noConfiguration: boolean;
}
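A brief sketch of how a caller might branch on the new `noConfiguration` flag; `aiClient` and the fallback helper are hypothetical names, and only the field names come from the interface above.

```ts
// Hypothetical consumer of modelConfig(); the result shape follows LDAIConfig.
const aiConfig = await aiClient.modelConfig(key, context, defaultValue, variables);

if (aiConfig.noConfiguration) {
  usePromptFallback(); // hypothetical application default
} else {
  // `config` is typed as `unknown`, so callers narrow it before use,
  // then report analytics through the tracker.
  const config = aiConfig.config as { model?: unknown; prompt?: unknown };
  aiConfig.tracker.trackGeneration(1);
}
```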
3 changes: 3 additions & 0 deletions packages/sdk/ai/src/api/config/LDAIConfigTracker.ts
@@ -6,4 +6,7 @@ export interface LDAIConfigTracker {
trackError: (error: number) => void;
trackGeneration: (generation: number) => void;
trackFeedback: (feedback: { kind: FeedbackKind }) => void;
trackDurationOf: (func: Function, ...args: any[]) => any;
trackOpenAI: (func: Function, ...args: any[]) => any;
trackBedrockConverse: (res: any) => any;
}
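The interface currently erases types with `Function` and `any`, which the review comments elsewhere push back on. One possible tightening, offered only as a suggestion rather than something this PR implements, is to make the wrapper methods generic; the interface name and import path below are assumptions for illustration.

```ts
import { FeedbackKind } from '../metrics';

// Suggested shape (not in this PR): generics preserve the wrapped function's
// argument and return types instead of erasing them to `Function` and `any`.
export interface LDAIConfigTrackerTyped {
  trackDuration(duration: number): void;
  trackFeedback(feedback: { kind: FeedbackKind }): void;
  trackDurationOf<TArgs extends unknown[], TRes>(
    func: (...args: TArgs) => Promise<TRes>,
    ...args: TArgs
  ): Promise<TRes>;
}
```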
2 changes: 1 addition & 1 deletion packages/sdk/ai/src/api/config/index.ts
@@ -1,2 +1,2 @@
export * from './LDAIConfig';
export * from './LDAIConfigTracker';
export { LDAIConfigTracker } from './LDAIConfigTracker';
18 changes: 16 additions & 2 deletions packages/sdk/ai/src/api/metrics/BedrockTokenUsage.ts
@@ -1,5 +1,19 @@
export interface BedrockTokenUsage {
export class BedrockTokenUsage {
totalTokens: number;
inputTokens: number;
outputTokens: number;
totalTokens: number;

constructor(data: any) {
Member: A type of some kind would be good here. Typescript is structural, so it just needs to have the stuff that we need to understand.

this.totalTokens = data.totalTokens || 0;
this.inputTokens = data.inputTokens || 0;
this.outputTokens = data.outputTokens || 0;
}

toMetrics() {
return {
total: this.totalTokens,
input: this.inputTokens,
output: this.outputTokens,
};
}
}
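Per the review comment above, a sketch of the structural type that could replace `any` in the constructor; the fields are the ones the class already reads, matching the Bedrock Converse response's usage block, and the interface name is an assumption.

```ts
// Suggested constructor parameter type (not in this PR). TypeScript is
// structural, so only the fields BedrockTokenUsage actually reads are needed.
interface BedrockTokenUsageData {
  totalTokens?: number;
  inputTokens?: number;
  outputTokens?: number;
}
// constructor(data: BedrockTokenUsageData) { ... }
```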
19 changes: 19 additions & 0 deletions packages/sdk/ai/src/api/metrics/OpenAITokenUsage.ts
@@ -0,0 +1,19 @@
export class OpenAITokenUsage {
total_tokens: number;
prompt_tokens: number;
completion_tokens: number;

constructor(data: any) {
Member: Same as others.
Member: I am going to stop tagging now. But generally we want to avoid any, unless we really need to. Here we know several fields, so we should be able to say we need those.

this.total_tokens = data.total_tokens;
this.prompt_tokens = data.prompt_tokens;
this.completion_tokens = data.completion_tokens;
}

toMetrics() {
return {
total: this.total_tokens,
input: this.prompt_tokens,
output: this.completion_tokens,
};
}
}
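The same approach works here; these field names are the ones OpenAI returns in a completion response's `usage` object, which is all this class reads. The interface name is again an assumption.

```ts
// Suggested constructor parameter type (not in this PR), mirroring the fields
// of the OpenAI `usage` object that the class reads.
interface OpenAITokenUsageData {
  total_tokens: number;
  prompt_tokens: number;
  completion_tokens: number;
}
// constructor(data: OpenAITokenUsageData) { ... }
```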
22 changes: 18 additions & 4 deletions packages/sdk/ai/src/api/metrics/TokenUsage.ts
@@ -1,5 +1,19 @@
export interface TokenUsage {
completionTokens?: number;
promptTokens?: number;
totalTokens?: number;
export class TokenUsage {
totalTokens: number;
promptTokens: number;
completionTokens: number;

constructor(data: any) {
this.totalTokens = data.total_tokens || 0;
this.promptTokens = data.prompt_tokens || 0;
this.completionTokens = data.completion_tokens || 0;
}

toMetrics() {
return {
total: this.totalTokens,
input: this.promptTokens,
output: this.completionTokens,
};
}
}
22 changes: 18 additions & 4 deletions packages/sdk/ai/src/api/metrics/UnderscoreTokenUsage.ts
@@ -1,5 +1,19 @@
export interface UnderscoreTokenUsage {
completion_tokens?: number;
prompt_tokens?: number;
total_tokens?: number;
export class UnderscoreTokenUsage {
total_tokens: number;
prompt_tokens: number;
completion_tokens: number;

constructor(data: any) {
this.total_tokens = data.total_tokens || 0;
this.prompt_tokens = data.prompt_tokens || 0;
this.completion_tokens = data.completion_tokens || 0;
}

toMetrics() {
return {
total: this.total_tokens,
input: this.prompt_tokens,
output: this.completion_tokens,
};
}
}
1 change: 1 addition & 0 deletions packages/sdk/ai/src/api/metrics/index.ts
@@ -1,5 +1,6 @@
export * from './BedrockTokenUsage';
export * from './FeedbackKind';
export * from './OpenAITokenUsage';
export * from './TokenMetrics';
export * from './TokenUsage';
export * from './UnderscoreTokenUsage';
12 changes: 7 additions & 5 deletions packages/sdk/ai/src/index.ts
@@ -2,8 +2,8 @@ import Mustache from 'mustache';

import { LDClient, LDContext } from '@launchdarkly/node-server-sdk';

import { LDAIConfigTracker } from './LDAIConfigTracker';
import { LDAIConfig } from './api/config';
import { LDAIConfigTracker } from './LDAIConfigTracker';

export class AIClient {
private ldClient: LDClient;
@@ -41,7 +41,6 @@ export class AIClient {
* const defaultValue = {}};
*
* const result = modelConfig(key, context, defaultValue, variables);
* console.log(result);
* // Output:
* {
* modelId: "gpt-4o",
@@ -67,7 +66,7 @@
defaultValue: string,
variables?: Record<string, unknown>,
): Promise<LDAIConfig> {
const detail = await this.ldClient.variationDetail(key, context, defaultValue);
const detail = await this.ldClient.variation(key, context, defaultValue);

const allVariables = { ldctx: context, ...variables };

@@ -81,9 +80,11 @@
tracker: new LDAIConfigTracker(
this.ldClient,
key,
detail.value["_ldMeta"]["variationId"],
// eslint-disable-next-line @typescript-eslint/dot-notation
detail.value['_ldMeta'].variationId,
context,
),
noConfiguration: Object.keys(detail).length === 0,
};
}
}
Expand All @@ -92,4 +93,5 @@ export function init(ldClient: LDClient): AIClient {
return new AIClient(ldClient);
}

export { LDAIConfigTracker } from './LDAIConfigTracker';
export * from './api/config/LDAIConfigTracker';
export * from './api/metrics';
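Putting the pieces together, a hypothetical end-to-end sketch of the exported surface; the AI package's import path is a placeholder (its name is still under discussion), and the SDK key, AI Config key, context, and variables are illustrative.

```ts
import { init as initNode } from '@launchdarkly/node-server-sdk';
// Placeholder path: the published package name is still being decided.
import { init as initAI } from '@launchdarkly/ai';

const ldClient = initNode('sdk-key'); // illustrative SDK key
await ldClient.waitForInitialization();

const aiClient = initAI(ldClient);
const context = { kind: 'user', key: 'user-123' };

const { config, tracker, noConfiguration } = await aiClient.modelConfig(
  'my-ai-config',        // hypothetical AI Config key
  context,
  '{}',                  // defaultValue is typed as a string in this PR
  { userName: 'Sandy' }, // Mustache variables interpolated into the prompt
);

if (!noConfiguration) {
  // Use `config` with the model provider of choice and report analytics
  // through `tracker`, e.g. tracker.trackOpenAI(...) as sketched earlier.
}
```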
2 changes: 1 addition & 1 deletion packages/sdk/ai/src/trackUtils.ts
@@ -1,4 +1,4 @@
import { BedrockTokenUsage, TokenMetrics, TokenUsage, UnderscoreTokenUsage } from './types';
import { BedrockTokenUsage, TokenMetrics, TokenUsage, UnderscoreTokenUsage } from './api/metrics';

export function usageToTokenMetrics(
usage: TokenUsage | UnderscoreTokenUsage | BedrockTokenUsage,
5 changes: 5 additions & 0 deletions packages/sdk/ai/tsconfig.eslint.json
@@ -0,0 +1,5 @@
{
"extends": "./tsconfig.json",
"include": ["/**/*.ts"],
"exclude": ["node_modules"]
}