diff --git a/packages/sdk/ai/package.json b/packages/sdk/ai/package.json
index 0106aabea1..bf76911cd6 100644
--- a/packages/sdk/ai/package.json
+++ b/packages/sdk/ai/package.json
@@ -5,9 +5,9 @@
   "main": "dist/index.js",
   "types": "dist/index.d.ts",
   "scripts": {
-    "build": "tsc",
-    "test": "jest",
-    "lint": "eslint . --ext .ts"
+    "build": "npx tsc",
+    "lint": "npx eslint . --ext .ts",
+    "lint:fix": "yarn run lint --fix"
   },
   "keywords": [
     "launchdarkly",
diff --git a/packages/sdk/ai/src/LDAIConfigTracker.ts b/packages/sdk/ai/src/LDAIConfigTracker.ts
index 61e6fc97bd..564902bd60 100644
--- a/packages/sdk/ai/src/LDAIConfigTracker.ts
+++ b/packages/sdk/ai/src/LDAIConfigTracker.ts
@@ -1,36 +1,88 @@
 import { LDClient, LDContext } from '@launchdarkly/node-server-sdk';
 
-import { BedrockTokenUsage, FeedbackKind, TokenUsage, UnderscoreTokenUsage } from './api/metrics';
-import { usageToTokenMetrics } from './trackUtils';
+import {
+  BedrockTokenUsage,
+  FeedbackKind,
+  OpenAITokenUsage,
+  TokenUsage,
+  UnderscoreTokenUsage,
+} from './api/metrics';
 
 export class LDAIConfigTracker {
   private ldClient: LDClient;
-  private configKey: string;
   private variationId: string;
+  private configKey: string;
   private context: LDContext;
 
   constructor(ldClient: LDClient, configKey: string, variationId: string, context: LDContext) {
     this.ldClient = ldClient;
-    this.configKey = configKey;
     this.variationId = variationId;
+    this.configKey = configKey;
     this.context = context;
   }
 
-  getTrackData() {
+  private getTrackData() {
     return {
-      configKey: this.configKey,
       variationId: this.variationId,
+      configKey: this.configKey,
     };
   }
 
   trackDuration(duration: number): void {
-    this.ldClient.track('$ld:ai:duration:total', this.context, this.variationId, duration);
+    this.ldClient.track('$ld:ai:duration:total', this.context, this.getTrackData(), duration);
+  }
+
+  async trackDurationOf(func: Function, ...args: any[]): Promise<any> {
+    const startTime = Date.now();
+    const result = await func(...args);
+    const endTime = Date.now();
+    const duration = endTime - startTime; // duration in milliseconds
+    this.trackDuration(duration);
+    return result;
+  }
+
+  trackError(error: number): void {
+    this.ldClient.track('$ld:ai:error', this.context, this.getTrackData(), error);
+  }
+
+  trackFeedback(feedback: { kind: FeedbackKind }): void {
+    if (feedback.kind === FeedbackKind.Positive) {
+      this.ldClient.track('$ld:ai:feedback:user:positive', this.context, this.getTrackData(), 1);
+    } else if (feedback.kind === FeedbackKind.Negative) {
+      this.ldClient.track('$ld:ai:feedback:user:negative', this.context, this.getTrackData(), 1);
+    }
+  }
+
+  trackGeneration(generation: number): void {
+    this.ldClient.track('$ld:ai:generation', this.context, this.getTrackData(), generation);
   }
 
-  trackTokens(tokens: TokenUsage | UnderscoreTokenUsage | BedrockTokenUsage) {
-    console.log('tracking LLM tokens', tokens);
-    const tokenMetrics = usageToTokenMetrics(tokens);
-    console.log('token metrics', tokenMetrics);
+  async trackOpenAI(func: Function, ...args: any[]): Promise<any> {
+    const result = await this.trackDurationOf(func, ...args);
+    this.trackGeneration(1);
+    if (result.usage) {
+      this.trackTokens(new OpenAITokenUsage(result.usage));
+    }
+    return result;
+  }
+
+  async trackBedrockConverse(res: any): Promise<any> {
+    if (res.$metadata?.httpStatusCode === 200) {
+      this.trackGeneration(1);
+    } else if (res.$metadata?.httpStatusCode >= 400) {
+      this.trackError(res.$metadata.httpStatusCode);
+    }
+    if (res.metrics) {
+      this.trackDuration(res.metrics.latencyMs);
+    }
+    if (res.usage) {
+      this.trackTokens(new BedrockTokenUsage(res.usage));
+    }
+    return res;
+  }
+
+  trackTokens(tokens: TokenUsage | UnderscoreTokenUsage | BedrockTokenUsage): void {
+    const tokenMetrics = tokens.toMetrics();
     if (tokenMetrics.total > 0) {
       this.ldClient.track(
         '$ld:ai:tokens:total',
@@ -40,7 +92,6 @@ export class LDAIConfigTracker {
       );
     }
     if (tokenMetrics.input > 0) {
-      console.log('tracking input tokens', tokenMetrics.input);
       this.ldClient.track(
         '$ld:ai:tokens:input',
         this.context,
@@ -49,7 +100,6 @@ export class LDAIConfigTracker {
       );
     }
     if (tokenMetrics.output > 0) {
-      console.log('tracking output tokens', tokenMetrics.output);
       this.ldClient.track(
         '$ld:ai:tokens:output',
         this.context,
@@ -58,20 +108,4 @@ export class LDAIConfigTracker {
       );
     }
   }
-
-  trackError(error: number) {
-    this.ldClient.track('$ld:ai:error', this.context, this.getTrackData(), error);
-  }
-
-  trackGeneration(generation: number) {
-    this.ldClient.track('$ld:ai:generation', this.context, this.getTrackData(), generation);
-  }
-
-  trackFeedback(feedback: { kind: FeedbackKind }) {
-    if (feedback.kind === FeedbackKind.Positive) {
-      this.ldClient.track('$ld:ai:feedback:user:positive', this.context, this.getTrackData(), 1);
-    } else if (feedback.kind === FeedbackKind.Negative) {
-      this.ldClient.track('$ld:ai:feedback:user:negative', this.context, this.getTrackData(), 1);
-    }
-  }
 }
diff --git a/packages/sdk/ai/src/api/config/LDAIConfig.ts b/packages/sdk/ai/src/api/config/LDAIConfig.ts
index 25b6cb4e1f..f1c8b81b24 100644
--- a/packages/sdk/ai/src/api/config/LDAIConfig.ts
+++ b/packages/sdk/ai/src/api/config/LDAIConfig.ts
@@ -5,12 +5,17 @@ import { LDAIConfigTracker } from './LDAIConfigTracker';
  */
 export interface LDAIConfig {
   /**
-   * The result of the AI Config evaluation.
+   * The result of the AI Config customization.
    */
  config: unknown;
 
   /**
-   * A tracker which can be used to generate analytics for the migration.
+   * A tracker which can be used to generate analytics.
    */
   tracker: LDAIConfigTracker;
+
+  /**
+   * Whether the configuration is not found.
+   */
+  noConfiguration: boolean;
 }
diff --git a/packages/sdk/ai/src/api/config/LDAIConfigTracker.ts b/packages/sdk/ai/src/api/config/LDAIConfigTracker.ts
index 6a83a8b1c1..6c7dd7c27e 100644
--- a/packages/sdk/ai/src/api/config/LDAIConfigTracker.ts
+++ b/packages/sdk/ai/src/api/config/LDAIConfigTracker.ts
@@ -6,4 +6,7 @@
   trackError: (error: number) => void;
   trackGeneration: (generation: number) => void;
   trackFeedback: (feedback: { kind: FeedbackKind }) => void;
+  trackDurationOf: (func: Function, ...args: any[]) => any;
+  trackOpenAI: (func: Function, ...args: any[]) => any;
+  trackBedrockConverse: (res: any) => any;
 }
diff --git a/packages/sdk/ai/src/api/config/index.ts b/packages/sdk/ai/src/api/config/index.ts
index 2bb488be5b..1c07d5c3a4 100644
--- a/packages/sdk/ai/src/api/config/index.ts
+++ b/packages/sdk/ai/src/api/config/index.ts
@@ -1,2 +1,2 @@
 export * from './LDAIConfig';
-export * from './LDAIConfigTracker';
+export { LDAIConfigTracker } from './LDAIConfigTracker';
diff --git a/packages/sdk/ai/src/api/metrics/BedrockTokenUsage.ts b/packages/sdk/ai/src/api/metrics/BedrockTokenUsage.ts
index f50d70a210..a0c44ec4fc 100644
--- a/packages/sdk/ai/src/api/metrics/BedrockTokenUsage.ts
+++ b/packages/sdk/ai/src/api/metrics/BedrockTokenUsage.ts
@@ -1,5 +1,19 @@
-export interface BedrockTokenUsage {
+export class BedrockTokenUsage {
+  totalTokens: number;
   inputTokens: number;
   outputTokens: number;
-  totalTokens: number;
+
+  constructor(data: any) {
+    this.totalTokens = data.totalTokens || 0;
+    this.inputTokens = data.inputTokens || 0;
+    this.outputTokens = data.outputTokens || 0;
+  }
+
+  toMetrics() {
+    return {
+      total: this.totalTokens,
+      input: this.inputTokens,
+      output: this.outputTokens,
+    };
+  }
 }
diff --git a/packages/sdk/ai/src/api/metrics/OpenAITokenUsage.ts b/packages/sdk/ai/src/api/metrics/OpenAITokenUsage.ts
new file mode 100644
index 0000000000..bd177b6a7b
--- /dev/null
+++ b/packages/sdk/ai/src/api/metrics/OpenAITokenUsage.ts
@@ -0,0 +1,19 @@
+export class OpenAITokenUsage {
+  total_tokens: number;
+  prompt_tokens: number;
+  completion_tokens: number;
+
+  constructor(data: any) {
+    this.total_tokens = data.total_tokens;
+    this.prompt_tokens = data.prompt_tokens;
+    this.completion_tokens = data.completion_tokens;
+  }
+
+  toMetrics() {
+    return {
+      total: this.total_tokens,
+      input: this.prompt_tokens,
+      output: this.completion_tokens,
+    };
+  }
+}
diff --git a/packages/sdk/ai/src/api/metrics/TokenUsage.ts b/packages/sdk/ai/src/api/metrics/TokenUsage.ts
index cfdf8788b0..ed098f7ae9 100644
--- a/packages/sdk/ai/src/api/metrics/TokenUsage.ts
+++ b/packages/sdk/ai/src/api/metrics/TokenUsage.ts
@@ -1,5 +1,19 @@
-export interface TokenUsage {
-  completionTokens?: number;
-  promptTokens?: number;
-  totalTokens?: number;
+export class TokenUsage {
+  totalTokens: number;
+  promptTokens: number;
+  completionTokens: number;
+
+  constructor(data: any) {
+    this.totalTokens = data.total_tokens || 0;
+    this.promptTokens = data.prompt_tokens || 0;
+    this.completionTokens = data.completion_tokens || 0;
+  }
+
+  toMetrics() {
+    return {
+      total: this.totalTokens,
+      input: this.promptTokens,
+      output: this.completionTokens,
+    };
+  }
 }
diff --git a/packages/sdk/ai/src/api/metrics/UnderscoreTokenUsage.ts b/packages/sdk/ai/src/api/metrics/UnderscoreTokenUsage.ts
index e0efe0cbcf..ec5b81803d 100644
--- a/packages/sdk/ai/src/api/metrics/UnderscoreTokenUsage.ts
+++ b/packages/sdk/ai/src/api/metrics/UnderscoreTokenUsage.ts
@@ -1,5 +1,19 @@
-export interface UnderscoreTokenUsage {
-  completion_tokens?: number;
-  prompt_tokens?: number;
-  total_tokens?: number;
+export class UnderscoreTokenUsage {
+  total_tokens: number;
+  prompt_tokens: number;
+  completion_tokens: number;
+
+  constructor(data: any) {
+    this.total_tokens = data.total_tokens || 0;
+    this.prompt_tokens = data.prompt_tokens || 0;
+    this.completion_tokens = data.completion_tokens || 0;
+  }
+
+  toMetrics() {
+    return {
+      total: this.total_tokens,
+      input: this.prompt_tokens,
+      output: this.completion_tokens,
+    };
+  }
 }
diff --git a/packages/sdk/ai/src/api/metrics/index.ts b/packages/sdk/ai/src/api/metrics/index.ts
index 16e3d5620b..a85cf510fd 100644
--- a/packages/sdk/ai/src/api/metrics/index.ts
+++ b/packages/sdk/ai/src/api/metrics/index.ts
@@ -1,5 +1,6 @@
 export * from './BedrockTokenUsage';
 export * from './FeedbackKind';
+export * from './OpenAITokenUsage';
 export * from './TokenMetrics';
 export * from './TokenUsage';
 export * from './UnderScoreTokenUsage';
diff --git a/packages/sdk/ai/src/index.ts b/packages/sdk/ai/src/index.ts
index fd0b73d27b..c77ac522c1 100644
--- a/packages/sdk/ai/src/index.ts
+++ b/packages/sdk/ai/src/index.ts
@@ -2,8 +2,8 @@ import Mustache from 'mustache';
 
 import { LDClient, LDContext } from '@launchdarkly/node-server-sdk';
 
-import { LDAIConfigTracker } from './LDAIConfigTracker';
 import { LDAIConfig } from './api/config';
+import { LDAIConfigTracker } from './LDAIConfigTracker';
 
 export class AIClient {
   private ldClient: LDClient;
@@ -41,7 +41,6 @@ export class AIClient {
    * const defaultValue = {}};
    *
    * const result = modelConfig(key, context, defaultValue, variables);
-   * console.log(result);
    * // Output:
    * {
    *   modelId: "gpt-4o",
@@ -67,7 +66,7 @@ export class AIClient {
     defaultValue: string,
     variables?: Record<string, unknown>,
   ): Promise<LDAIConfig> {
-    const detail = await this.ldClient.variationDetail(key, context, defaultValue);
+    const detail = await this.ldClient.variation(key, context, defaultValue);
 
     const allVariables = { ldctx: context, ...variables };
 
@@ -81,9 +80,11 @@ export class AIClient {
       tracker: new LDAIConfigTracker(
         this.ldClient,
         key,
-        detail.value["_ldMeta"]["variationId"],
+        // eslint-disable-next-line @typescript-eslint/dot-notation
+        detail.value['_ldMeta'].variationId,
         context,
       ),
+      noConfiguration: Object.keys(detail).length === 0,
     };
   }
 }
@@ -92,4 +93,5 @@ export function init(ldClient: LDClient): AIClient {
   return new AIClient(ldClient);
 }
 
-export { LDAIConfigTracker } from './LDAIConfigTracker';
+export * from './api/config/LDAIConfigTracker';
+export * from './api/metrics';
diff --git a/packages/sdk/ai/src/trackUtils.ts b/packages/sdk/ai/src/trackUtils.ts
index 731ad52c49..bace5e0c35 100644
--- a/packages/sdk/ai/src/trackUtils.ts
+++ b/packages/sdk/ai/src/trackUtils.ts
@@ -1,4 +1,4 @@
-import { BedrockTokenUsage, TokenMetrics, TokenUsage, UnderscoreTokenUsage } from './types';
+import { BedrockTokenUsage, TokenMetrics, TokenUsage, UnderscoreTokenUsage } from './api/metrics';
 
 export function usageToTokenMetrics(
   usage: TokenUsage | UnderscoreTokenUsage | BedrockTokenUsage,
diff --git a/packages/sdk/ai/tsconfig.eslint.json b/packages/sdk/ai/tsconfig.eslint.json
new file mode 100644
index 0000000000..0d1ecf8cf5
--- /dev/null
+++ b/packages/sdk/ai/tsconfig.eslint.json
@@ -0,0 +1,5 @@
+{
+  "extends": "./tsconfig.json",
+  "include": ["/**/*.ts"],
+  "exclude": ["node_modules"]
+}
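
Usage sketch for the tracker methods added in this diff. This is illustrative and not part of the
change itself: the AI package import path ('@launchdarkly/ai'), the SDK key, the flag key, the
context, and the OpenAI client setup are assumptions, since none of them appear in the diff.

import OpenAI from 'openai';

import { init as initLDClient, LDContext } from '@launchdarkly/node-server-sdk';

// Package name assumed; the diff only shows the packages/sdk/ai directory.
import { FeedbackKind, init } from '@launchdarkly/ai';

const openai = new OpenAI();

async function main() {
  const ldClient = initLDClient('my-sdk-key'); // SDK key assumed
  await ldClient.waitForInitialization();

  const aiClient = init(ldClient);
  const context: LDContext = { kind: 'user', key: 'example-user' };

  // modelConfig evaluates the AI Config flag, renders it with Mustache variables,
  // and returns the config, a tracker, and the new noConfiguration flag.
  const { config, tracker, noConfiguration } = await aiClient.modelConfig(
    'my-ai-config', // flag key assumed
    context,
    '{}', // defaultValue
    { myVariable: 'My User Defined Variable' },
  );

  if (noConfiguration) {
    return;
  }

  // trackOpenAI wraps the completion call, recording duration, a generation event,
  // and token usage (via OpenAITokenUsage) when the response includes `usage`.
  const completion = await tracker.trackOpenAI(async () =>
    openai.chat.completions.create({
      model: 'gpt-4o',
      messages: [{ role: 'user', content: 'Hello!' }],
    }),
  );

  // For Bedrock, a Converse response can instead be passed to tracker.trackBedrockConverse(res),
  // which records generation/error, latency, and token usage from the response metadata.

  // User feedback is reported separately.
  tracker.trackFeedback({ kind: FeedbackKind.Positive });

  await ldClient.flush();
  ldClient.close();
}

main();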