Skip to content

Commit 5b1336e

Browse files
author
Daniel OBrien
committed
add tracker
refactor design
1 parent 679fb26 commit 5b1336e

File tree

4 files changed

+146
-67
lines changed

4 files changed

+146
-67
lines changed
Lines changed: 64 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,64 @@
1+
import { LDClient, LDContext } from '@launchdarkly/node-server-sdk';
2+
3+
import { usageToTokenMetrics } from './trackUtils';
4+
import { BedrockTokenUsage, FeedbackKind, TokenUsage, UnderscoreTokenUsage } from './types';
5+
6+
export class LDAIConfigTracker {
7+
private ldClient: LDClient;
8+
private variationId: Record<string, string>;
9+
private context: LDContext;
10+
11+
constructor(ldClient: LDClient, variationId: string, context: LDContext) {
12+
this.ldClient = ldClient;
13+
this.variationId = { variationId };
14+
this.context = context;
15+
}
16+
17+
trackDuration(duration: number): void {
18+
this.ldClient.track('$ld:ai:duration:total', this.context, this.variationId, duration);
19+
}
20+
21+
trackTokens(tokens: TokenUsage | UnderscoreTokenUsage | BedrockTokenUsage) {
22+
const tokenMetrics = usageToTokenMetrics(tokens);
23+
if (tokenMetrics.total > 0) {
24+
this.ldClient.track(
25+
'$ld:ai:tokens:total',
26+
this.context,
27+
this.variationId,
28+
tokenMetrics.total,
29+
);
30+
}
31+
if (tokenMetrics.input > 0) {
32+
this.ldClient.track(
33+
'$ld:ai:tokens:input',
34+
this.context,
35+
this.variationId,
36+
tokenMetrics.input,
37+
);
38+
}
39+
if (tokenMetrics.output > 0) {
40+
this.ldClient.track(
41+
'$ld:ai:tokens:output',
42+
this.context,
43+
this.variationId,
44+
tokenMetrics.output,
45+
);
46+
}
47+
}
48+
49+
trackError(error: number) {
50+
this.ldClient.track('$ld:ai:error', this.context, this.variationId, error);
51+
}
52+
53+
trackGeneration(generation: number) {
54+
this.ldClient.track('$ld:ai:generation', this.context, this.variationId, generation);
55+
}
56+
57+
trackFeedback(feedback: { kind: FeedbackKind }) {
58+
if (feedback.kind === FeedbackKind.Positive) {
59+
this.ldClient.track('$ld:ai:feedback:user:positive', this.context, this.variationId, 1);
60+
} else if (feedback.kind === FeedbackKind.Negative) {
61+
this.ldClient.track('$ld:ai:feedback:user:negative', this.context, this.variationId, 1);
62+
}
63+
}
64+
}

packages/sdk/ai/src/index.ts

Lines changed: 7 additions & 67 deletions
Original file line numberDiff line numberDiff line change
@@ -2,16 +2,8 @@ import Mustache from 'mustache';
22

33
import { LDClient, LDContext } from '@launchdarkly/node-server-sdk';
44

5-
enum FeedbackKind {
6-
Positive = 'positive',
7-
Negative = 'negative',
8-
}
9-
10-
export interface TokenMetrics {
11-
total: number;
12-
input: number;
13-
output: number;
14-
}
5+
import { LDAIConfigTracker } from './LDAIConfigTracker';
6+
import { LDAIConfig } from './types';
157

168
export class AIClient {
179
private ldClient: LDClient;
@@ -72,7 +64,7 @@ export class AIClient {
7264
context: LDContext,
7365
defaultValue: string,
7466
variables?: Record<string, unknown>,
75-
): Promise<any> {
67+
): Promise<LDAIConfig> {
7668
const detail = await this.ldClient.variationDetail(key, context, defaultValue);
7769

7870
const allVariables = { ldctx: context, ...variables };
@@ -82,65 +74,13 @@ export class AIClient {
8274
content: this.interpolateTemplate(entry.content, allVariables),
8375
}));
8476

85-
return detail.value;
86-
}
87-
88-
trackDuration(context: LDContext, duration: number) {
89-
this.ldClient.track('$ld:ai:duration:total', context, duration);
90-
}
91-
92-
trackTokens(context: LDContext, tokens: TokenMetrics) {
93-
if (tokens.total > 0) {
94-
this.ldClient.track('$ld:ai:tokens:total', context, null, tokens.total);
95-
}
96-
if (tokens.input > 0) {
97-
this.ldClient.track('$ld:ai:tokens:input', context, null, tokens.input);
98-
}
99-
if (tokens.output > 0) {
100-
this.ldClient.track('$ld:ai:tokens:output', context, null, tokens.output);
101-
}
102-
}
103-
104-
trackError(context: LDContext, error: number) {
105-
this.ldClient.track('$ld:ai:error', context, null, error);
106-
}
107-
108-
trackGeneration(context: LDContext, generation: number) {
109-
this.ldClient.track('$ld:ai:generation', context, null, generation);
110-
}
111-
112-
trackFeedback(context: LDContext, feedback: { kind: FeedbackKind }) {
113-
if (feedback.kind === FeedbackKind.Positive) {
114-
this.ldClient.track('$ld:ai:feedback:user:positive', context, null, 1);
115-
} else if (feedback.kind === FeedbackKind.Negative) {
116-
this.ldClient.track('$ld:ai:feedback:user:negative', context, null, 1);
117-
}
77+
return {
78+
config: detail.value,
79+
tracker: new LDAIConfigTracker(this.ldClient, key, context),
80+
};
11881
}
11982
}
12083

12184
export function init(ldClient: LDClient): AIClient {
12285
return new AIClient(ldClient);
12386
}
124-
125-
export interface TokenUsage {
126-
completionTokens?: number;
127-
promptTokens?: number;
128-
totalTokens?: number;
129-
}
130-
131-
export interface UnderscoreTokenUsage {
132-
completion_tokens?: number;
133-
prompt_tokens?: number;
134-
total_tokens?: number;
135-
}
136-
137-
export function openAiUsageToTokenMetrics(usage: TokenUsage | UnderscoreTokenUsage): TokenMetrics {
138-
return {
139-
total: 'total_tokens' in usage ? usage.total_tokens : (usage as TokenUsage).totalTokens ?? 0,
140-
input: 'prompt_tokens' in usage ? usage.prompt_tokens : (usage as TokenUsage).promptTokens ?? 0,
141-
output:
142-
'completion_tokens' in usage
143-
? usage.completion_tokens
144-
: (usage as TokenUsage).completionTokens ?? 0,
145-
};
146-
}

packages/sdk/ai/src/trackUtils.ts

Lines changed: 25 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,25 @@
1+
import { BedrockTokenUsage, TokenMetrics, TokenUsage, UnderscoreTokenUsage } from './types';
2+
3+
export function usageToTokenMetrics(
4+
usage: TokenUsage | UnderscoreTokenUsage | BedrockTokenUsage,
5+
): TokenMetrics {
6+
if ('inputTokens' in usage && 'outputTokens' in usage) {
7+
// Bedrock usage
8+
return {
9+
total: usage.totalTokens,
10+
input: usage.inputTokens,
11+
output: usage.outputTokens,
12+
};
13+
}
14+
15+
// OpenAI usage (both camelCase and snake_case)
16+
return {
17+
total: 'total_tokens' in usage ? usage.total_tokens! : (usage as TokenUsage).totalTokens ?? 0,
18+
input:
19+
'prompt_tokens' in usage ? usage.prompt_tokens! : (usage as TokenUsage).promptTokens ?? 0,
20+
output:
21+
'completion_tokens' in usage
22+
? usage.completion_tokens!
23+
: (usage as TokenUsage).completionTokens ?? 0,
24+
};
25+
}

packages/sdk/ai/src/types.ts

Lines changed: 50 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,50 @@
1+
/**
 * Provider-agnostic token counts reported to LaunchDarkly after an AI
 * operation. Produced by `usageToTokenMetrics`.
 */
export interface TokenMetrics {
  /** Total tokens consumed by the operation. */
  total: number;
  /** Tokens consumed by the prompt/input. */
  input: number;
  /** Tokens consumed by the completion/output. */
  output: number;
}
6+
7+
/**
 * OpenAI-style token usage with camelCase field names. All fields are
 * optional; absent counts are treated as 0 during normalization.
 */
export interface TokenUsage {
  completionTokens?: number;
  promptTokens?: number;
  totalTokens?: number;
}
12+
13+
/**
 * OpenAI-style token usage with snake_case field names (the raw API response
 * shape). All fields are optional; absent counts are treated as 0.
 */
export interface UnderscoreTokenUsage {
  completion_tokens?: number;
  prompt_tokens?: number;
  total_tokens?: number;
}
18+
19+
/**
 * AWS Bedrock token usage. Unlike the OpenAI shapes, every field is required,
 * so consumers can read the counts without defaulting.
 */
export interface BedrockTokenUsage {
  inputTokens: number;
  outputTokens: number;
  totalTokens: number;
}
24+
25+
/**
 * User feedback sentiment on an AI generation. String enum values are the
 * literal payloads used when building feedback event names.
 */
export enum FeedbackKind {
  Positive = 'positive',
  Negative = 'negative',
}
29+
30+
/**
 * Contract for the analytics tracker attached to an AI Config result.
 *
 * NOTE(review): this interface shares its name with the `LDAIConfigTracker`
 * class in `LDAIConfigTracker.ts`, which matches this shape but does not
 * declare `implements` against it — confirm the intended relationship and
 * consider renaming one of them to avoid import collisions.
 */
export interface LDAIConfigTracker {
  /** Report the duration of an AI operation. */
  trackDuration: (duration: number) => void;
  /** Report token usage from an OpenAI- or Bedrock-style usage object. */
  trackTokens: (tokens: TokenUsage | UnderscoreTokenUsage | BedrockTokenUsage) => void;
  /** Report a numeric error metric. */
  trackError: (error: number) => void;
  /** Report a generation-count metric. */
  trackGeneration: (generation: number) => void;
  /** Report explicit positive/negative user feedback. */
  trackFeedback: (feedback: { kind: FeedbackKind }) => void;
}
37+
/**
 * AI Config value and tracker.
 */
export interface LDAIConfig {
  /**
   * The result of the AI Config evaluation.
   */
  config: unknown;

  /**
   * A tracker which can be used to generate analytics for this AI Config
   * evaluation.
   */
  tracker: LDAIConfigTracker;
}

0 commit comments

Comments
 (0)