Skip to content

Commit 679fb26

Browse files
author
Daniel OBrien
committed
new package
in-progress metrics
1 parent 488bc09 commit 679fb26

File tree

8 files changed

+201
-102
lines changed

8 files changed

+201
-102
lines changed

package.json

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -23,7 +23,8 @@
2323
"packages/store/node-server-sdk-dynamodb",
2424
"packages/telemetry/node-server-sdk-otel",
2525
"packages/tooling/jest",
26-
"packages/sdk/browser"
26+
"packages/sdk/browser",
27+
"packages/sdk/ai"
2728
],
2829
"private": true,
2930
"scripts": {

packages/sdk/ai/README.md

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,5 @@
1+
# LaunchDarkly Server-Side SDK for Node.js - AI integration
2+
3+
This package provides AI and LLM integration for the LaunchDarkly Server-Side SDK for Node.js.
4+
5+
## Installation

packages/sdk/ai/package.json

Lines changed: 36 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,36 @@
1+
{
2+
"name": "@launchdarkly/node-server-sdk-ai",
3+
"version": "0.1.0",
4+
"description": "AI and LLM integration for the LaunchDarkly Server-Side SDK for Node.js",
5+
"main": "dist/index.js",
6+
"types": "dist/index.d.ts",
7+
"scripts": {
8+
"build": "tsc",
9+
"test": "jest",
10+
"lint": "eslint . --ext .ts"
11+
},
12+
"keywords": [
13+
"launchdarkly",
14+
"ai",
15+
"llm"
16+
],
17+
"author": "LaunchDarkly",
18+
"license": "Apache-2.0",
19+
"dependencies": {
20+
"@launchdarkly/node-server-sdk": "^9.5.2",
21+
"mustache": "^4.2.0"
22+
},
23+
"devDependencies": {
24+
"@types/jest": "^29.5.3",
25+
"@types/mustache": "^4.2.5",
26+
"@typescript-eslint/eslint-plugin": "^6.20.0",
27+
"@typescript-eslint/parser": "^6.20.0",
28+
"eslint": "^8.45.0",
29+
"jest": "^29.6.1",
30+
"ts-jest": "^29.1.1",
31+
"typescript": "5.1.6"
32+
},
33+
"peerDependencies": {
34+
"@launchdarkly/node-server-sdk": ">=9.4.3"
35+
}
36+
}

packages/sdk/ai/src/index.ts

Lines changed: 146 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,146 @@
1+
import Mustache from 'mustache';
2+
3+
import { LDClient, LDContext } from '@launchdarkly/node-server-sdk';
4+
5+
enum FeedbackKind {
6+
Positive = 'positive',
7+
Negative = 'negative',
8+
}
9+
10+
/**
 * Normalized token counts for a single LLM call, as tracked by the
 * `$ld:ai:tokens:*` metrics (see {@link AIClient.trackTokens}).
 */
export interface TokenMetrics {
  /** Total tokens consumed (input + output). */
  total: number;
  /** Tokens in the prompt/input. */
  input: number;
  /** Tokens in the completion/output. */
  output: number;
}
15+
16+
export class AIClient {
17+
private ldClient: LDClient;
18+
19+
constructor(ldClient: LDClient) {
20+
this.ldClient = ldClient;
21+
}
22+
23+
/**
24+
* Parses and interpolates a template string with the provided variables.
25+
*
26+
* @param template - The template string to be parsed and interpolated.
27+
* @param variables - An object containing the variables to be used for interpolation.
28+
* @returns The interpolated string.
29+
*/
30+
interpolateTemplate(template: string, variables: Record<string, unknown>): string {
31+
return Mustache.render(template, variables, undefined, { escape: (item: any) => item });
32+
}
33+
34+
/**
35+
* Retrieves and processes a prompt template based on the provided key, LaunchDarkly context, and variables.
36+
*
37+
* @param key - A unique identifier for the prompt template. This key is used to fetch the correct prompt from storage or configuration.
38+
* @param context - The LaunchDarkly context object that contains relevant information about the current environment, user, or session. This context may influence how the prompt is processed or personalized.
39+
* @param variables - A map of key-value pairs representing dynamic variables to be injected into the prompt template. The keys correspond to placeholders within the template, and the values are the corresponding replacements.
40+
* @param defaultValue - A fallback value to be used if the prompt template associated with the key is not found or if any errors occur during processing.
41+
*
42+
* @returns The processed prompt after all variables have been substituted in the stored prompt template. If the prompt cannot be retrieved or processed, the `defaultValue` is returned.
43+
*
44+
* @example
45+
* const key = "welcome_prompt";
46+
* const context = new LDContext(...);
47+
* const variables = new Record<string, string>([["username", "John"]]);
48+
* const defaultValue = "Welcome, user!";
49+
*
50+
* const result = modelConfig(key, context, variables, defaultValue);
51+
* console.log(result);
52+
* // Output:
53+
* // {
54+
* // modelId: "gpt-4o",
55+
* // temperature: 0.2,
56+
* // maxTokens: 4096,
57+
* // userDefinedKey: "myValue",
58+
* // prompt: [
59+
* // {
60+
* // role: "system",
61+
* // content: "You are an amazing GPT."
62+
* // },
63+
* // {
64+
* // role: "user",
65+
* // content: "Explain how you're an amazing GPT."
66+
* // }
67+
* // ]
68+
* // }
69+
*/
70+
async modelConfig(
71+
key: string,
72+
context: LDContext,
73+
defaultValue: string,
74+
variables?: Record<string, unknown>,
75+
): Promise<any> {
76+
const detail = await this.ldClient.variationDetail(key, context, defaultValue);
77+
78+
const allVariables = { ldctx: context, ...variables };
79+
80+
detail.value.prompt = detail.value.prompt.map((entry: any) => ({
81+
...entry,
82+
content: this.interpolateTemplate(entry.content, allVariables),
83+
}));
84+
85+
return detail.value;
86+
}
87+
88+
trackDuration(context: LDContext, duration: number) {
89+
this.ldClient.track('$ld:ai:duration:total', context, duration);
90+
}
91+
92+
trackTokens(context: LDContext, tokens: TokenMetrics) {
93+
if (tokens.total > 0) {
94+
this.ldClient.track('$ld:ai:tokens:total', context, null, tokens.total);
95+
}
96+
if (tokens.input > 0) {
97+
this.ldClient.track('$ld:ai:tokens:input', context, null, tokens.input);
98+
}
99+
if (tokens.output > 0) {
100+
this.ldClient.track('$ld:ai:tokens:output', context, null, tokens.output);
101+
}
102+
}
103+
104+
trackError(context: LDContext, error: number) {
105+
this.ldClient.track('$ld:ai:error', context, null, error);
106+
}
107+
108+
trackGeneration(context: LDContext, generation: number) {
109+
this.ldClient.track('$ld:ai:generation', context, null, generation);
110+
}
111+
112+
trackFeedback(context: LDContext, feedback: { kind: FeedbackKind }) {
113+
if (feedback.kind === FeedbackKind.Positive) {
114+
this.ldClient.track('$ld:ai:feedback:user:positive', context, null, 1);
115+
} else if (feedback.kind === FeedbackKind.Negative) {
116+
this.ldClient.track('$ld:ai:feedback:user:negative', context, null, 1);
117+
}
118+
}
119+
}
120+
121+
export function init(ldClient: LDClient): AIClient {
122+
return new AIClient(ldClient);
123+
}
124+
125+
/**
 * Token usage with camelCase field names — presumably the shape produced by
 * client libraries that normalize API responses (TODO confirm against callers).
 * All fields are optional; missing counts are treated as 0 by
 * {@link openAiUsageToTokenMetrics}.
 */
export interface TokenUsage {
  /** Tokens in the completion/output. */
  completionTokens?: number;
  /** Tokens in the prompt/input. */
  promptTokens?: number;
  /** Total tokens consumed. */
  totalTokens?: number;
}
130+
131+
/**
 * Token usage with snake_case field names — presumably the raw wire format of
 * OpenAI-style API responses (TODO confirm). All fields are optional; missing
 * counts are treated as 0 by {@link openAiUsageToTokenMetrics}.
 */
export interface UnderscoreTokenUsage {
  /** Tokens in the completion/output. */
  completion_tokens?: number;
  /** Tokens in the prompt/input. */
  prompt_tokens?: number;
  /** Total tokens consumed. */
  total_tokens?: number;
}
136+
137+
export function openAiUsageToTokenMetrics(usage: TokenUsage | UnderscoreTokenUsage): TokenMetrics {
138+
return {
139+
total: 'total_tokens' in usage ? usage.total_tokens : (usage as TokenUsage).totalTokens ?? 0,
140+
input: 'prompt_tokens' in usage ? usage.prompt_tokens : (usage as TokenUsage).promptTokens ?? 0,
141+
output:
142+
'completion_tokens' in usage
143+
? usage.completion_tokens
144+
: (usage as TokenUsage).completionTokens ?? 0,
145+
};
146+
}

packages/sdk/ai/tsconfig.json

Lines changed: 12 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,12 @@
1+
{
2+
"compilerOptions": {
3+
"target": "ES2017",
4+
"module": "commonjs",
5+
"declaration": true,
6+
"outDir": "./dist",
7+
"strict": true,
8+
"esModuleInterop": true
9+
},
10+
"include": ["src"],
11+
"exclude": ["node_modules", "**/*.test.ts"]
12+
}

packages/shared/sdk-server/package.json

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -28,14 +28,12 @@
2828
"license": "Apache-2.0",
2929
"dependencies": {
3030
"@launchdarkly/js-sdk-common": "2.7.0",
31-
"mustache": "^4.2.0",
3231
"semver": "7.5.4"
3332
},
3433
"devDependencies": {
3534
"@launchdarkly/private-js-mocks": "0.0.1",
3635
"@trivago/prettier-plugin-sort-imports": "^4.1.1",
3736
"@types/jest": "^29.4.0",
38-
"@types/mustache": "^4.2.5",
3937
"@types/semver": "^7.3.13",
4038
"@typescript-eslint/eslint-plugin": "^6.20.0",
4139
"@typescript-eslint/parser": "^6.20.0",

packages/shared/sdk-server/src/LDClientImpl.ts

Lines changed: 0 additions & 78 deletions
Original file line numberDiff line numberDiff line change
@@ -52,8 +52,6 @@ import MigrationOpTracker from './MigrationOpTracker';
5252
import Configuration from './options/Configuration';
5353
import VersionedDataKinds from './store/VersionedDataKinds';
5454

55-
const Mustache = require('mustache');
56-
5755
const { ClientMessages, ErrorKinds, NullEventProcessor } = internal;
5856
enum InitState {
5957
Initializing,
@@ -314,82 +312,6 @@ export default class LDClientImpl implements LDClient {
314312
return this.clientWithTimeout(this.initializedPromise, options?.timeout, this.logger);
315313
}
316314

317-
/**
318-
* Parses and interpolates a template string with the provided variables.
319-
*
320-
* @param template - The template string to be parsed and interpolated.
321-
* @param variables - An object containing the variables to be used for interpolation.
322-
* @returns The interpolated string.
323-
*/
324-
interpolateTemplate(template: string, variables: Record<string, unknown>): string {
325-
return Mustache.render(template, variables, undefined, { escape: (item: any) => item });
326-
}
327-
328-
/**
329-
* Retrieves and processes a prompt template based on the provided key, LaunchDarkly context, and variables.
330-
*
331-
* @param key - A unique identifier for the prompt template. This key is used to fetch the correct prompt from storage or configuration.
332-
* @param context - The LaunchDarkly context object that contains relevant information about the current environment, user, or session. This context may influence how the prompt is processed or personalized.
333-
* @param variables - A map of key-value pairs representing dynamic variables to be injected into the prompt template. The keys correspond to placeholders within the template, and the values are the corresponding replacements.
334-
* @param defaultValue - A fallback value to be used if the prompt template associated with the key is not found or if any errors occur during processing.
335-
*
336-
* @returns The processed prompt after all variables have been substituted in the stored prompt template. If the prompt cannot be retrieved or processed, the `defaultValue` is returned.
337-
*
338-
* @example
339-
* const key = "welcome_prompt";
340-
* const context = new LDContext(...);
341-
* const variables = new Record<string, string>([["username", "John"]]);
342-
* const defaultValue = "Welcome, user!";
343-
*
344-
* const result = modelConfig(key, context, variables, defaultValue);
345-
* console.log(result);
346-
* // Output:
347-
* // {
348-
* // modelId: "gpt-4o",
349-
* // temperature: 0.2,
350-
* // maxTokens: 4096,
351-
* // userDefinedKey: "myValue",
352-
* // prompt: [
353-
* // {
354-
* // role: "system",
355-
* // content: "You are an amazing GPT."
356-
* // },
357-
* // {
358-
* // role: "user",
359-
* // content: "Explain how you're an amazing GPT."
360-
* // }
361-
* // ]
362-
* // }
363-
*/
364-
async modelConfig(
365-
key: string,
366-
context: LDContext,
367-
defaultValue: string,
368-
variables?: Record<string, unknown>,
369-
): Promise<any> {
370-
const detail = await this.hookRunner.withEvaluationSeries(
371-
key,
372-
context,
373-
defaultValue,
374-
VARIATION_METHOD_NAME,
375-
() =>
376-
new Promise<LDEvaluationDetail>((resolve) => {
377-
this.evaluateIfPossible(key, context, defaultValue, this.eventFactoryDefault, (res) => {
378-
resolve(res.detail);
379-
});
380-
}),
381-
);
382-
383-
const allVariables = { ldctx: context, ...variables };
384-
385-
detail.value.prompt = detail.value.prompt.map((entry: any) => ({
386-
...entry,
387-
content: this.interpolateTemplate(entry.content, allVariables),
388-
}));
389-
390-
return detail.value;
391-
}
392-
393315
variation(
394316
key: string,
395317
context: LDContext,

packages/shared/sdk-server/src/api/LDClient.ts

Lines changed: 0 additions & 21 deletions
Original file line numberDiff line numberDiff line change
@@ -88,27 +88,6 @@ export interface LDClient {
8888
*/
8989
waitForInitialization(options?: LDWaitForInitializationOptions): Promise<LDClient>;
9090

91-
modelConfig(
92-
key: string,
93-
context: LDContext,
94-
defaultValue: unknown,
95-
variables: Record<string, unknown>,
96-
): Promise<unknown>;
97-
98-
/**
99-
* Determines the variation of a feature flag for a context.
100-
*
101-
* @param key The unique key of the feature flag.
102-
* @param context The context requesting the flag. The client will generate an analytics event to
103-
* register this context with LaunchDarkly if the context does not already exist.
104-
* @param defaultValue The default value of the flag, to be used if the value is not available
105-
* from LaunchDarkly.
106-
* @param callback A Node-style callback to receive the result value. If omitted, you will receive
107-
* a Promise instead.
108-
* @returns
109-
* If you provided a callback, then nothing. Otherwise, a Promise which will be resolved with
110-
* the result value.
111-
*/
11291
variation(
11392
key: string,
11493
context: LDContext,

0 commit comments

Comments
 (0)