Skip to content

Commit 934d5aa

Browse files
authored
Merge branch 'launchdarkly:main' into mzafir/react-native-jest-mock
2 parents 8402b34 + 8351aca commit 934d5aa

File tree

24 files changed

+200
-103
lines changed

24 files changed

+200
-103
lines changed

.release-please-manifest.json

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -10,9 +10,9 @@
1010
"packages/shared/akamai-edgeworker-sdk": "1.3.2",
1111
"packages/store/node-server-sdk-dynamodb": "6.2.2",
1212
"packages/store/node-server-sdk-redis": "4.2.2",
13-
"packages/shared/sdk-client": "1.12.0",
14-
"packages/sdk/react-native": "10.9.2",
13+
"packages/shared/sdk-client": "1.12.1",
14+
"packages/sdk/react-native": "10.9.3",
1515
"packages/telemetry/node-server-sdk-otel": "1.1.2",
16-
"packages/sdk/browser": "0.3.2",
17-
"packages/sdk/server-ai": "0.3.0"
16+
"packages/sdk/browser": "0.3.3",
17+
"packages/sdk/server-ai": "0.4.0"
1818
}

packages/sdk/browser/CHANGELOG.md

Lines changed: 9 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,14 @@
11
# Changelog
22

3+
## [0.3.3](https://github.com/launchdarkly/js-core/compare/js-client-sdk-v0.3.2...js-client-sdk-v0.3.3) (2024-11-22)
4+
5+
6+
### Dependencies
7+
8+
* The following workspace dependencies were updated
9+
* dependencies
10+
* @launchdarkly/js-client-sdk-common bumped from 1.12.0 to 1.12.1
11+
312
## [0.3.2](https://github.com/launchdarkly/js-core/compare/js-client-sdk-v0.3.1...js-client-sdk-v0.3.2) (2024-11-13)
413

514

packages/sdk/browser/package.json

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
{
22
"name": "@launchdarkly/js-client-sdk",
3-
"version": "0.3.2",
3+
"version": "0.3.3",
44
"description": "LaunchDarkly SDK for JavaScript in Browsers",
55
"homepage": "https://github.com/launchdarkly/js-core/tree/main/packages/sdk/browser",
66
"repository": {
@@ -55,7 +55,7 @@
5555
"check": "yarn prettier && yarn lint && yarn build && yarn test"
5656
},
5757
"dependencies": {
58-
"@launchdarkly/js-client-sdk-common": "1.12.0"
58+
"@launchdarkly/js-client-sdk-common": "1.12.1"
5959
},
6060
"devDependencies": {
6161
"@jest/globals": "^29.7.0",

packages/sdk/react-native/CHANGELOG.md

Lines changed: 9 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,14 @@
11
# Changelog
22

3+
## [10.9.3](https://github.com/launchdarkly/js-core/compare/react-native-client-sdk-v10.9.2...react-native-client-sdk-v10.9.3) (2024-11-22)
4+
5+
6+
### Dependencies
7+
8+
* The following workspace dependencies were updated
9+
* dependencies
10+
* @launchdarkly/js-client-sdk-common bumped from 1.12.0 to 1.12.1
11+
312
## [10.9.2](https://github.com/launchdarkly/js-core/compare/react-native-client-sdk-v10.9.1...react-native-client-sdk-v10.9.2) (2024-11-04)
413

514

packages/sdk/react-native/package.json

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
{
22
"name": "@launchdarkly/react-native-client-sdk",
3-
"version": "10.9.2",
3+
"version": "10.9.3",
44
"description": "React Native LaunchDarkly SDK",
55
"homepage": "https://github.com/launchdarkly/js-core/tree/main/packages/sdk/react-native",
66
"repository": {
@@ -41,7 +41,7 @@
4141
"react-native": "*"
4242
},
4343
"dependencies": {
44-
"@launchdarkly/js-client-sdk-common": "1.12.0",
44+
"@launchdarkly/js-client-sdk-common": "1.12.1",
4545
"@react-native-async-storage/async-storage": "^1.21.0",
4646
"base64-js": "^1.5.1"
4747
},

packages/sdk/server-ai/CHANGELOG.md

Lines changed: 11 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,16 @@
11
# Changelog
22

3+
## [0.4.0](https://github.com/launchdarkly/js-core/compare/server-sdk-ai-v0.3.0...server-sdk-ai-v0.4.0) (2024-11-22)
4+
5+
6+
### ⚠ BREAKING CHANGES
7+
8+
* Updated AI config interface. ([#697](https://github.com/launchdarkly/js-core/issues/697))
9+
10+
### Features
11+
12+
* Updated AI config interface. ([#697](https://github.com/launchdarkly/js-core/issues/697)) ([cd72ea8](https://github.com/launchdarkly/js-core/commit/cd72ea8193888b0635b5beffa0a877b18294777e))
13+
314
## [0.3.0](https://github.com/launchdarkly/js-core/compare/server-sdk-ai-v0.2.1...server-sdk-ai-v0.3.0) (2024-11-15)
415

516

packages/sdk/server-ai/__tests__/LDAIClientImpl.test.ts

Lines changed: 39 additions & 43 deletions
Original file line numberDiff line numberDiff line change
@@ -11,36 +11,24 @@ const mockLdClient: jest.Mocked<LDClientMin> = {
1111

1212
const testContext: LDContext = { kind: 'user', key: 'test-user' };
1313

14-
it('interpolates template variables', () => {
15-
const client = new LDAIClientImpl(mockLdClient);
16-
const template = 'Hello {{name}}, your score is {{score}}';
17-
const variables = { name: 'John', score: 42 };
18-
19-
const result = client.interpolateTemplate(template, variables);
20-
expect(result).toBe('Hello John, your score is 42');
21-
});
22-
23-
it('handles empty variables in template interpolation', () => {
24-
const client = new LDAIClientImpl(mockLdClient);
25-
const template = 'Hello {{name}}';
26-
const variables = {};
27-
28-
const result = client.interpolateTemplate(template, variables);
29-
expect(result).toBe('Hello ');
30-
});
31-
32-
it('returns model config with interpolated prompts', async () => {
14+
it('returns config with interpolated messages', async () => {
3315
const client = new LDAIClientImpl(mockLdClient);
3416
const key = 'test-flag';
3517
const defaultValue: LDAIDefaults = {
36-
model: { modelId: 'test', name: 'test-model' },
37-
prompt: [],
18+
model: { id: 'test', parameters: { name: 'test-model' } },
19+
messages: [],
3820
enabled: true,
3921
};
4022

4123
const mockVariation = {
42-
model: { modelId: 'example-provider', name: 'imagination', temperature: 0.7, maxTokens: 4096 },
43-
prompt: [
24+
model: {
25+
id: 'example-model',
26+
parameters: { name: 'imagination', temperature: 0.7, maxTokens: 4096 },
27+
},
28+
provider: {
29+
id: 'example-provider',
30+
},
31+
messages: [
4432
{ role: 'system', content: 'Hello {{name}}' },
4533
{ role: 'user', content: 'Score: {{score}}' },
4634
],
@@ -53,11 +41,17 @@ it('returns model config with interpolated prompts', async () => {
5341
mockLdClient.variation.mockResolvedValue(mockVariation);
5442

5543
const variables = { name: 'John', score: 42 };
56-
const result = await client.modelConfig(key, testContext, defaultValue, variables);
44+
const result = await client.config(key, testContext, defaultValue, variables);
5745

5846
expect(result).toEqual({
59-
model: { modelId: 'example-provider', name: 'imagination', temperature: 0.7, maxTokens: 4096 },
60-
prompt: [
47+
model: {
48+
id: 'example-model',
49+
parameters: { name: 'imagination', temperature: 0.7, maxTokens: 4096 },
50+
},
51+
provider: {
52+
id: 'example-provider',
53+
},
54+
messages: [
6155
{ role: 'system', content: 'Hello John' },
6256
{ role: 'user', content: 'Score: 42' },
6357
],
@@ -66,46 +60,46 @@ it('returns model config with interpolated prompts', async () => {
6660
});
6761
});
6862

69-
it('includes context in variables for prompt interpolation', async () => {
63+
it('includes context in variables for messages interpolation', async () => {
7064
const client = new LDAIClientImpl(mockLdClient);
7165
const key = 'test-flag';
7266
const defaultValue: LDAIDefaults = {
73-
model: { modelId: 'test', name: 'test-model' },
74-
prompt: [],
67+
model: { id: 'test', parameters: { name: 'test-model' } },
68+
messages: [],
7569
};
7670

7771
const mockVariation = {
78-
prompt: [{ role: 'system', content: 'User key: {{ldctx.key}}' }],
72+
messages: [{ role: 'system', content: 'User key: {{ldctx.key}}' }],
7973
_ldMeta: { versionKey: 'v1', enabled: true },
8074
};
8175

8276
mockLdClient.variation.mockResolvedValue(mockVariation);
8377

84-
const result = await client.modelConfig(key, testContext, defaultValue);
78+
const result = await client.config(key, testContext, defaultValue);
8579

86-
expect(result.prompt?.[0].content).toBe('User key: test-user');
80+
expect(result.messages?.[0].content).toBe('User key: test-user');
8781
});
8882

8983
it('handles missing metadata in variation', async () => {
9084
const client = new LDAIClientImpl(mockLdClient);
9185
const key = 'test-flag';
9286
const defaultValue: LDAIDefaults = {
93-
model: { modelId: 'test', name: 'test-model' },
94-
prompt: [],
87+
model: { id: 'test', parameters: { name: 'test-model' } },
88+
messages: [],
9589
};
9690

9791
const mockVariation = {
98-
model: { modelId: 'example-provider', name: 'imagination' },
99-
prompt: [{ role: 'system', content: 'Hello' }],
92+
model: { id: 'example-provider', parameters: { name: 'imagination' } },
93+
messages: [{ role: 'system', content: 'Hello' }],
10094
};
10195

10296
mockLdClient.variation.mockResolvedValue(mockVariation);
10397

104-
const result = await client.modelConfig(key, testContext, defaultValue);
98+
const result = await client.config(key, testContext, defaultValue);
10599

106100
expect(result).toEqual({
107-
model: { modelId: 'example-provider', name: 'imagination' },
108-
prompt: [{ role: 'system', content: 'Hello' }],
101+
model: { id: 'example-provider', parameters: { name: 'imagination' } },
102+
messages: [{ role: 'system', content: 'Hello' }],
109103
tracker: expect.any(Object),
110104
enabled: false,
111105
});
@@ -115,18 +109,20 @@ it('passes the default value to the underlying client', async () => {
115109
const client = new LDAIClientImpl(mockLdClient);
116110
const key = 'non-existent-flag';
117111
const defaultValue: LDAIDefaults = {
118-
model: { modelId: 'default-model', name: 'default' },
119-
prompt: [{ role: 'system', content: 'Default prompt' }],
112+
model: { id: 'default-model', parameters: { name: 'default' } },
113+
provider: { id: 'default-provider' },
114+
messages: [{ role: 'system', content: 'Default messages' }],
120115
enabled: true,
121116
};
122117

123118
mockLdClient.variation.mockResolvedValue(defaultValue);
124119

125-
const result = await client.modelConfig(key, testContext, defaultValue);
120+
const result = await client.config(key, testContext, defaultValue);
126121

127122
expect(result).toEqual({
128123
model: defaultValue.model,
129-
prompt: defaultValue.prompt,
124+
messages: defaultValue.messages,
125+
provider: defaultValue.provider,
130126
tracker: expect.any(Object),
131127
enabled: false,
132128
});

packages/sdk/server-ai/__tests__/LDAIConfigTrackerImpl.test.ts

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -90,7 +90,7 @@ it('tracks OpenAI usage', async () => {
9090
const PROMPT_TOKENS = 49;
9191
const COMPLETION_TOKENS = 51;
9292

93-
await tracker.trackOpenAI(async () => ({
93+
await tracker.trackOpenAIMetrics(async () => ({
9494
usage: {
9595
total_tokens: TOTAL_TOKENS,
9696
prompt_tokens: PROMPT_TOKENS,
@@ -151,7 +151,7 @@ it('tracks Bedrock conversation with successful response', () => {
151151
},
152152
};
153153

154-
tracker.trackBedrockConverse(response);
154+
tracker.trackBedrockConverseMetrics(response);
155155

156156
expect(mockTrack).toHaveBeenCalledWith(
157157
'$ld:ai:generation',
@@ -198,7 +198,7 @@ it('tracks Bedrock conversation with error response', () => {
198198

199199
// TODO: We may want a track failure.
200200

201-
tracker.trackBedrockConverse(response);
201+
tracker.trackBedrockConverseMetrics(response);
202202

203203
expect(mockTrack).not.toHaveBeenCalled();
204204
});

packages/sdk/server-ai/examples/bedrock/package.json

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -24,7 +24,7 @@
2424
"dependencies": {
2525
"@aws-sdk/client-bedrock-runtime": "^3.679.0",
2626
"@launchdarkly/node-server-sdk": "^9.7.1",
27-
"@launchdarkly/server-sdk-ai": "0.3.0"
27+
"@launchdarkly/server-sdk-ai": "0.4.0"
2828
},
2929
"devDependencies": {
3030
"@trivago/prettier-plugin-sort-imports": "^4.1.1",

packages/sdk/server-ai/examples/bedrock/src/index.ts

Lines changed: 7 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -48,12 +48,12 @@ async function main() {
4848

4949
const aiClient = initAi(ldClient);
5050

51-
const aiConfig = await aiClient.modelConfig(
51+
const aiConfig = await aiClient.config(
5252
aiConfigKey!,
5353
context,
5454
{
5555
model: {
56-
modelId: 'my-default-model',
56+
id: 'my-default-model',
5757
},
5858
enabled: true,
5959
},
@@ -63,14 +63,14 @@ async function main() {
6363
);
6464
const { tracker } = aiConfig;
6565

66-
const completion = tracker.trackBedrockConverse(
66+
const completion = tracker.trackBedrockConverseMetrics(
6767
await awsClient.send(
6868
new ConverseCommand({
69-
modelId: aiConfig.model?.modelId ?? 'no-model',
70-
messages: mapPromptToConversation(aiConfig.prompt ?? []),
69+
modelId: aiConfig.model?.id ?? 'no-model',
70+
messages: mapPromptToConversation(aiConfig.messages ?? []),
7171
inferenceConfig: {
72-
temperature: aiConfig.model?.temperature ?? 0.5,
73-
maxTokens: aiConfig.model?.maxTokens ?? 4096,
72+
temperature: (aiConfig.model?.parameters?.temperature as number) ?? 0.5,
73+
maxTokens: (aiConfig.model?.parameters?.maxTokens as number) ?? 4096,
7474
},
7575
}),
7676
),

0 commit comments

Comments
 (0)