
Commit 4d5bee6

Authored by Dan O'Brien
feat: SDK Changes in preparation for release (#7)
* Add new Bedrock methods
* Remove Underscore class
* Rename methods to align closer to TypeScript
* Add Tests
* Cleanup README and other metadata
2 parents 0e7dacc + e37d4ba commit 4d5bee6

8 files changed: +354 additions, -157 deletions


.release-please-manifest.json

Lines changed: 1 addition & 1 deletion
@@ -1,3 +1,3 @@
 {
-  ".": "1.0.0"
+  ".": "0.1.0"
 }

CONTRIBUTING.md

Lines changed: 1 addition & 3 deletions
@@ -4,7 +4,7 @@ LaunchDarkly has published an [SDK contributor's guide](https://docs.launchdarkl
 
 ## Submitting bug reports and feature requests
 
-The LaunchDarkly SDK team monitors the [issue tracker](https://github.com/launchdarkly/python-server-sdk-AI/issues) in the SDK repository. Bug reports and feature requests specific to this library should be filed in this issue tracker. The SDK team will respond to all newly filed issues within two business days.
+The LaunchDarkly SDK team monitors the [issue tracker](https://github.com/launchdarkly/python-server-sdk-ai/issues) in the SDK repository. Bug reports and feature requests specific to this library should be filed in this issue tracker. The SDK team will respond to all newly filed issues within two business days.
 
 ## Submitting pull requests
 
@@ -55,8 +55,6 @@ make lint
 
 The library's module structure is as follows:
 
-<!-- TODO: Add structure description -->
-
 ### Type hints
 
 Python does not require the use of type hints, but they can be extremely helpful for spotting mistakes and for improving the IDE experience, so we should always use them in the library. Every method in the public API is expected to have type hints for all non-`self` parameters, and for its return value if any.
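
As an illustrative sketch of that convention (the class and method names below are hypothetical and not part of this commit), a public method would carry hints on every non-`self` parameter and on its return value:

```python
from typing import Any, Dict, Optional


class GreetingFormatter:
    """Hypothetical example class, shown only to illustrate the type-hint convention."""

    def render(self, template: str, variables: Optional[Dict[str, Any]] = None) -> str:
        # Every non-`self` parameter and the return value are annotated.
        return template.format(**(variables or {}))
```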

README.md

Lines changed: 1 addition & 12 deletions
@@ -12,18 +12,7 @@ This version of the library has a minimum Python version of 3.8.
 
 ## Getting started
 
-Install the package
-
-$ pip install launchdarkly-server-sdk-ai
-
-The provided `TracingHook` can be setup as shown below:
-
-<!-- TODO: Install instructions -->
-
-```python
-import ldclient
-
-```
+Refer to the [SDK reference guide](https://docs.launchdarkly.com/sdk/ai/python) for instructions on getting started with using the SDK.
 
 ## Learn more
 

ldai/client.py

Lines changed: 44 additions & 30 deletions
@@ -1,53 +1,67 @@
-from typing import Any, Dict, Optional
+from typing import Any, Dict, List, Literal, Optional
 from ldclient import Context
 from ldclient.client import LDClient
 import chevron
 
 from ldai.tracker import LDAIConfigTracker
-from ldai.types import AIConfig
+from dataclasses import dataclass
+
+@dataclass
+class LDMessage():
+    role: Literal['system', 'user', 'assistant']
+    content: str
+
+@dataclass
+class AIConfigData():
+    model: Optional[dict]
+    prompt: Optional[List[LDMessage]]
+class AIConfig():
+    def __init__(self, config: AIConfigData, tracker: LDAIConfigTracker, enabled: bool):
+        self.config = config
+        self.tracker = tracker
+        self.enabled = enabled
 
 class LDAIClient:
     """The LaunchDarkly AI SDK client object."""
 
     def __init__(self, client: LDClient):
         self.client = client
 
-    def model_config(self, key: str, context: Context, default_value: str, variables: Optional[Dict[str, Any]] = None) -> AIConfig:
-        """Get the value of a model configuration asynchronously.
-
-        Args:
-            key: The key of the model configuration.
-            context: The context to evaluate the model configuration in.
-            default_value: The default value of the model configuration.
-            variables: Additional variables for the model configuration.
+    def model_config(self, key: str, context: Context, default_value: AIConfig, variables: Optional[Dict[str, Any]] = None) -> AIConfig:
+        """
+        Get the value of a model configuration asynchronously.
 
-        Returns:
-            The value of the model configuration.
+        :param key: The key of the model configuration.
+        :param context: The context to evaluate the model configuration in.
+        :param default_value: The default value of the model configuration.
+        :param variables: Additional variables for the model configuration.
+        :return: The value of the model configuration.
         """
         variation = self.client.variation(key, context, default_value)
 
-        all_variables = {'ldctx': context}
+        all_variables = {}
         if variables:
             all_variables.update(variables)
+        all_variables['ldctx'] = context
+
+        if isinstance(variation['prompt'], list) and all(isinstance(entry, dict) for entry in variation['prompt']):
+            variation['prompt'] = [
+                LDMessage(
+                    role=entry['role'],
+                    content=self.__interpolate_template(entry['content'], all_variables)
+                )
+                for entry in variation['prompt']
+            ]
 
-        variation['prompt'] = [
-            {
-                **entry,
-                'content': self.interpolate_template(entry['content'], all_variables)
-            }
-            for entry in variation['prompt']
-        ]
+        enabled = variation.get('_ldMeta',{}).get('enabled', False)
+        return AIConfig(config=AIConfigData(model=variation['model'], prompt=variation['prompt']), tracker=LDAIConfigTracker(self.client, variation.get('_ldMeta', {}).get('versionKey', ''), key, context), enabled=bool(enabled))
 
-        return AIConfig(config=variation, tracker=LDAIConfigTracker(self.client, variation['_ldMeta']['variationId'], key, context))
-
-    def interpolate_template(self, template: str, variables: Dict[str, Any]) -> str:
-        """Interpolate the template with the given variables.
-
-        Args:
-            template: The template string.
-            variables: The variables to interpolate into the template.
+    def __interpolate_template(self, template: str, variables: Dict[str, Any]) -> str:
+        """
+        Interpolate the template with the given variables.
 
-        Returns:
-            The interpolated string.
+        :template: The template string.
+        :variables: The variables to interpolate into the template.
+        :return: The interpolated string.
         """
         return chevron.render(template, variables)
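
For context, the reworked API in this file would be exercised roughly as follows. This is a hedged sketch based only on the signatures visible in this diff and in the tests below; the SDK key, flag key, and variable values are placeholders, and it assumes a reachable LaunchDarkly environment with an AI config flag under that key.

```python
from ldclient import Config, Context, LDClient
from ldai.client import AIConfig, AIConfigData, LDAIClient, LDMessage
from ldai.tracker import LDAIConfigTracker

# Placeholder SDK key; a real value comes from your LaunchDarkly project.
ld_client = LDClient(Config('sdk-key'))
ai_client = LDAIClient(ld_client)

context = Context.create('user-key')

# Fallback value returned if the flag cannot be evaluated; mirrors the shape used in the tests.
default_value = AIConfig(
    config=AIConfigData(
        model={'modelId': 'fakeModel'},
        prompt=[LDMessage(role='system', content='Hello, {{name}}!')],
    ),
    tracker=LDAIConfigTracker(ld_client, '', 'model-config', context),
    enabled=False,
)

# {{name}} in each prompt message is interpolated via chevron before the config is returned.
config = ai_client.model_config('model-config', context, default_value, {'name': 'World'})
if config.enabled and config.config.prompt:
    print(config.config.prompt[0].content)  # e.g. "Hello, World!"
```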

ldai/testing/test_model_config.py

Lines changed: 105 additions & 0 deletions
@@ -0,0 +1,105 @@
+import pytest
+from ldclient import LDClient, Context, Config
+from ldclient.integrations.test_data import TestData
+from ldai.client import AIConfig, AIConfigData, LDAIClient, LDMessage
+from ldai.tracker import LDAIConfigTracker
+from ldclient.testing.builders import *
+
+
+@pytest.fixture
+def td() -> TestData:
+    td = TestData.data_source()
+    td.update(td.flag('model-config').variations({
+        'model': { 'modelId': 'fakeModel'},
+        'prompt': [{'role': 'system', 'content': 'Hello, {{name}}!'}],
+        '_ldMeta': {'enabled': True, 'versionKey': 'abcd'}
+    }, "green").variation_for_all(0))
+
+    td.update(td.flag('multiple-prompt').variations({
+        'model': { 'modelId': 'fakeModel'},
+        'prompt': [{'role': 'system', 'content': 'Hello, {{name}}!'}, {'role': 'user', 'content': 'The day is, {{day}}!'}],
+        '_ldMeta': {'enabled': True, 'versionKey': 'abcd'}
+    }, "green").variation_for_all(0))
+
+    td.update(td.flag('ctx-interpolation').variations({
+        'model': { 'modelId': 'fakeModel'},
+        'prompt': [{'role': 'system', 'content': 'Hello, {{ldctx.name}}!'}],
+        '_ldMeta': {'enabled': True, 'versionKey': 'abcd'}
+    }).variation_for_all(0))
+
+    td.update(td.flag('off-config').variations({
+        'model': { 'modelId': 'fakeModel'},
+        'prompt': [{'role': 'system', 'content': 'Hello, {{name}}!'}],
+        '_ldMeta': {'enabled': False, 'versionKey': 'abcd'}
+    }).variation_for_all(0))
+
+    return td
+
+@pytest.fixture
+def client(td: TestData) -> LDClient:
+    config = Config('sdk-key', update_processor_class=td, send_events=False)
+    return LDClient(config=config)
+
+@pytest.fixture
+def tracker(client: LDClient) -> LDAIConfigTracker:
+    return LDAIConfigTracker(client, 'abcd', 'model-config', Context.create('user-key'))
+
+@pytest.fixture
+def ldai_client(client: LDClient) -> LDAIClient:
+    return LDAIClient(client)
+
+def test_model_config_interpolation(ldai_client: LDAIClient, tracker):
+    context = Context.create('user-key')
+    default_value = AIConfig(config=AIConfigData(model={ 'modelId': 'fakeModel'}, prompt=[LDMessage(role='system', content='Hello, {{name}}!')]), tracker=tracker, enabled=True)
+    variables = {'name': 'World'}
+
+    config = ldai_client.model_config('model-config', context, default_value, variables)
+
+    assert config.config.prompt is not None
+    assert len(config.config.prompt) > 0
+    assert config.config.prompt[0].content == 'Hello, World!'
+    assert config.enabled is True
+
+def test_model_config_no_variables(ldai_client: LDAIClient, tracker):
+    context = Context.create('user-key')
+    default_value = AIConfig(config=AIConfigData(model={}, prompt=[]), tracker=tracker, enabled=True)
+
+    config = ldai_client.model_config('model-config', context, default_value, {})
+
+    assert config.config.prompt is not None
+    assert len(config.config.prompt) > 0
+    assert config.config.prompt[0].content == 'Hello, !'
+    assert config.enabled is True
+
+def test_context_interpolation(ldai_client: LDAIClient, tracker):
+    context = Context.builder('user-key').name("Sandy").build()
+    default_value = AIConfig(config=AIConfigData(model={}, prompt=[]), tracker=tracker, enabled=True)
+    variables = {'name': 'World'}
+
+    config = ldai_client.model_config('ctx-interpolation', context, default_value, variables)
+
+    assert config.config.prompt is not None
+    assert len(config.config.prompt) > 0
+    assert config.config.prompt[0].content == 'Hello, Sandy!'
+    assert config.enabled is True
+
+def test_model_config_multiple(ldai_client: LDAIClient, tracker):
+    context = Context.create('user-key')
+    default_value = AIConfig(config=AIConfigData(model={}, prompt=[]), tracker=tracker, enabled=True)
+    variables = {'name': 'World', 'day': 'Monday'}
+
+    config = ldai_client.model_config('multiple-prompt', context, default_value, variables)
+
+    assert config.config.prompt is not None
+    assert len(config.config.prompt) > 0
+    assert config.config.prompt[0].content == 'Hello, World!'
+    assert config.config.prompt[1].content == 'The day is, Monday!'
+    assert config.enabled is True
+
+def test_model_config_disabled(ldai_client: LDAIClient, tracker):
+    context = Context.create('user-key')
+    default_value = AIConfig(config=AIConfigData(model={}, prompt=[]), tracker=tracker, enabled=False)
+
+    config = ldai_client.model_config('off-config', context, default_value, {})
+
+    assert config.enabled is False
