import pytest
from ldclient import Config, Context, LDClient
from ldclient.integrations.test_data import TestData
-from ldclient.testing.builders import *

-from ldai.client import AIConfig, LDAIClient, LDMessage
+from ldai.client import AIConfig, ModelConfig, LDAIClient, LDMessage
from ldai.tracker import LDAIConfigTracker

@@ -14,7 +13,7 @@ def td() -> TestData:
    td.flag('model-config')
    .variations(
        {
-            'model': {'modelId': 'fakeModel'},
+            'model': {'modelId': 'fakeModel', 'temperature': 0.5, 'maxTokens': 4096},
            'prompt': [{'role': 'system', 'content': 'Hello, {{name}}!'}],
            '_ldMeta': {'enabled': True, 'versionKey': 'abcd'},
        },
@@ -27,7 +26,7 @@ def td() -> TestData:
    td.flag('multiple-prompt')
    .variations(
        {
-            'model': {'modelId': 'fakeModel'},
+            'model': {'modelId': 'fakeModel', 'temperature': 0.7, 'maxTokens': 8192},
            'prompt': [
                {'role': 'system', 'content': 'Hello, {{name}}!'},
                {'role': 'user', 'content': 'The day is, {{day}}!'},
@@ -43,7 +42,7 @@ def td() -> TestData:
    td.flag('ctx-interpolation')
    .variations(
        {
-            'model': {'modelId': 'fakeModel'},
+            'model': {'modelId': 'fakeModel', 'extra-attribute': 'I can be anything I set my mind/type to'},
            'prompt': [{'role': 'system', 'content': 'Hello, {{ldctx.name}}!'}],
            '_ldMeta': {'enabled': True, 'versionKey': 'abcd'},
        }
@@ -55,7 +54,7 @@ def td() -> TestData:
    td.flag('off-config')
    .variations(
        {
-            'model': {'modelId': 'fakeModel'},
+            'model': {'modelId': 'fakeModel', 'temperature': 0.1},
            'prompt': [{'role': 'system', 'content': 'Hello, {{name}}!'}],
            '_ldMeta': {'enabled': False, 'versionKey': 'abcd'},
        }
@@ -82,12 +81,26 @@ def ldai_client(client: LDClient) -> LDAIClient:
    return LDAIClient(client)


+def test_model_config_delegates_to_properties():
+    model = ModelConfig('fakeModel', temperature=0.5, max_tokens=4096, attributes={'extra-attribute': 'value'})
+    assert model.id == 'fakeModel'
+    assert model.temperature == 0.5
+    assert model.max_tokens == 4096
+    assert model.get_attribute('extra-attribute') == 'value'
+    assert model.get_attribute('non-existent') is None
+
+    assert model.id == model.get_attribute('id')
+    assert model.temperature == model.get_attribute('temperature')
+    assert model.max_tokens == model.get_attribute('maxTokens')
+    assert model.max_tokens != model.get_attribute('max_tokens')
+
+
def test_model_config_interpolation(ldai_client: LDAIClient, tracker):
    context = Context.create('user-key')
    default_value = AIConfig(
        tracker=tracker,
        enabled=True,
-        model={'modelId': 'fakeModel'},
+        model=ModelConfig('fakeModel'),
        prompt=[LDMessage(role='system', content='Hello, {{name}}!')],
    )
    variables = {'name': 'World'}
@@ -99,10 +112,14 @@ def test_model_config_interpolation(ldai_client: LDAIClient, tracker):
    assert config.prompt[0].content == 'Hello, World!'
    assert config.enabled is True

+    assert config.model.id == 'fakeModel'
+    assert config.model.temperature == 0.5
+    assert config.model.max_tokens == 4096
+

def test_model_config_no_variables(ldai_client: LDAIClient, tracker):
    context = Context.create('user-key')
-    default_value = AIConfig(tracker=tracker, enabled=True, model={}, prompt=[])
+    default_value = AIConfig(tracker=tracker, enabled=True, model=ModelConfig('fake-model'), prompt=[])

    config = ldai_client.model_config('model-config', context, default_value, {})

@@ -111,10 +128,14 @@ def test_model_config_no_variables(ldai_client: LDAIClient, tracker):
    assert config.prompt[0].content == 'Hello, !'
    assert config.enabled is True

+    assert config.model.id == 'fakeModel'
+    assert config.model.temperature == 0.5
+    assert config.model.max_tokens == 4096
+

def test_context_interpolation(ldai_client: LDAIClient, tracker):
    context = Context.builder('user-key').name("Sandy").build()
-    default_value = AIConfig(tracker=tracker, enabled=True, model={}, prompt=[])
+    default_value = AIConfig(tracker=tracker, enabled=True, model=ModelConfig('fake-model'), prompt=[])
    variables = {'name': 'World'}

    config = ldai_client.model_config(
@@ -126,10 +147,15 @@ def test_context_interpolation(ldai_client: LDAIClient, tracker):
    assert config.prompt[0].content == 'Hello, Sandy!'
    assert config.enabled is True

+    assert config.model.id == 'fakeModel'
+    assert config.model.temperature is None
+    assert config.model.max_tokens is None
+    assert config.model.get_attribute('extra-attribute') == 'I can be anything I set my mind/type to'
+

def test_model_config_multiple(ldai_client: LDAIClient, tracker):
    context = Context.create('user-key')
-    default_value = AIConfig(tracker=tracker, enabled=True, model={}, prompt=[])
+    default_value = AIConfig(tracker=tracker, enabled=True, model=ModelConfig('fake-model'), prompt=[])
    variables = {'name': 'World', 'day': 'Monday'}

    config = ldai_client.model_config(
@@ -142,11 +168,18 @@ def test_model_config_multiple(ldai_client: LDAIClient, tracker):
    assert config.prompt[1].content == 'The day is, Monday!'
    assert config.enabled is True

+    assert config.model.id == 'fakeModel'
+    assert config.model.temperature == 0.7
+    assert config.model.max_tokens == 8192
+

def test_model_config_disabled(ldai_client: LDAIClient, tracker):
    context = Context.create('user-key')
-    default_value = AIConfig(tracker=tracker, enabled=False, model={}, prompt=[])
+    default_value = AIConfig(tracker=tracker, enabled=False, model=ModelConfig('fake-model'), prompt=[])

    config = ldai_client.model_config('off-config', context, default_value, {})

    assert config.enabled is False
+    assert config.model.id == 'fakeModel'
+    assert config.model.temperature == 0.1
+    assert config.model.max_tokens is None
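Taken together, these tests pin down the new ModelConfig surface: the core settings are exposed as typed properties (id, temperature, max_tokens), every value stays reachable through get_attribute() under its canonical key (max_tokens is stored as 'maxTokens', so the snake_case key does not resolve to it), and AIConfig defaults are now built from ModelConfig instances instead of raw dicts. The sketch below is not part of this change; it shows how a further test in this module might exercise that flow end to end, reusing the existing ldai_client and tracker fixtures and the 'model-config' flag set up in td(). The test name and the fallback values it passes are purely illustrative.

# Hedged sketch only: a hypothetical extra test for this module, not part of the PR.
def test_model_config_usage_sketch(ldai_client: LDAIClient, tracker):
    context = Context.create('user-key')

    # Typed default used when evaluation cannot produce a config.
    default_value = AIConfig(
        tracker=tracker,
        enabled=False,
        model=ModelConfig(
            'fallback-model',            # illustrative fallback model id
            temperature=0.2,             # illustrative fallback settings
            max_tokens=1024,
            attributes={'extra-attribute': 'value'},
        ),
        prompt=[LDMessage(role='system', content='Hello, {{name}}!')],
    )

    config = ldai_client.model_config('model-config', context, default_value, {'name': 'World'})

    # The evaluated 'model-config' variation wins over the fallback...
    assert config.enabled is True
    assert config.prompt[0].content == 'Hello, World!'
    # ...its model settings come back as typed properties,
    assert config.model.id == 'fakeModel'
    assert config.model.temperature == 0.5
    assert config.model.max_tokens == 4096
    # and the same values remain reachable by their canonical attribute names.
    assert config.model.get_attribute('maxTokens') == 4096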