11import pytest
22from ldclient import Config , Context , LDClient
33from ldclient .integrations .test_data import TestData
4- from ldclient .testing .builders import *
54
6- from ldai .client import AIConfig , LDAIClient , LDMessage
5+ from ldai .client import AIConfig , ModelConfig , LDAIClient , LDMessage
76from ldai .tracker import LDAIConfigTracker
87
98
@@ -14,7 +13,7 @@ def td() -> TestData:
1413 td .flag ('model-config' )
1514 .variations (
1615 {
17- 'model' : {'modelId' : 'fakeModel' },
16+ 'model' : {'modelId' : 'fakeModel' , 'temperature' : 0.5 , 'maxTokens' : 4096 },
1817 'prompt' : [{'role' : 'system' , 'content' : 'Hello, {{name}}!' }],
1918 '_ldMeta' : {'enabled' : True , 'versionKey' : 'abcd' },
2019 },
@@ -27,7 +26,7 @@ def td() -> TestData:
2726 td .flag ('multiple-prompt' )
2827 .variations (
2928 {
30- 'model' : {'modelId' : 'fakeModel' },
29+ 'model' : {'modelId' : 'fakeModel' , 'temperature' : 0.7 , 'maxTokens' : 8192 },
3130 'prompt' : [
3231 {'role' : 'system' , 'content' : 'Hello, {{name}}!' },
3332 {'role' : 'user' , 'content' : 'The day is, {{day}}!' },
@@ -43,7 +42,7 @@ def td() -> TestData:
4342 td .flag ('ctx-interpolation' )
4443 .variations (
4544 {
46- 'model' : {'modelId' : 'fakeModel' },
45+ 'model' : {'modelId' : 'fakeModel' , 'extra-attribute' : 'I can be anything I set my mind/type to' },
4746 'prompt' : [{'role' : 'system' , 'content' : 'Hello, {{ldctx.name}}!' }],
4847 '_ldMeta' : {'enabled' : True , 'versionKey' : 'abcd' },
4948 }
@@ -55,7 +54,7 @@ def td() -> TestData:
5554 td .flag ('off-config' )
5655 .variations (
5756 {
58- 'model' : {'modelId' : 'fakeModel' },
57+ 'model' : {'modelId' : 'fakeModel' , 'temperature' : 0.1 },
5958 'prompt' : [{'role' : 'system' , 'content' : 'Hello, {{name}}!' }],
6059 '_ldMeta' : {'enabled' : False , 'versionKey' : 'abcd' },
6160 }
@@ -82,12 +81,26 @@ def ldai_client(client: LDClient) -> LDAIClient:
8281 return LDAIClient (client )
8382
8483
def test_model_config_delegates_to_properties():
    """ModelConfig exposes its settings both as typed properties and via get_attribute.

    The property accessors (`id`, `temperature`, `max_tokens`) are thin views over
    the underlying attribute map, which is keyed by the serialized (camelCase) names.
    """
    model = ModelConfig(
        'fakeModel',
        temperature=0.5,
        max_tokens=4096,
        attributes={'extra-attribute': 'value'},
    )

    # Direct property access.
    assert model.id == 'fakeModel'
    assert model.temperature == 0.5
    assert model.max_tokens == 4096
    assert model.get_attribute('extra-attribute') == 'value'
    assert model.get_attribute('non-existent') is None

    # Properties and attribute lookups agree when the serialized key is used.
    assert model.id == model.get_attribute('id')
    assert model.temperature == model.get_attribute('temperature')
    assert model.max_tokens == model.get_attribute('maxTokens')
    # The snake_case spelling is NOT a recognized attribute key (lookup yields None).
    assert model.max_tokens != model.get_attribute('max_tokens')
97+
8598def test_model_config_interpolation (ldai_client : LDAIClient , tracker ):
8699 context = Context .create ('user-key' )
87100 default_value = AIConfig (
88101 tracker = tracker ,
89102 enabled = True ,
90- model = { 'modelId' : ' fakeModel'} ,
103+ model = ModelConfig ( ' fakeModel') ,
91104 prompt = [LDMessage (role = 'system' , content = 'Hello, {{name}}!' )],
92105 )
93106 variables = {'name' : 'World' }
@@ -99,10 +112,14 @@ def test_model_config_interpolation(ldai_client: LDAIClient, tracker):
99112 assert config .prompt [0 ].content == 'Hello, World!'
100113 assert config .enabled is True
101114
115+ assert config .model .id == 'fakeModel'
116+ assert config .model .temperature == 0.5
117+ assert config .model .max_tokens == 4096
118+
102119
103120def test_model_config_no_variables (ldai_client : LDAIClient , tracker ):
104121 context = Context .create ('user-key' )
105- default_value = AIConfig (tracker = tracker , enabled = True , model = {} , prompt = [])
122+ default_value = AIConfig (tracker = tracker , enabled = True , model = ModelConfig ( 'fake-model' ) , prompt = [])
106123
107124 config = ldai_client .model_config ('model-config' , context , default_value , {})
108125
@@ -111,10 +128,14 @@ def test_model_config_no_variables(ldai_client: LDAIClient, tracker):
111128 assert config .prompt [0 ].content == 'Hello, !'
112129 assert config .enabled is True
113130
131+ assert config .model .id == 'fakeModel'
132+ assert config .model .temperature == 0.5
133+ assert config .model .max_tokens == 4096
134+
114135
115136def test_context_interpolation (ldai_client : LDAIClient , tracker ):
116137 context = Context .builder ('user-key' ).name ("Sandy" ).build ()
117- default_value = AIConfig (tracker = tracker , enabled = True , model = {} , prompt = [])
138+ default_value = AIConfig (tracker = tracker , enabled = True , model = ModelConfig ( 'fake-model' ) , prompt = [])
118139 variables = {'name' : 'World' }
119140
120141 config = ldai_client .model_config (
@@ -126,10 +147,15 @@ def test_context_interpolation(ldai_client: LDAIClient, tracker):
126147 assert config .prompt [0 ].content == 'Hello, Sandy!'
127148 assert config .enabled is True
128149
150+ assert config .model .id == 'fakeModel'
151+ assert config .model .temperature is None
152+ assert config .model .max_tokens is None
153+ assert config .model .get_attribute ('extra-attribute' ) == 'I can be anything I set my mind/type to'
154+
129155
130156def test_model_config_multiple (ldai_client : LDAIClient , tracker ):
131157 context = Context .create ('user-key' )
132- default_value = AIConfig (tracker = tracker , enabled = True , model = {} , prompt = [])
158+ default_value = AIConfig (tracker = tracker , enabled = True , model = ModelConfig ( 'fake-model' ) , prompt = [])
133159 variables = {'name' : 'World' , 'day' : 'Monday' }
134160
135161 config = ldai_client .model_config (
@@ -142,11 +168,18 @@ def test_model_config_multiple(ldai_client: LDAIClient, tracker):
142168 assert config .prompt [1 ].content == 'The day is, Monday!'
143169 assert config .enabled is True
144170
171+ assert config .model .id == 'fakeModel'
172+ assert config .model .temperature == 0.7
173+ assert config .model .max_tokens == 8192
174+
145175
def test_model_config_disabled(ldai_client: LDAIClient, tracker):
    """A flag whose _ldMeta marks it disabled reports enabled=False.

    The flag's model configuration is still surfaced even when disabled:
    'off-config' carries modelId 'fakeModel' and temperature 0.1, with no
    maxTokens set.
    """
    context = Context.create('user-key')
    default_value = AIConfig(
        tracker=tracker,
        enabled=False,
        model=ModelConfig('fake-model'),
        prompt=[],
    )

    config = ldai_client.model_config('off-config', context, default_value, {})

    assert config.enabled is False
    # Model settings come from the evaluated flag, not the default value.
    assert config.model.id == 'fakeModel'
    assert config.model.temperature == 0.1
    assert config.model.max_tokens is None
0 commit comments