 import pytest
 from ldclient import Config, Context, LDClient
 from ldclient.integrations.test_data import TestData
-from ldclient.testing.builders import *

-from ldai.client import AIConfig, AIConfigData, LDAIClient, LDMessage
+from ldai.client import AIConfig, LDAIClient, LDMessage, ModelConfig
 from ldai.tracker import LDAIConfigTracker


@@ -14,7 +13,7 @@ def td() -> TestData:
         td.flag('model-config')
         .variations(
             {
-                'model': {'modelId': 'fakeModel'},
+                'model': {'modelId': 'fakeModel', 'temperature': 0.5, 'maxTokens': 4096},
                 'prompt': [{'role': 'system', 'content': 'Hello, {{name}}!'}],
                 '_ldMeta': {'enabled': True, 'versionKey': 'abcd'},
             },
@@ -27,7 +26,7 @@ def td() -> TestData:
         td.flag('multiple-prompt')
         .variations(
             {
-                'model': {'modelId': 'fakeModel'},
+                'model': {'modelId': 'fakeModel', 'temperature': 0.7, 'maxTokens': 8192},
                 'prompt': [
                     {'role': 'system', 'content': 'Hello, {{name}}!'},
                     {'role': 'user', 'content': 'The day is, {{day}}!'},
@@ -43,7 +42,7 @@ def td() -> TestData:
         td.flag('ctx-interpolation')
         .variations(
             {
-                'model': {'modelId': 'fakeModel'},
+                'model': {'modelId': 'fakeModel', 'extra-attribute': 'I can be anything I set my mind/type to'},
                 'prompt': [{'role': 'system', 'content': 'Hello, {{ldctx.name}}!'}],
                 '_ldMeta': {'enabled': True, 'versionKey': 'abcd'},
             }
@@ -55,7 +54,7 @@ def td() -> TestData:
         td.flag('off-config')
         .variations(
             {
-                'model': {'modelId': 'fakeModel'},
+                'model': {'modelId': 'fakeModel', 'temperature': 0.1},
                 'prompt': [{'role': 'system', 'content': 'Hello, {{name}}!'}],
                 '_ldMeta': {'enabled': False, 'versionKey': 'abcd'},
             }
@@ -82,81 +81,110 @@ def ldai_client(client: LDClient) -> LDAIClient:
     return LDAIClient(client)


+def test_model_config_delegates_to_properties():
+    model = ModelConfig('fakeModel', temperature=0.5, max_tokens=4096, attributes={'extra-attribute': 'value'})
+    assert model.id == 'fakeModel'
+    assert model.temperature == 0.5
+    assert model.max_tokens == 4096
+    assert model.get_attribute('extra-attribute') == 'value'
+    assert model.get_attribute('non-existent') is None
+
+    assert model.id == model.get_attribute('id')
+    assert model.temperature == model.get_attribute('temperature')
+    assert model.max_tokens == model.get_attribute('maxTokens')
+    assert model.max_tokens != model.get_attribute('max_tokens')
+
+
 def test_model_config_interpolation(ldai_client: LDAIClient, tracker):
     context = Context.create('user-key')
     default_value = AIConfig(
-        config=AIConfigData(
-            model={'modelId': 'fakeModel'},
-            prompt=[LDMessage(role='system', content='Hello, {{name}}!')],
-        ),
         tracker=tracker,
         enabled=True,
+        model=ModelConfig('fakeModel'),
+        prompt=[LDMessage(role='system', content='Hello, {{name}}!')],
     )
     variables = {'name': 'World'}

     config = ldai_client.model_config('model-config', context, default_value, variables)

-    assert config.config.prompt is not None
-    assert len(config.config.prompt) > 0
-    assert config.config.prompt[0].content == 'Hello, World!'
+    assert config.prompt is not None
+    assert len(config.prompt) > 0
+    assert config.prompt[0].content == 'Hello, World!'
     assert config.enabled is True

+    assert config.model is not None
+    assert config.model.id == 'fakeModel'
+    assert config.model.temperature == 0.5
+    assert config.model.max_tokens == 4096
+

 def test_model_config_no_variables(ldai_client: LDAIClient, tracker):
     context = Context.create('user-key')
-    default_value = AIConfig(
-        config=AIConfigData(model={}, prompt=[]), tracker=tracker, enabled=True
-    )
+    default_value = AIConfig(tracker=tracker, enabled=True, model=ModelConfig('fake-model'), prompt=[])

     config = ldai_client.model_config('model-config', context, default_value, {})

-    assert config.config.prompt is not None
-    assert len(config.config.prompt) > 0
-    assert config.config.prompt[0].content == 'Hello, !'
+    assert config.prompt is not None
+    assert len(config.prompt) > 0
+    assert config.prompt[0].content == 'Hello, !'
     assert config.enabled is True

+    assert config.model is not None
+    assert config.model.id == 'fakeModel'
+    assert config.model.temperature == 0.5
+    assert config.model.max_tokens == 4096
+

 def test_context_interpolation(ldai_client: LDAIClient, tracker):
     context = Context.builder('user-key').name("Sandy").build()
-    default_value = AIConfig(
-        config=AIConfigData(model={}, prompt=[]), tracker=tracker, enabled=True
-    )
+    default_value = AIConfig(tracker=tracker, enabled=True, model=ModelConfig('fake-model'), prompt=[])
     variables = {'name': 'World'}

     config = ldai_client.model_config(
         'ctx-interpolation', context, default_value, variables
     )

-    assert config.config.prompt is not None
-    assert len(config.config.prompt) > 0
-    assert config.config.prompt[0].content == 'Hello, Sandy!'
+    assert config.prompt is not None
+    assert len(config.prompt) > 0
+    assert config.prompt[0].content == 'Hello, Sandy!'
     assert config.enabled is True

+    assert config.model is not None
+    assert config.model.id == 'fakeModel'
+    assert config.model.temperature is None
+    assert config.model.max_tokens is None
+    assert config.model.get_attribute('extra-attribute') == 'I can be anything I set my mind/type to'
+

 def test_model_config_multiple(ldai_client: LDAIClient, tracker):
     context = Context.create('user-key')
-    default_value = AIConfig(
-        config=AIConfigData(model={}, prompt=[]), tracker=tracker, enabled=True
-    )
+    default_value = AIConfig(tracker=tracker, enabled=True, model=ModelConfig('fake-model'), prompt=[])
     variables = {'name': 'World', 'day': 'Monday'}

     config = ldai_client.model_config(
         'multiple-prompt', context, default_value, variables
     )

-    assert config.config.prompt is not None
-    assert len(config.config.prompt) > 0
-    assert config.config.prompt[0].content == 'Hello, World!'
-    assert config.config.prompt[1].content == 'The day is, Monday!'
+    assert config.prompt is not None
+    assert len(config.prompt) > 0
+    assert config.prompt[0].content == 'Hello, World!'
+    assert config.prompt[1].content == 'The day is, Monday!'
     assert config.enabled is True

+    assert config.model is not None
+    assert config.model.id == 'fakeModel'
+    assert config.model.temperature == 0.7
+    assert config.model.max_tokens == 8192
+

 def test_model_config_disabled(ldai_client: LDAIClient, tracker):
     context = Context.create('user-key')
-    default_value = AIConfig(
-        config=AIConfigData(model={}, prompt=[]), tracker=tracker, enabled=False
-    )
+    default_value = AIConfig(tracker=tracker, enabled=False, model=ModelConfig('fake-model'), prompt=[])

     config = ldai_client.model_config('off-config', context, default_value, {})

+    assert config.model is not None
     assert config.enabled is False
+    assert config.model.id == 'fakeModel'
+    assert config.model.temperature == 0.1
+    assert config.model.max_tokens is None