-import logging
 from typing import Any, Dict, List, Optional, Tuple
 
 import chevron
 from ldclient import Context
 from ldclient.client import LDClient
 
+from ldai import log
 from ldai.chat import Chat
 from ldai.judge import Judge
 from ldai.models import (AIAgentConfig, AIAgentConfigDefault,
                          AIAgentConfigRequest, AIAgents, AICompletionConfig,
                          AICompletionConfigDefault, AIJudgeConfig,
                          AIJudgeConfigDefault, JudgeConfiguration, LDMessage,
                          ModelConfig, ProviderConfig)
-from ldai.providers.ai_provider_factory import (AIProviderFactory,
-                                                SupportedAIProvider)
+from ldai.providers.ai_provider_factory import AIProviderFactory
 from ldai.tracker import LDAIConfigTracker
 
 
@@ -22,7 +21,6 @@ class LDAIClient:
 
     def __init__(self, client: LDClient):
         self._client = client
-        self._logger = logging.getLogger('ldclient.ai')
 
     def completion_config(
         self,
@@ -122,7 +120,7 @@ async def create_judge(
         context: Context,
         default_value: AIJudgeConfigDefault,
         variables: Optional[Dict[str, Any]] = None,
-        default_ai_provider: Optional[SupportedAIProvider] = None,
+        default_ai_provider: Optional[str] = None,
     ) -> Optional[Judge]:
         """
         Creates and returns a new Judge instance for AI evaluation.
@@ -180,11 +178,11 @@ async def create_judge(
                 return None
 
             # Create AI provider for the judge
-            provider = await AIProviderFactory.create(judge_config, self._logger, default_ai_provider)
+            provider = await AIProviderFactory.create(judge_config, default_ai_provider)
             if not provider:
                 return None
 
-            return Judge(judge_config, judge_config.tracker, provider, self._logger)
+            return Judge(judge_config, judge_config.tracker, provider)
         except Exception as error:
             # Would log error if logger available
             return None
@@ -194,7 +192,7 @@ async def _initialize_judges(
         judge_configs: List[JudgeConfiguration.Judge],
         context: Context,
         variables: Optional[Dict[str, Any]] = None,
-        default_ai_provider: Optional[SupportedAIProvider] = None,
+        default_ai_provider: Optional[str] = None,
     ) -> Dict[str, Judge]:
         """
         Initialize judges from judge configurations.
@@ -240,7 +238,7 @@ async def create_chat(
         context: Context,
         default_value: AICompletionConfigDefault,
         variables: Optional[Dict[str, Any]] = None,
-        default_ai_provider: Optional[SupportedAIProvider] = None,
+        default_ai_provider: Optional[str] = None,
     ) -> Optional[Chat]:
         """
         Creates and returns a new Chat instance for AI conversations.
@@ -275,15 +273,14 @@ async def create_chat(
             print(f"Conversation has {len(messages)} messages")
         """
         self._client.track('$ld:ai:config:function:createChat', context, key, 1)
-        if self._logger:
-            self._logger.debug(f"Creating chat for key: {key}")
+        log.debug(f"Creating chat for key: {key}")
         config = self.completion_config(key, context, default_value, variables)
 
         if not config.enabled or not config.tracker:
             # Would log info if logger available
             return None
 
-        provider = await AIProviderFactory.create(config, self._logger, default_ai_provider)
+        provider = await AIProviderFactory.create(config, default_ai_provider)
         if not provider:
             return None
 
@@ -296,7 +293,7 @@ async def create_chat(
             default_ai_provider,
         )
 
-        return Chat(config, config.tracker, provider, judges, self._logger)
+        return Chat(config, config.tracker, provider, judges)
 
     def agent_config(
         self,
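For reference, a minimal call-site sketch of the updated create_chat signature, where default_ai_provider is now a plain provider-name string rather than a SupportedAIProvider value. The names ai_client (an initialized LDAIClient), context, default_config, the config key, and the 'openai' provider string are illustrative assumptions, not part of this change:

    # Hypothetical usage; ai_client, context, and default_config are assumed
    # to have been constructed elsewhere with the real SDK objects.
    chat = await ai_client.create_chat(
        'customer-support-chat',          # AI config key (illustrative)
        context,
        default_config,
        variables={'topic': 'billing'},
        default_ai_provider='openai',     # plain string instead of SupportedAIProvider
    )
    if chat is None:
        # Config disabled, tracker missing, or no provider could be created
        ...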