Skip to content

Commit fd4cb37

Browse files
authored
fix!: Use client provided logger and remove optional parameter (#81)
1 parent 23bebb2 commit fd4cb37

File tree

3 files changed

+25
-47
lines changed

3 files changed

+25
-47
lines changed

packages/ai-providers/server-ai-langchain/pyproject.toml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -24,7 +24,7 @@ packages = [{ include = "ldai_langchain", from = "src" }]
2424

2525
[tool.poetry.dependencies]
2626
python = ">=3.9,<4"
27-
launchdarkly-server-sdk-ai = ">=0.11.0"
27+
launchdarkly-server-sdk-ai = ">=0.12.0"
2828
langchain-core = ">=0.2.0"
2929
langchain = ">=0.2.0"
3030

packages/ai-providers/server-ai-langchain/src/ldai_langchain/langchain_provider.py

Lines changed: 14 additions & 21 deletions
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,7 @@
44

55
from langchain_core.language_models.chat_models import BaseChatModel
66
from langchain_core.messages import AIMessage, BaseMessage, HumanMessage, SystemMessage
7-
from ldai import LDMessage
7+
from ldai import LDMessage, log
88
from ldai.models import AIConfigKind
99
from ldai.providers import AIProvider
1010
from ldai.providers.types import ChatResponse, LDAIMetrics, StructuredResponse
@@ -18,27 +18,24 @@ class LangChainProvider(AIProvider):
1818
This provider integrates LangChain models with LaunchDarkly's tracking capabilities.
1919
"""
2020

21-
def __init__(self, llm: BaseChatModel, logger: Optional[Any] = None):
21+
def __init__(self, llm: BaseChatModel):
2222
"""
2323
Initialize the LangChain provider.
2424
2525
:param llm: A LangChain BaseChatModel instance
26-
:param logger: Optional logger for logging provider operations
2726
"""
28-
super().__init__(logger)
2927
self._llm = llm
3028

3129
@staticmethod
32-
async def create(ai_config: AIConfigKind, logger: Optional[Any] = None) -> 'LangChainProvider':
30+
async def create(ai_config: AIConfigKind) -> 'LangChainProvider':
3331
"""
3432
Static factory method to create a LangChain AIProvider from an AI configuration.
3533
3634
:param ai_config: The LaunchDarkly AI configuration
37-
:param logger: Optional logger for the provider
3835
:return: Configured LangChainProvider instance
3936
"""
4037
llm = LangChainProvider.create_langchain_model(ai_config)
41-
return LangChainProvider(llm, logger)
38+
return LangChainProvider(llm)
4239

4340
async def invoke_model(self, messages: List[LDMessage]) -> ChatResponse:
4441
"""
@@ -56,20 +53,18 @@ async def invoke_model(self, messages: List[LDMessage]) -> ChatResponse:
5653
if isinstance(response.content, str):
5754
content = response.content
5855
else:
59-
if self.logger:
60-
self.logger.warn(
61-
f'Multimodal response not supported, expecting a string. '
62-
f'Content type: {type(response.content)}, Content: {response.content}'
63-
)
56+
log.warning(
57+
f'Multimodal response not supported, expecting a string. '
58+
f'Content type: {type(response.content)}, Content: {response.content}'
59+
)
6460
metrics = LDAIMetrics(success=False, usage=metrics.usage)
6561

6662
return ChatResponse(
6763
message=LDMessage(role='assistant', content=content),
6864
metrics=metrics,
6965
)
7066
except Exception as error:
71-
if self.logger:
72-
self.logger.warn(f'LangChain model invocation failed: {error}')
67+
log.warning(f'LangChain model invocation failed: {error}')
7368

7469
return ChatResponse(
7570
message=LDMessage(role='assistant', content=''),
@@ -94,11 +89,10 @@ async def invoke_structured_model(
9489
response = await structured_llm.ainvoke(langchain_messages)
9590

9691
if not isinstance(response, dict):
97-
if self.logger:
98-
self.logger.warn(
99-
f'Structured output did not return a dict. '
100-
f'Got: {type(response)}'
101-
)
92+
log.warning(
93+
f'Structured output did not return a dict. '
94+
f'Got: {type(response)}'
95+
)
10296
return StructuredResponse(
10397
data={},
10498
raw_response='',
@@ -117,8 +111,7 @@ async def invoke_structured_model(
117111
),
118112
)
119113
except Exception as error:
120-
if self.logger:
121-
self.logger.warn(f'LangChain structured model invocation failed: {error}')
114+
log.warning(f'LangChain structured model invocation failed: {error}')
122115

123116
return StructuredResponse(
124117
data={},

packages/ai-providers/server-ai-langchain/tests/test_langchain_provider.py

Lines changed: 10 additions & 25 deletions
Original file line numberDiff line numberDiff line change
@@ -145,53 +145,45 @@ def mock_llm(self):
145145
"""Create a mock LLM."""
146146
return MagicMock()
147147

148-
@pytest.fixture
149-
def mock_logger(self):
150-
"""Create a mock logger."""
151-
return MagicMock()
152-
153148
@pytest.mark.asyncio
154-
async def test_returns_success_true_for_string_content(self, mock_llm, mock_logger):
149+
async def test_returns_success_true_for_string_content(self, mock_llm):
155150
"""Should return success=True for string content."""
156151
mock_response = AIMessage(content='Test response')
157152
mock_llm.ainvoke = AsyncMock(return_value=mock_response)
158-
provider = LangChainProvider(mock_llm, mock_logger)
153+
provider = LangChainProvider(mock_llm)
159154

160155
messages = [LDMessage(role='user', content='Hello')]
161156
result = await provider.invoke_model(messages)
162157

163158
assert result.metrics.success is True
164159
assert result.message.content == 'Test response'
165-
mock_logger.warn.assert_not_called()
166160

167161
@pytest.mark.asyncio
168-
async def test_returns_success_false_for_non_string_content_and_logs_warning(self, mock_llm, mock_logger):
162+
async def test_returns_success_false_for_non_string_content_and_logs_warning(self, mock_llm):
169163
"""Should return success=False for non-string content and log warning."""
170164
mock_response = AIMessage(content=[{'type': 'image', 'data': 'base64data'}])
171165
mock_llm.ainvoke = AsyncMock(return_value=mock_response)
172-
provider = LangChainProvider(mock_llm, mock_logger)
166+
provider = LangChainProvider(mock_llm)
173167

174168
messages = [LDMessage(role='user', content='Hello')]
175169
result = await provider.invoke_model(messages)
176170

177171
assert result.metrics.success is False
178172
assert result.message.content == ''
179-
mock_logger.warn.assert_called_once()
180173

181174
@pytest.mark.asyncio
182-
async def test_returns_success_false_when_model_invocation_throws_error(self, mock_llm, mock_logger):
175+
async def test_returns_success_false_when_model_invocation_throws_error(self, mock_llm):
183176
"""Should return success=False when model invocation throws an error."""
184177
error = Exception('Model invocation failed')
185178
mock_llm.ainvoke = AsyncMock(side_effect=error)
186-
provider = LangChainProvider(mock_llm, mock_logger)
179+
provider = LangChainProvider(mock_llm)
187180

188181
messages = [LDMessage(role='user', content='Hello')]
189182
result = await provider.invoke_model(messages)
190183

191184
assert result.metrics.success is False
192185
assert result.message.content == ''
193186
assert result.message.role == 'assistant'
194-
mock_logger.warn.assert_called()
195187

196188

197189
class TestInvokeStructuredModel:
@@ -202,36 +194,30 @@ def mock_llm(self):
202194
"""Create a mock LLM."""
203195
return MagicMock()
204196

205-
@pytest.fixture
206-
def mock_logger(self):
207-
"""Create a mock logger."""
208-
return MagicMock()
209-
210197
@pytest.mark.asyncio
211-
async def test_returns_success_true_for_successful_invocation(self, mock_llm, mock_logger):
198+
async def test_returns_success_true_for_successful_invocation(self, mock_llm):
212199
"""Should return success=True for successful invocation."""
213200
mock_response = {'result': 'structured data'}
214201
mock_structured_llm = MagicMock()
215202
mock_structured_llm.ainvoke = AsyncMock(return_value=mock_response)
216203
mock_llm.with_structured_output = MagicMock(return_value=mock_structured_llm)
217-
provider = LangChainProvider(mock_llm, mock_logger)
204+
provider = LangChainProvider(mock_llm)
218205

219206
messages = [LDMessage(role='user', content='Hello')]
220207
response_structure = {'type': 'object', 'properties': {}}
221208
result = await provider.invoke_structured_model(messages, response_structure)
222209

223210
assert result.metrics.success is True
224211
assert result.data == mock_response
225-
mock_logger.warn.assert_not_called()
226212

227213
@pytest.mark.asyncio
228-
async def test_returns_success_false_when_structured_model_invocation_throws_error(self, mock_llm, mock_logger):
214+
async def test_returns_success_false_when_structured_model_invocation_throws_error(self, mock_llm):
229215
"""Should return success=False when structured model invocation throws an error."""
230216
error = Exception('Structured invocation failed')
231217
mock_structured_llm = MagicMock()
232218
mock_structured_llm.ainvoke = AsyncMock(side_effect=error)
233219
mock_llm.with_structured_output = MagicMock(return_value=mock_structured_llm)
234-
provider = LangChainProvider(mock_llm, mock_logger)
220+
provider = LangChainProvider(mock_llm)
235221

236222
messages = [LDMessage(role='user', content='Hello')]
237223
response_structure = {'type': 'object', 'properties': {}}
@@ -242,7 +228,6 @@ async def test_returns_success_false_when_structured_model_invocation_throws_err
242228
assert result.raw_response == ''
243229
assert result.metrics.usage is not None
244230
assert result.metrics.usage.total == 0
245-
mock_logger.warn.assert_called()
246231

247232

248233
class TestGetChatModel:

0 commit comments

Comments (0)