Commit 99ea7bd

catch exceptions in langchain provider
1 parent 4b71d80 commit 99ea7bd

2 files changed: +144 -53 lines

packages/ai-providers/server-ai-langchain/__tests__/LangChainProvider.test.ts

Lines changed: 60 additions & 0 deletions
@@ -155,6 +155,66 @@ describe('LangChainProvider', () => {
       expect(result.message.content).toBe('');
       expect(mockLogger.warn).toHaveBeenCalledTimes(1);
     });
+
+    it('returns success=false when model invocation throws an error', async () => {
+      const error = new Error('Model invocation failed');
+      mockLLM.invoke.mockRejectedValue(error);
+
+      const messages = [{ role: 'user' as const, content: 'Hello' }];
+      const result = await provider.invokeModel(messages);
+
+      expect(result.metrics.success).toBe(false);
+      expect(result.message.content).toBe('');
+      expect(result.message.role).toBe('assistant');
+      expect(mockLogger.error).toHaveBeenCalledWith('LangChain model invocation failed:', error);
+    });
+  });
+
+  describe('invokeStructuredModel', () => {
+    let mockLLM: any;
+    let provider: LangChainProvider;
+
+    beforeEach(() => {
+      mockLLM = {
+        withStructuredOutput: jest.fn(),
+      };
+      provider = new LangChainProvider(mockLLM, mockLogger);
+      jest.clearAllMocks();
+    });
+
+    it('returns success=true for successful invocation', async () => {
+      const mockResponse = { result: 'structured data' };
+      const mockInvoke = jest.fn().mockResolvedValue(mockResponse);
+      mockLLM.withStructuredOutput.mockReturnValue({ invoke: mockInvoke });
+
+      const messages = [{ role: 'user' as const, content: 'Hello' }];
+      const responseStructure = { type: 'object', properties: {} };
+      const result = await provider.invokeStructuredModel(messages, responseStructure);
+
+      expect(result.metrics.success).toBe(true);
+      expect(result.data).toEqual(mockResponse);
+      expect(result.rawResponse).toBe(JSON.stringify(mockResponse));
+      expect(mockLogger.error).not.toHaveBeenCalled();
+    });
+
+    it('returns success=false when structured model invocation throws an error', async () => {
+      const error = new Error('Structured invocation failed');
+      const mockInvoke = jest.fn().mockRejectedValue(error);
+      mockLLM.withStructuredOutput.mockReturnValue({ invoke: mockInvoke });
+
+      const messages = [{ role: 'user' as const, content: 'Hello' }];
+      const responseStructure = { type: 'object', properties: {} };
+      const result = await provider.invokeStructuredModel(messages, responseStructure);
+
+      expect(result.metrics.success).toBe(false);
+      expect(result.data).toEqual({});
+      expect(result.rawResponse).toBe('');
+      expect(result.metrics.usage).toEqual({ total: 0, input: 0, output: 0 });
+      expect(mockLogger.error).toHaveBeenCalledWith(
+        'LangChain structured model invocation failed:',
+        error,
+      );
+    });
   });
 
   describe('mapProvider', () => {

packages/ai-providers/server-ai-langchain/src/LangChainProvider.ts

Lines changed: 84 additions & 53 deletions
@@ -45,39 +45,53 @@ export class LangChainProvider extends AIProvider {
    * Invoke the LangChain model with an array of messages.
    */
   async invokeModel(messages: LDMessage[]): Promise<ChatResponse> {
-    // Convert LDMessage[] to LangChain messages
-    const langchainMessages = LangChainProvider.convertMessagesToLangChain(messages);
-
-    // Get the LangChain response
-    const response: AIMessage = await this._llm.invoke(langchainMessages);
-
-    // Generate metrics early (assumes success by default)
-    const metrics = LangChainProvider.createAIMetrics(response);
-
-    // Extract text content from the response
-    let content: string = '';
-    if (typeof response.content === 'string') {
-      content = response.content;
-    } else {
-      // Log warning for non-string content (likely multimodal)
-      this.logger?.warn(
-        `Multimodal response not supported, expecting a string. Content type: ${typeof response.content}, Content:`,
-        JSON.stringify(response.content, null, 2),
-      );
-      // Update metrics to reflect content loss
-      metrics.success = false;
-    }
+    try {
+      // Convert LDMessage[] to LangChain messages
+      const langchainMessages = LangChainProvider.convertMessagesToLangChain(messages);
+
+      // Get the LangChain response
+      const response: AIMessage = await this._llm.invoke(langchainMessages);
+
+      // Generate metrics early (assumes success by default)
+      const metrics = LangChainProvider.createAIMetrics(response);
+
+      // Extract text content from the response
+      let content: string = '';
+      if (typeof response.content === 'string') {
+        content = response.content;
+      } else {
+        // Log warning for non-string content (likely multimodal)
+        this.logger?.warn(
+          `Multimodal response not supported, expecting a string. Content type: ${typeof response.content}, Content:`,
+          JSON.stringify(response.content, null, 2),
+        );
+        // Update metrics to reflect content loss
+        metrics.success = false;
+      }
 
-    // Create the assistant message
-    const assistantMessage: LDMessage = {
-      role: 'assistant',
-      content,
-    };
+      // Create the assistant message
+      const assistantMessage: LDMessage = {
+        role: 'assistant',
+        content,
+      };
 
-    return {
-      message: assistantMessage,
-      metrics,
-    };
+      return {
+        message: assistantMessage,
+        metrics,
+      };
+    } catch (error) {
+      this.logger?.warn('LangChain model invocation failed:', error);
+
+      return {
+        message: {
+          role: 'assistant',
+          content: '',
+        },
+        metrics: {
+          success: false,
+        },
+      };
+    }
   }
 
   /**
@@ -87,29 +101,46 @@ export class LangChainProvider extends AIProvider {
     messages: LDMessage[],
     responseStructure: Record<string, unknown>,
   ): Promise<StructuredResponse> {
-    // Convert LDMessage[] to LangChain messages
-    const langchainMessages = LangChainProvider.convertMessagesToLangChain(messages);
-
-    // Get the LangChain response
-    const response = await this._llm
-      .withStructuredOutput(responseStructure)
-      .invoke(langchainMessages);
-
-    // Using structured output doesn't support metrics
-    const metrics = {
-      success: true,
-      usage: {
-        total: 0,
-        input: 0,
-        output: 0,
-      },
-    };
+    try {
+      // Convert LDMessage[] to LangChain messages
+      const langchainMessages = LangChainProvider.convertMessagesToLangChain(messages);
+
+      // Get the LangChain response
+      const response = await this._llm
+        .withStructuredOutput(responseStructure)
+        .invoke(langchainMessages);
+
+      // Using structured output doesn't support metrics
+      const metrics = {
+        success: true,
+        usage: {
+          total: 0,
+          input: 0,
+          output: 0,
+        },
+      };
 
-    return {
-      data: response,
-      rawResponse: JSON.stringify(response),
-      metrics,
-    };
+      return {
+        data: response,
+        rawResponse: JSON.stringify(response),
+        metrics,
+      };
+    } catch (error) {
+      this.logger?.warn('LangChain structured model invocation failed:', error);
+
+      return {
+        data: {},
+        rawResponse: '',
+        metrics: {
+          success: false,
+          usage: {
+            total: 0,
+            input: 0,
+            output: 0,
+          },
+        },
+      };
+    }
   }
 
   /**