Skip to content

Commit c15ae29

Browse files
committed
test(server): fix LLM provider tests failing due to broken mocks
1 parent 50501ae commit c15ae29

File tree

2 files changed

+15
-118
lines changed

2 files changed

+15
-118
lines changed

apps/server/src/services/llm/providers/anthropic_service.spec.ts

Lines changed: 7 additions & 52 deletions
Original file line numberDiff line numberDiff line change
@@ -31,49 +31,7 @@ vi.mock('./providers.js', () => ({
3131
}));
3232

3333
vi.mock('@anthropic-ai/sdk', () => {
34-
const mockStream = {
35-
[Symbol.asyncIterator]: async function* () {
36-
yield {
37-
type: 'content_block_delta',
38-
delta: { text: 'Hello' }
39-
};
40-
yield {
41-
type: 'content_block_delta',
42-
delta: { text: ' world' }
43-
};
44-
yield {
45-
type: 'message_delta',
46-
delta: { stop_reason: 'end_turn' }
47-
};
48-
}
49-
};
50-
51-
class MockAnthropic {
52-
messages = {
53-
create: vi.fn().mockImplementation((params) => {
54-
if (params.stream) {
55-
return Promise.resolve(mockStream);
56-
}
57-
return Promise.resolve({
58-
id: 'msg_123',
59-
type: 'message',
60-
role: 'assistant',
61-
content: [{
62-
type: 'text',
63-
text: 'Hello! How can I help you today?'
64-
}],
65-
model: 'claude-3-opus-20240229',
66-
stop_reason: 'end_turn',
67-
stop_sequence: null,
68-
usage: {
69-
input_tokens: 10,
70-
output_tokens: 25
71-
}
72-
});
73-
})
74-
};
75-
}
76-
34+
const MockAnthropic = vi.fn();
7735
return { default: MockAnthropic };
7836
});
7937

@@ -85,7 +43,6 @@ describe('AnthropicService', () => {
8543
vi.clearAllMocks();
8644

8745
// Get the mocked Anthropic instance before creating the service
88-
const AnthropicMock = vi.mocked(Anthropic);
8946
mockAnthropicInstance = {
9047
messages: {
9148
create: vi.fn().mockImplementation((params) => {
@@ -127,8 +84,8 @@ describe('AnthropicService', () => {
12784
}
12885
};
12986

130-
AnthropicMock.mockImplementation(function () {
131-
Object.assign(this, mockAnthropicInstance);
87+
(Anthropic as any).mockImplementation(function(this: any) {
88+
return mockAnthropicInstance;
13289
});
13390

13491
service = new AnthropicService();
@@ -355,14 +312,13 @@ describe('AnthropicService', () => {
355312
vi.mocked(providers.getAnthropicOptions).mockReturnValueOnce(mockOptions);
356313

357314
// Spy on Anthropic constructor
358-
const AnthropicMock = vi.mocked(Anthropic);
359-
AnthropicMock.mockClear();
315+
(Anthropic as any).mockClear();
360316

361317
// Create new service to trigger client creation
362318
const newService = new AnthropicService();
363319
await newService.generateChatCompletion(messages);
364320

365-
expect(AnthropicMock).toHaveBeenCalledWith({
321+
expect(Anthropic).toHaveBeenCalledWith({
366322
apiKey: 'test-key',
367323
baseURL: 'https://api.anthropic.com',
368324
defaultHeaders: {
@@ -382,14 +338,13 @@ describe('AnthropicService', () => {
382338
vi.mocked(providers.getAnthropicOptions).mockReturnValueOnce(mockOptions);
383339

384340
// Spy on Anthropic constructor
385-
const AnthropicMock = vi.mocked(Anthropic);
386-
AnthropicMock.mockClear();
341+
(Anthropic as any).mockClear();
387342

388343
// Create new service to trigger client creation
389344
const newService = new AnthropicService();
390345
await newService.generateChatCompletion(messages);
391346

392-
expect(AnthropicMock).toHaveBeenCalledWith({
347+
expect(Anthropic).toHaveBeenCalledWith({
393348
apiKey: 'test-key',
394349
baseURL: 'https://api.anthropic.com',
395350
defaultHeaders: {

apps/server/src/services/llm/providers/ollama_service.spec.ts

Lines changed: 8 additions & 66 deletions
Original file line numberDiff line numberDiff line change
@@ -65,63 +65,7 @@ vi.mock('./stream_handler.js', () => ({
6565
}));
6666

6767
vi.mock('ollama', () => {
68-
const mockStream = {
69-
[Symbol.asyncIterator]: async function* () {
70-
yield {
71-
message: {
72-
role: 'assistant',
73-
content: 'Hello'
74-
},
75-
done: false
76-
};
77-
yield {
78-
message: {
79-
role: 'assistant',
80-
content: ' world'
81-
},
82-
done: true
83-
};
84-
}
85-
};
86-
87-
class MockOllama {
88-
chat = vi.fn().mockImplementation((params) => {
89-
if (params.stream) {
90-
return Promise.resolve(mockStream);
91-
}
92-
return Promise.resolve({
93-
message: {
94-
role: 'assistant',
95-
content: 'Hello! How can I help you today?'
96-
},
97-
created_at: '2024-01-01T00:00:00Z',
98-
model: 'llama2',
99-
done: true
100-
});
101-
});
102-
show = vi.fn().mockResolvedValue({
103-
modelfile: 'FROM llama2',
104-
parameters: {},
105-
template: '',
106-
details: {
107-
format: 'gguf',
108-
family: 'llama',
109-
families: ['llama'],
110-
parameter_size: '7B',
111-
quantization_level: 'Q4_0'
112-
}
113-
});
114-
list = vi.fn().mockResolvedValue({
115-
models: [
116-
{
117-
name: 'llama2:latest',
118-
modified_at: '2024-01-01T00:00:00Z',
119-
size: 3800000000
120-
}
121-
]
122-
});
123-
}
124-
68+
const MockOllama = vi.fn();
12569
return { Ollama: MockOllama };
12670
});
12771

@@ -141,7 +85,6 @@ describe('OllamaService', () => {
14185
vi.clearAllMocks();
14286

14387
// Create the mock instance before creating the service
144-
const OllamaMock = vi.mocked(Ollama);
14588
mockOllamaInstance = {
14689
chat: vi.fn().mockImplementation((params) => {
14790
if (params.stream) {
@@ -197,8 +140,9 @@ describe('OllamaService', () => {
197140
})
198141
};
199142

200-
OllamaMock.mockImplementation(function () {
201-
Object.assign(this, mockOllamaInstance);
143+
// Mock the Ollama constructor to return our mock instance
144+
(Ollama as any).mockImplementation(function(this: any) {
145+
return mockOllamaInstance;
202146
});
203147

204148
service = new OllamaService();
@@ -401,8 +345,7 @@ describe('OllamaService', () => {
401345
vi.mocked(providers.getOllamaOptions).mockResolvedValueOnce(mockOptions);
402346

403347
// Spy on Ollama constructor
404-
const OllamaMock = vi.mocked(Ollama);
405-
OllamaMock.mockClear();
348+
(Ollama as any).mockClear();
406349

407350
// Create new service to trigger client creation
408351
const newService = new OllamaService();
@@ -416,7 +359,7 @@ describe('OllamaService', () => {
416359

417360
await newService.generateChatCompletion(messages);
418361

419-
expect(OllamaMock).toHaveBeenCalledWith({
362+
expect(Ollama).toHaveBeenCalledWith({
420363
host: 'http://localhost:11434',
421364
fetch: expect.any(Function)
422365
});
@@ -576,15 +519,14 @@ describe('OllamaService', () => {
576519
};
577520
vi.mocked(providers.getOllamaOptions).mockResolvedValue(mockOptions);
578521

579-
const OllamaMock = vi.mocked(Ollama);
580-
OllamaMock.mockClear();
522+
(Ollama as any).mockClear();
581523

582524
// Make two calls
583525
await service.generateChatCompletion([{ role: 'user', content: 'Hello' }]);
584526
await service.generateChatCompletion([{ role: 'user', content: 'Hi' }]);
585527

586528
// Should only create client once
587-
expect(OllamaMock).toHaveBeenCalledTimes(1);
529+
expect(Ollama).toHaveBeenCalledTimes(1);
588530
});
589531
});
590532
});

0 commit comments

Comments (0)