Skip to content

Commit 35f244c

Browse files
committed
chore(deps): adapt most of the tests
1 parent fa30bfc commit 35f244c

File tree

7 files changed

+77
-71
lines changed

7 files changed

+77
-71
lines changed

apps/server/src/routes/api/llm.spec.ts

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -52,9 +52,9 @@ vi.mock("../../services/llm/ai_service_manager.js", () => ({
5252

5353
// Mock chat pipeline
5454
const mockChatPipelineExecute = vi.fn();
55-
const MockChatPipeline = vi.fn().mockImplementation(() => ({
56-
execute: mockChatPipelineExecute
57-
}));
55+
const MockChatPipeline = vi.fn().mockImplementation(function () {
56+
this.execute = mockChatPipelineExecute;
57+
});
5858
vi.mock("../../services/llm/pipeline/chat_pipeline.js", () => ({
5959
ChatPipeline: MockChatPipeline
6060
}));

apps/server/src/services/llm/ai_service_manager.spec.ts

Lines changed: 13 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -35,24 +35,24 @@ vi.mock('../log.js', () => ({
3535
}));
3636

3737
vi.mock('./providers/anthropic_service.js', () => ({
38-
AnthropicService: vi.fn().mockImplementation(() => ({
39-
isAvailable: vi.fn().mockReturnValue(true),
40-
generateChatCompletion: vi.fn()
41-
}))
38+
AnthropicService: vi.fn().mockImplementation(function () {
39+
this.isAvailable = vi.fn().mockReturnValue(true);
40+
this.generateChatCompletion = vi.fn();
41+
})
4242
}));
4343

4444
vi.mock('./providers/openai_service.js', () => ({
45-
OpenAIService: vi.fn().mockImplementation(() => ({
46-
isAvailable: vi.fn().mockReturnValue(true),
47-
generateChatCompletion: vi.fn()
48-
}))
45+
OpenAIService: vi.fn().mockImplementation(function () {
46+
this.isAvailable = vi.fn().mockReturnValue(true);
47+
this.generateChatCompletion = vi.fn();
48+
})
4949
}));
5050

5151
vi.mock('./providers/ollama_service.js', () => ({
52-
OllamaService: vi.fn().mockImplementation(() => ({
53-
isAvailable: vi.fn().mockReturnValue(true),
54-
generateChatCompletion: vi.fn()
55-
}))
52+
OllamaService: vi.fn().mockImplementation(function () {
53+
this.isAvailable = vi.fn().mockReturnValue(true);
54+
this.generateChatCompletion = vi.fn();
55+
})
5656
}));
5757

5858
vi.mock('./config/configuration_helpers.js', () => ({
@@ -65,7 +65,7 @@ vi.mock('./config/configuration_helpers.js', () => ({
6565
}));
6666

6767
vi.mock('./context/index.js', () => ({
68-
ContextExtractor: vi.fn().mockImplementation(() => ({}))
68+
ContextExtractor: vi.fn().mockImplementation(function () {})
6969
}));
7070

7171
vi.mock('./context_extractors/index.js', () => ({

apps/server/src/services/llm/chat/rest_chat_service.spec.ts

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -39,9 +39,9 @@ vi.mock('../pipeline/chat_pipeline.js', () => ({
3939
}));
4040

4141
vi.mock('./handlers/tool_handler.js', () => ({
42-
ToolHandler: vi.fn().mockImplementation(() => ({
43-
handleToolCalls: vi.fn()
44-
}))
42+
ToolHandler: vi.fn().mockImplementation(function () {
43+
this.handleToolCalls = vi.fn()
44+
})
4545
}));
4646

4747
vi.mock('../chat_storage_service.js', () => ({

apps/server/src/services/llm/chat_service.spec.ts

Lines changed: 24 additions & 22 deletions
Original file line numberDiff line numberDiff line change
@@ -36,20 +36,22 @@ vi.mock('./constants/llm_prompt_constants.js', () => ({
3636
}));
3737

3838
vi.mock('./pipeline/chat_pipeline.js', () => ({
39-
ChatPipeline: vi.fn().mockImplementation((config) => ({
40-
config,
41-
execute: vi.fn(),
42-
getMetrics: vi.fn(),
43-
resetMetrics: vi.fn(),
44-
stages: {
45-
contextExtraction: {
46-
execute: vi.fn()
47-
},
48-
semanticContextExtraction: {
49-
execute: vi.fn()
39+
ChatPipeline: vi.fn().mockImplementation(function (config) {
40+
Object.assign(this, {
41+
config,
42+
execute: vi.fn(),
43+
getMetrics: vi.fn(),
44+
resetMetrics: vi.fn(),
45+
stages: {
46+
contextExtraction: {
47+
execute: vi.fn()
48+
},
49+
semanticContextExtraction: {
50+
execute: vi.fn()
51+
}
5052
}
51-
}
52-
}))
53+
});
54+
})
5355
}));
5456

5557
vi.mock('./ai_service_manager.js', () => ({
@@ -67,12 +69,12 @@ describe('ChatService', () => {
6769

6870
beforeEach(async () => {
6971
vi.clearAllMocks();
70-
72+
7173
// Get mocked modules
7274
mockChatStorageService = (await import('./chat_storage_service.js')).default;
7375
mockAiServiceManager = (await import('./ai_service_manager.js')).default;
7476
mockLog = (await import('../log.js')).default;
75-
77+
7678
// Setup pipeline mock
7779
mockChatPipeline = {
7880
execute: vi.fn(),
@@ -87,10 +89,10 @@ describe('ChatService', () => {
8789
}
8890
}
8991
};
90-
92+
9193
// Create a new ChatService instance
9294
chatService = new ChatService();
93-
95+
9496
// Replace the internal pipelines with our mock
9597
(chatService as any).pipelines.set('default', mockChatPipeline);
9698
(chatService as any).pipelines.set('agent', mockChatPipeline);
@@ -228,7 +230,7 @@ describe('ChatService', () => {
228230

229231
it('should create new session if not found', async () => {
230232
mockChatStorageService.getChat.mockResolvedValueOnce(null);
231-
233+
232234
const mockNewChat = {
233235
id: 'chat-new',
234236
title: 'New Chat',
@@ -301,7 +303,7 @@ describe('ChatService', () => {
301303

302304
mockChatStorageService.getChat.mockResolvedValue(mockChat);
303305
mockChatStorageService.updateChat.mockResolvedValue(mockChat);
304-
306+
305307
mockChatPipeline.execute.mockResolvedValue({
306308
text: 'Hello! How can I help you?',
307309
model: 'gpt-3.5-turbo',
@@ -435,7 +437,7 @@ describe('ChatService', () => {
435437

436438
mockChatStorageService.getChat.mockResolvedValue(mockChat);
437439
mockChatStorageService.updateChat.mockResolvedValue(mockChat);
438-
440+
439441
mockChatPipeline.execute.mockResolvedValue({
440442
text: 'Based on the context, here is my response.',
441443
model: 'gpt-4',
@@ -841,7 +843,7 @@ describe('ChatService', () => {
841843

842844
it('should return default title for empty or invalid messages', () => {
843845
const generateTitle = (chatService as any).generateTitleFromMessages.bind(chatService);
844-
846+
845847
expect(generateTitle([])).toBe('New Chat');
846848
expect(generateTitle([{ role: 'assistant', content: 'Hello' }])).toBe('New Chat');
847849
});
@@ -858,4 +860,4 @@ describe('ChatService', () => {
858860
expect(title).toBe('First line');
859861
});
860862
});
861-
});
863+
});

apps/server/src/services/llm/context/services/context_service.spec.ts

Lines changed: 11 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -47,9 +47,9 @@ vi.mock('../../ai_service_manager.js', () => ({
4747
}));
4848

4949
vi.mock('../index.js', () => ({
50-
ContextExtractor: vi.fn().mockImplementation(() => ({
51-
findRelevantNotes: vi.fn().mockResolvedValue([])
52-
}))
50+
ContextExtractor: vi.fn().mockImplementation(function () {
51+
this.findRelevantNotes = vi.fn().mockResolvedValue([])
52+
})
5353
}));
5454

5555
describe('ContextService', () => {
@@ -59,7 +59,7 @@ describe('ContextService', () => {
5959
beforeEach(() => {
6060
vi.clearAllMocks();
6161
service = new ContextService();
62-
62+
6363
mockLLMService = {
6464
generateChatCompletion: vi.fn().mockResolvedValue({
6565
content: 'Mock LLM response',
@@ -84,15 +84,15 @@ describe('ContextService', () => {
8484
describe('initialize', () => {
8585
it('should initialize successfully', async () => {
8686
const result = await service.initialize();
87-
87+
8888
expect(result).toBeUndefined(); // initialize returns void
8989
expect((service as any).initialized).toBe(true);
9090
});
9191

9292
it('should not initialize twice', async () => {
9393
await service.initialize();
9494
await service.initialize(); // Second call should be a no-op
95-
95+
9696
expect((service as any).initialized).toBe(true);
9797
});
9898

@@ -102,9 +102,9 @@ describe('ContextService', () => {
102102
service.initialize(),
103103
service.initialize()
104104
];
105-
105+
106106
await Promise.all(promises);
107-
107+
108108
expect((service as any).initialized).toBe(true);
109109
});
110110
});
@@ -186,11 +186,11 @@ describe('ContextService', () => {
186186
describe('error handling', () => {
187187
it('should handle service operations', async () => {
188188
await service.initialize();
189-
189+
190190
// These operations should not throw
191191
const result1 = await service.processQuery('test', mockLLMService);
192192
const result2 = await service.findRelevantNotes('test', null, {});
193-
193+
194194
expect(result1).toBeDefined();
195195
expect(result2).toBeDefined();
196196
});
@@ -224,4 +224,4 @@ describe('ContextService', () => {
224224
});
225225
});
226226
});
227-
});
227+
});

apps/server/src/services/llm/providers/anthropic_service.spec.ts

Lines changed: 7 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -48,8 +48,8 @@ vi.mock('@anthropic-ai/sdk', () => {
4848
}
4949
};
5050

51-
const mockAnthropic = vi.fn().mockImplementation(() => ({
52-
messages: {
51+
const mockAnthropic = vi.fn().mockImplementation(function () {
52+
this.messages = {
5353
create: vi.fn().mockImplementation((params) => {
5454
if (params.stream) {
5555
return Promise.resolve(mockStream);
@@ -71,8 +71,8 @@ vi.mock('@anthropic-ai/sdk', () => {
7171
}
7272
});
7373
})
74-
}
75-
}));
74+
};
75+
});
7676

7777
return { default: mockAnthropic };
7878
});
@@ -127,7 +127,9 @@ describe('AnthropicService', () => {
127127
}
128128
};
129129

130-
AnthropicMock.mockImplementation(() => mockAnthropicInstance);
130+
AnthropicMock.mockImplementation(function () {
131+
Object.assign(this, mockAnthropicInstance);
132+
});
131133

132134
service = new AnthropicService();
133135
});

apps/server/src/services/llm/providers/ollama_service.spec.ts

Lines changed: 16 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -30,11 +30,11 @@ vi.mock('./providers.js', () => ({
3030
}));
3131

3232
vi.mock('../formatters/ollama_formatter.js', () => ({
33-
OllamaMessageFormatter: vi.fn().mockImplementation(() => ({
34-
formatMessages: vi.fn().mockReturnValue([
33+
OllamaMessageFormatter: vi.fn().mockImplementation(function () {
34+
this.formatMessages = vi.fn().mockReturnValue([
3535
{ role: 'user', content: 'Hello' }
36-
]),
37-
formatResponse: vi.fn().mockReturnValue({
36+
]);
37+
this.formatResponse = vi.fn().mockReturnValue({
3838
text: 'Hello! How can I help you today?',
3939
provider: 'Ollama',
4040
model: 'llama2',
@@ -44,8 +44,8 @@ vi.mock('../formatters/ollama_formatter.js', () => ({
4444
totalTokens: 15
4545
},
4646
tool_calls: null
47-
})
48-
}))
47+
});
48+
})
4949
}));
5050

5151
vi.mock('../tools/tool_registry.js', () => ({
@@ -83,8 +83,8 @@ vi.mock('ollama', () => {
8383
}
8484
};
8585

86-
const mockOllama = vi.fn().mockImplementation(() => ({
87-
chat: vi.fn().mockImplementation((params) => {
86+
const mockOllama = vi.fn().mockImplementation(function () {
87+
this.chat = vi.fn().mockImplementation((params) => {
8888
if (params.stream) {
8989
return Promise.resolve(mockStream);
9090
}
@@ -97,8 +97,8 @@ vi.mock('ollama', () => {
9797
model: 'llama2',
9898
done: true
9999
});
100-
}),
101-
show: vi.fn().mockResolvedValue({
100+
});
101+
this.show = vi.fn().mockResolvedValue({
102102
modelfile: 'FROM llama2',
103103
parameters: {},
104104
template: '',
@@ -109,8 +109,8 @@ vi.mock('ollama', () => {
109109
parameter_size: '7B',
110110
quantization_level: 'Q4_0'
111111
}
112-
}),
113-
list: vi.fn().mockResolvedValue({
112+
});
113+
this.list = vi.fn().mockResolvedValue({
114114
models: [
115115
{
116116
name: 'llama2:latest',
@@ -119,7 +119,7 @@ vi.mock('ollama', () => {
119119
}
120120
]
121121
})
122-
}));
122+
});
123123

124124
return { Ollama: mockOllama };
125125
});
@@ -196,7 +196,9 @@ describe('OllamaService', () => {
196196
})
197197
};
198198

199-
OllamaMock.mockImplementation(() => mockOllamaInstance);
199+
OllamaMock.mockImplementation(function () {
200+
Object.assign(this, mockOllamaInstance);
201+
});
200202

201203
service = new OllamaService();
202204

0 commit comments

Comments
 (0)