Skip to content

Commit 56493ef

Browse files
committed
fix: minor fixes and new cases for webView/ClineProvider unit test, remove outdated lm-studio fetcher
1 parent 075e9af commit 56493ef

File tree

5 files changed

+44
-56
lines changed

5 files changed

+44
-56
lines changed

src/api/providers/fetchers/lm-studio.ts

Lines changed: 0 additions & 35 deletions
This file was deleted.

src/api/providers/fetchers/modelCache.ts

Lines changed: 8 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -1,23 +1,22 @@
1-
import * as path from "path"
21
import fs from "fs/promises"
2+
import * as path from "path"
33

44
import NodeCache from "node-cache"
55
import { safeWriteJson } from "../../../utils/safeWriteJson"
66

77
import { ContextProxy } from "../../../core/config/ContextProxy"
8-
import { getCacheDirectoryPath } from "../../../utils/storage"
9-
import { RouterName, ModelRecord } from "../../../shared/api"
8+
import { ModelRecord, RouterName } from "../../../shared/api"
109
import { fileExistsAtPath } from "../../../utils/fs"
10+
import { getCacheDirectoryPath } from "../../../utils/storage"
1111

12-
import { getOpenRouterModels } from "./openrouter"
13-
import { getRequestyModels } from "./requesty"
12+
import { GetModelsOptions } from "../../../shared/api"
1413
import { getGlamaModels } from "./glama"
15-
import { getUnboundModels } from "./unbound"
1614
import { getLiteLLMModels } from "./litellm"
17-
import { getLmStudioModels } from "./lm-studio"
18-
import { GetModelsOptions } from "../../../shared/api"
19-
import { getOllamaModels } from "./ollama"
2015
import { getLMStudioModels } from "./lmstudio"
16+
import { getOllamaModels } from "./ollama"
17+
import { getOpenRouterModels } from "./openrouter"
18+
import { getRequestyModels } from "./requesty"
19+
import { getUnboundModels } from "./unbound"
2120

2221
const memoryCache = new NodeCache({ stdTTL: 5 * 60, checkperiod: 5 * 60 })
2322

src/core/webview/__tests__/ClineProvider.spec.ts

Lines changed: 17 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -16,6 +16,7 @@ import { Task, TaskOptions } from "../../task/Task"
1616
import { safeWriteJson } from "../../../utils/safeWriteJson"
1717

1818
import { ClineProvider } from "../ClineProvider"
19+
import { LmStudioHandler } from "../../../api/providers"
1920

2021
// Mock setup must come before imports
2122
vi.mock("../../prompts/sections/custom-instructions")
@@ -2371,6 +2372,7 @@ describe("ClineProvider - Router Models", () => {
23712372
unboundApiKey: "unbound-key",
23722373
litellmApiKey: "litellm-key",
23732374
litellmBaseUrl: "http://localhost:4000",
2375+
lmStudioBaseUrl: "http://localhost:1234",
23742376
},
23752377
} as any)
23762378

@@ -2404,6 +2406,10 @@ describe("ClineProvider - Router Models", () => {
24042406
apiKey: "litellm-key",
24052407
baseUrl: "http://localhost:4000",
24062408
})
2409+
expect(getModels).toHaveBeenCalledWith({
2410+
provider: "lmstudio",
2411+
baseUrl: "http://localhost:1234",
2412+
})
24072413

24082414
// Verify response was sent
24092415
expect(mockPostMessage).toHaveBeenCalledWith({
@@ -2415,7 +2421,7 @@ describe("ClineProvider - Router Models", () => {
24152421
unbound: mockModels,
24162422
litellm: mockModels,
24172423
ollama: {},
2418-
lmstudio: {},
2424+
lmstudio: mockModels,
24192425
},
24202426
})
24212427
})
@@ -2432,6 +2438,7 @@ describe("ClineProvider - Router Models", () => {
24322438
unboundApiKey: "unbound-key",
24332439
litellmApiKey: "litellm-key",
24342440
litellmBaseUrl: "http://localhost:4000",
2441+
lmStudioBaseUrl: "http://localhost:1234",
24352442
},
24362443
} as any)
24372444

@@ -2447,6 +2454,7 @@ describe("ClineProvider - Router Models", () => {
24472454
.mockResolvedValueOnce(mockModels) // glama success
24482455
.mockRejectedValueOnce(new Error("Unbound API error")) // unbound fail
24492456
.mockRejectedValueOnce(new Error("LiteLLM connection failed")) // litellm fail
2457+
.mockRejectedValueOnce(new Error("LMStudio API error")) // lmstudio fail
24502458

24512459
await messageHandler({ type: "requestRouterModels" })
24522460

@@ -2492,6 +2500,13 @@ describe("ClineProvider - Router Models", () => {
24922500
error: "LiteLLM connection failed",
24932501
values: { provider: "litellm" },
24942502
})
2503+
2504+
expect(mockPostMessage).toHaveBeenCalledWith({
2505+
type: "singleRouterModelFetchResponse",
2506+
success: false,
2507+
error: "LMStudio API error",
2508+
values: { provider: "lmstudio" },
2509+
})
24952510
})
24962511

24972512
test("handles requestRouterModels with LiteLLM values from message", async () => {
@@ -2570,7 +2585,7 @@ describe("ClineProvider - Router Models", () => {
25702585
unbound: mockModels,
25712586
litellm: {},
25722587
ollama: {},
2573-
lmstudio: {},
2588+
lmstudio: mockModels,
25742589
},
25752590
})
25762591
})

src/core/webview/__tests__/webviewMessageHandler.spec.ts

Lines changed: 16 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -105,6 +105,7 @@ describe("webviewMessageHandler - requestRouterModels", () => {
105105
unboundApiKey: "unbound-key",
106106
litellmApiKey: "litellm-key",
107107
litellmBaseUrl: "http://localhost:4000",
108+
lmStudioBaseUrl: "http://localhost:1234",
108109
},
109110
})
110111
})
@@ -141,6 +142,10 @@ describe("webviewMessageHandler - requestRouterModels", () => {
141142
apiKey: "litellm-key",
142143
baseUrl: "http://localhost:4000",
143144
})
145+
expect(mockGetModels).toHaveBeenCalledWith({
146+
provider: "lmstudio",
147+
baseUrl: "http://localhost:1234",
148+
})
144149

145150
// Verify response was sent
146151
expect(mockClineProvider.postMessageToWebview).toHaveBeenCalledWith({
@@ -152,7 +157,7 @@ describe("webviewMessageHandler - requestRouterModels", () => {
152157
unbound: mockModels,
153158
litellm: mockModels,
154159
ollama: {},
155-
lmstudio: {},
160+
lmstudio: mockModels,
156161
},
157162
})
158163
})
@@ -239,7 +244,7 @@ describe("webviewMessageHandler - requestRouterModels", () => {
239244
unbound: mockModels,
240245
litellm: {},
241246
ollama: {},
242-
lmstudio: {},
247+
lmstudio: mockModels,
243248
},
244249
})
245250
})
@@ -261,6 +266,7 @@ describe("webviewMessageHandler - requestRouterModels", () => {
261266
.mockResolvedValueOnce(mockModels) // glama
262267
.mockRejectedValueOnce(new Error("Unbound API error")) // unbound
263268
.mockRejectedValueOnce(new Error("LiteLLM connection failed")) // litellm
269+
.mockRejectedValueOnce(new Error("LMStudio API error")) // lmstudio
264270

265271
await webviewMessageHandler(mockClineProvider, {
266272
type: "requestRouterModels",
@@ -311,6 +317,7 @@ describe("webviewMessageHandler - requestRouterModels", () => {
311317
.mockRejectedValueOnce(new Error("Glama API error")) // glama
312318
.mockRejectedValueOnce(new Error("Unbound API error")) // unbound
313319
.mockRejectedValueOnce(new Error("LiteLLM connection failed")) // litellm
320+
.mockRejectedValueOnce(new Error("LMStudio API error")) // lmstudio
314321

315322
await webviewMessageHandler(mockClineProvider, {
316323
type: "requestRouterModels",
@@ -351,6 +358,13 @@ describe("webviewMessageHandler - requestRouterModels", () => {
351358
error: "LiteLLM connection failed",
352359
values: { provider: "litellm" },
353360
})
361+
362+
expect(mockClineProvider.postMessageToWebview).toHaveBeenCalledWith({
363+
type: "singleRouterModelFetchResponse",
364+
success: false,
365+
error: "LMStudio API error",
366+
values: { provider: "lmstudio" },
367+
})
354368
})
355369

356370
it("prefers config values over message values for LiteLLM", async () => {

src/core/webview/webviewMessageHandler.ts

Lines changed: 3 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -568,9 +568,8 @@ export const webviewMessageHandler = async (
568568

569569
const fetchedRouterModels: Partial<Record<RouterName, ModelRecord>> = {
570570
...routerModels,
571-
// Initialize ollama and lmstudio with empty objects since they use separate handlers
571+
// Initialize ollama with an empty object since it uses a separate handler
572572
ollama: {},
573-
lmstudio: {},
574573
}
575574

576575
results.forEach((result, index) => {
@@ -579,18 +578,14 @@ export const webviewMessageHandler = async (
579578
if (result.status === "fulfilled") {
580579
fetchedRouterModels[routerName] = result.value.models
581580

582-
// Ollama and LM Studio settings pages still need these events
581+
// The Ollama settings page still needs this event
583582
if (routerName === "ollama" && Object.keys(result.value.models).length > 0) {
584583
provider.postMessageToWebview({
585584
type: "ollamaModels",
586585
ollamaModels: Object.keys(result.value.models),
587586
})
588-
} else if (routerName === "lmstudio" && Object.keys(result.value.models).length > 0) {
589-
provider.postMessageToWebview({
590-
type: "lmStudioModels",
591-
lmStudioModels: Object.keys(result.value.models),
592-
})
593587
}
588+
// LM Studio models have moved to main router models message
594589
} else {
595590
// Handle rejection: Post a specific error message for this provider
596591
const errorMessage = result.reason instanceof Error ? result.reason.message : String(result.reason)

0 commit comments

Comments
 (0)