Skip to content

Commit 827f174

Browse files
daniel-lxscte
authored and committed
fix: resolve LM Studio context length detection (#5075) (#5076)
1 parent ebb067b commit 827f174

File tree

2 files changed

+104
-4
lines changed

2 files changed

+104
-4
lines changed

src/api/providers/__tests__/lmstudio.spec.ts

Lines changed: 85 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -58,23 +58,47 @@ vi.mock("openai", () => {
5858
}
5959
})
6060

61+
// Mock LM Studio fetcher
62+
vi.mock("../fetchers/lmstudio", () => ({
63+
getLMStudioModels: vi.fn(),
64+
}))
65+
6166
import type { Anthropic } from "@anthropic-ai/sdk"
67+
import type { ModelInfo } from "@roo-code/types"
6268

6369
import { LmStudioHandler } from "../lm-studio"
6470
import type { ApiHandlerOptions } from "../../../shared/api"
71+
import { getLMStudioModels } from "../fetchers/lmstudio"
72+
73+
// Get the mocked function
74+
const mockGetLMStudioModels = vi.mocked(getLMStudioModels)
6575

6676
describe("LmStudioHandler", () => {
6777
let handler: LmStudioHandler
6878
let mockOptions: ApiHandlerOptions
6979

80+
const mockModelInfo: ModelInfo = {
81+
maxTokens: 8192,
82+
contextWindow: 32768,
83+
supportsImages: false,
84+
supportsComputerUse: false,
85+
supportsPromptCache: true,
86+
inputPrice: 0,
87+
outputPrice: 0,
88+
cacheWritesPrice: 0,
89+
cacheReadsPrice: 0,
90+
description: "Test Model - local-model",
91+
}
92+
7093
beforeEach(() => {
7194
mockOptions = {
7295
apiModelId: "local-model",
7396
lmStudioModelId: "local-model",
74-
lmStudioBaseUrl: "http://localhost:1234/v1",
97+
lmStudioBaseUrl: "http://localhost:1234",
7598
}
7699
handler = new LmStudioHandler(mockOptions)
77100
mockCreate.mockClear()
101+
mockGetLMStudioModels.mockClear()
78102
})
79103

80104
describe("constructor", () => {
@@ -156,12 +180,71 @@ describe("LmStudioHandler", () => {
156180
})
157181

158182
describe("getModel", () => {
159-
it("should return model info", () => {
183+
it("should return default model info when no models fetched", () => {
160184
const modelInfo = handler.getModel()
161185
expect(modelInfo.id).toBe(mockOptions.lmStudioModelId)
162186
expect(modelInfo.info).toBeDefined()
163187
expect(modelInfo.info.maxTokens).toBe(-1)
164188
expect(modelInfo.info.contextWindow).toBe(128_000)
165189
})
190+
191+
it("should return fetched model info when available", async () => {
192+
// Mock the fetched models
193+
mockGetLMStudioModels.mockResolvedValueOnce({
194+
"local-model": mockModelInfo,
195+
})
196+
197+
await handler.fetchModel()
198+
const modelInfo = handler.getModel()
199+
200+
expect(modelInfo.id).toBe(mockOptions.lmStudioModelId)
201+
expect(modelInfo.info).toEqual(mockModelInfo)
202+
expect(modelInfo.info.contextWindow).toBe(32768)
203+
})
204+
205+
it("should fallback to default when model not found in fetched models", async () => {
206+
// Mock fetched models without our target model
207+
mockGetLMStudioModels.mockResolvedValueOnce({
208+
"other-model": mockModelInfo,
209+
})
210+
211+
await handler.fetchModel()
212+
const modelInfo = handler.getModel()
213+
214+
expect(modelInfo.id).toBe(mockOptions.lmStudioModelId)
215+
expect(modelInfo.info.maxTokens).toBe(-1)
216+
expect(modelInfo.info.contextWindow).toBe(128_000)
217+
})
218+
})
219+
220+
describe("fetchModel", () => {
221+
it("should fetch models successfully", async () => {
222+
mockGetLMStudioModels.mockResolvedValueOnce({
223+
"local-model": mockModelInfo,
224+
})
225+
226+
const result = await handler.fetchModel()
227+
228+
expect(mockGetLMStudioModels).toHaveBeenCalledWith(mockOptions.lmStudioBaseUrl)
229+
expect(result.id).toBe(mockOptions.lmStudioModelId)
230+
expect(result.info).toEqual(mockModelInfo)
231+
})
232+
233+
it("should handle fetch errors gracefully", async () => {
234+
const consoleSpy = vi.spyOn(console, "warn").mockImplementation(() => {})
235+
mockGetLMStudioModels.mockRejectedValueOnce(new Error("Connection failed"))
236+
237+
const result = await handler.fetchModel()
238+
239+
expect(consoleSpy).toHaveBeenCalledWith(
240+
"Failed to fetch LM Studio models, using defaults:",
241+
expect.any(Error),
242+
)
243+
expect(result.id).toBe(mockOptions.lmStudioModelId)
244+
expect(result.info.maxTokens).toBe(-1)
245+
expect(result.info.contextWindow).toBe(128_000)
246+
247+
consoleSpy.mockRestore()
248+
})
166249
})
167250
})

src/api/providers/lm-studio.ts

Lines changed: 19 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -13,10 +13,12 @@ import { ApiStream } from "../transform/stream"
1313

1414
import { BaseProvider } from "./base-provider"
1515
import type { SingleCompletionHandler, ApiHandlerCreateMessageMetadata } from "../index"
16+
import { getLMStudioModels } from "./fetchers/lmstudio"
1617

1718
export class LmStudioHandler extends BaseProvider implements SingleCompletionHandler {
1819
protected options: ApiHandlerOptions
1920
private client: OpenAI
21+
private models: Record<string, ModelInfo> = {}
2022

2123
constructor(options: ApiHandlerOptions) {
2224
super()
@@ -130,10 +132,25 @@ export class LmStudioHandler extends BaseProvider implements SingleCompletionHan
130132
}
131133
}
132134

135+
public async fetchModel() {
136+
try {
137+
this.models = await getLMStudioModels(this.options.lmStudioBaseUrl)
138+
} catch (error) {
139+
console.warn("Failed to fetch LM Studio models, using defaults:", error)
140+
this.models = {}
141+
}
142+
return this.getModel()
143+
}
144+
/**
 * Resolves the active model id and its metadata.
 *
 * Prefers the info previously cached by `fetchModel()`; when the configured
 * model is absent from the cache (or nothing has been fetched), falls back to
 * the generic OpenAI-compatible defaults.
 */
override getModel(): { id: string; info: ModelInfo } {
	const id = this.options.lmStudioModelId || ""

	// Try to get the actual model info from fetched models.
	const fetched = this.models[id]

	return {
		id,
		info: fetched || openAiModelInfoSaneDefaults,
	}
}
139156

0 commit comments

Comments
 (0)