Skip to content

Commit f666cb3

Browse files
authored
Revert "fix: resolve LM Studio context length detection (#5075)" (#5083)
1 parent e7ca038 commit f666cb3

File tree

2 files changed

+3
-103
lines changed

2 files changed

+3
-103
lines changed

src/api/providers/__tests__/lmstudio.spec.ts

Lines changed: 1 addition & 84 deletions
Original file line numberDiff line numberDiff line change
@@ -58,38 +58,15 @@ vi.mock("openai", () => {
5858
}
5959
})
6060

61-
// Mock LM Studio fetcher
62-
vi.mock("../fetchers/lmstudio", () => ({
63-
getLMStudioModels: vi.fn(),
64-
}))
65-
6661
import type { Anthropic } from "@anthropic-ai/sdk"
67-
import type { ModelInfo } from "@roo-code/types"
6862

6963
import { LmStudioHandler } from "../lm-studio"
7064
import type { ApiHandlerOptions } from "../../../shared/api"
71-
import { getLMStudioModels } from "../fetchers/lmstudio"
72-
73-
// Get the mocked function
74-
const mockGetLMStudioModels = vi.mocked(getLMStudioModels)
7565

7666
describe("LmStudioHandler", () => {
7767
let handler: LmStudioHandler
7868
let mockOptions: ApiHandlerOptions
7969

80-
const mockModelInfo: ModelInfo = {
81-
maxTokens: 8192,
82-
contextWindow: 32768,
83-
supportsImages: false,
84-
supportsComputerUse: false,
85-
supportsPromptCache: true,
86-
inputPrice: 0,
87-
outputPrice: 0,
88-
cacheWritesPrice: 0,
89-
cacheReadsPrice: 0,
90-
description: "Test Model - local-model",
91-
}
92-
9370
beforeEach(() => {
9471
mockOptions = {
9572
apiModelId: "local-model",
@@ -98,7 +75,6 @@ describe("LmStudioHandler", () => {
9875
}
9976
handler = new LmStudioHandler(mockOptions)
10077
mockCreate.mockClear()
101-
mockGetLMStudioModels.mockClear()
10278
})
10379

10480
describe("constructor", () => {
@@ -180,71 +156,12 @@ describe("LmStudioHandler", () => {
180156
})
181157

182158
describe("getModel", () => {
183-
it("should return default model info when no models fetched", () => {
159+
it("should return model info", () => {
184160
const modelInfo = handler.getModel()
185161
expect(modelInfo.id).toBe(mockOptions.lmStudioModelId)
186162
expect(modelInfo.info).toBeDefined()
187163
expect(modelInfo.info.maxTokens).toBe(-1)
188164
expect(modelInfo.info.contextWindow).toBe(128_000)
189165
})
190-
191-
it("should return fetched model info when available", async () => {
192-
// Mock the fetched models
193-
mockGetLMStudioModels.mockResolvedValueOnce({
194-
"local-model": mockModelInfo,
195-
})
196-
197-
await handler.fetchModel()
198-
const modelInfo = handler.getModel()
199-
200-
expect(modelInfo.id).toBe(mockOptions.lmStudioModelId)
201-
expect(modelInfo.info).toEqual(mockModelInfo)
202-
expect(modelInfo.info.contextWindow).toBe(32768)
203-
})
204-
205-
it("should fallback to default when model not found in fetched models", async () => {
206-
// Mock fetched models without our target model
207-
mockGetLMStudioModels.mockResolvedValueOnce({
208-
"other-model": mockModelInfo,
209-
})
210-
211-
await handler.fetchModel()
212-
const modelInfo = handler.getModel()
213-
214-
expect(modelInfo.id).toBe(mockOptions.lmStudioModelId)
215-
expect(modelInfo.info.maxTokens).toBe(-1)
216-
expect(modelInfo.info.contextWindow).toBe(128_000)
217-
})
218-
})
219-
220-
describe("fetchModel", () => {
221-
it("should fetch models successfully", async () => {
222-
mockGetLMStudioModels.mockResolvedValueOnce({
223-
"local-model": mockModelInfo,
224-
})
225-
226-
const result = await handler.fetchModel()
227-
228-
expect(mockGetLMStudioModels).toHaveBeenCalledWith(mockOptions.lmStudioBaseUrl)
229-
expect(result.id).toBe(mockOptions.lmStudioModelId)
230-
expect(result.info).toEqual(mockModelInfo)
231-
})
232-
233-
it("should handle fetch errors gracefully", async () => {
234-
const consoleSpy = vi.spyOn(console, "warn").mockImplementation(() => {})
235-
mockGetLMStudioModels.mockRejectedValueOnce(new Error("Connection failed"))
236-
237-
const result = await handler.fetchModel()
238-
239-
expect(consoleSpy).toHaveBeenCalledWith(
240-
"Failed to fetch LM Studio models, using defaults:",
241-
expect.any(Error),
242-
)
243-
expect(result.id).toBe(mockOptions.lmStudioModelId)
244-
expect(result.info.maxTokens).toBe(-1)
245-
expect(result.info.contextWindow).toBe(128_000)
246-
247-
consoleSpy.mockRestore()
248-
})
249166
})
250167
})

src/api/providers/lm-studio.ts

Lines changed: 2 additions & 19 deletions
Original file line numberDiff line numberDiff line change
@@ -13,12 +13,10 @@ import { ApiStream } from "../transform/stream"
1313

1414
import { BaseProvider } from "./base-provider"
1515
import type { SingleCompletionHandler, ApiHandlerCreateMessageMetadata } from "../index"
16-
import { getLMStudioModels } from "./fetchers/lmstudio"
1716

1817
export class LmStudioHandler extends BaseProvider implements SingleCompletionHandler {
1918
protected options: ApiHandlerOptions
2019
private client: OpenAI
21-
private models: Record<string, ModelInfo> = {}
2220

2321
constructor(options: ApiHandlerOptions) {
2422
super()
@@ -132,25 +130,10 @@ export class LmStudioHandler extends BaseProvider implements SingleCompletionHan
132130
}
133131
}
134132

135-
public async fetchModel() {
136-
try {
137-
this.models = await getLMStudioModels(this.options.lmStudioBaseUrl)
138-
} catch (error) {
139-
console.warn("Failed to fetch LM Studio models, using defaults:", error)
140-
this.models = {}
141-
}
142-
return this.getModel()
143-
}
144-
145133
override getModel(): { id: string; info: ModelInfo } {
146-
const id = this.options.lmStudioModelId || ""
147-
148-
// Try to get the actual model info from fetched models
149-
const info = this.models[id] || openAiModelInfoSaneDefaults
150-
151134
return {
152-
id,
153-
info,
135+
id: this.options.lmStudioModelId || "",
136+
info: openAiModelInfoSaneDefaults,
154137
}
155138
}
156139

0 commit comments

Comments (0)