Commit 5b55f6e
fix: provide fallback context window values for Ollama and LM Studio models
- Add fallback ModelInfo when routerModels.ollama or lmStudioModels return undefined
- Fixes context window display showing "used/1" instead of actual max tokens
- Ensures proper context window management for Ollama and LM Studio providers

Fixes #7674
1 parent: b48b0be
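Why the display showed "used/1": when the router returns no metadata for the selected model, `info` is `undefined` downstream, and the context-window readout falls back to a max of 1. The following is a minimal illustrative sketch of that failure mode; the `formatContextWindow` helper and its fallback of 1 are assumptions for illustration, not the webview's actual code.

interface ModelInfo {
	maxTokens?: number
	contextWindow?: number
	supportsImages?: boolean
	supportsPromptCache?: boolean
}

// Hypothetical display helper: with no ModelInfo, the max token
// count collapses to an assumed fallback of 1, producing "4096/1".
function formatContextWindow(usedTokens: number, info?: ModelInfo): string {
	const max = info?.contextWindow ?? 1 // assumed fallback, not confirmed repo code
	return `${usedTokens}/${max}`
}

formatContextWindow(4096, undefined) // "4096/1"  (the reported bug)
formatContextWindow(4096, { contextWindow: 8192 }) // "4096/8192"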

1 file changed (+22, -2):

webview-ui/src/components/ui/hooks/useSelectedModel.ts

@@ -255,17 +255,37 @@ function getSelectedModel({
 		case "ollama": {
 			const id = apiConfiguration.ollamaModelId ?? ""
 			const info = routerModels.ollama && routerModels.ollama[id]
+			// Provide fallback values when info is undefined to fix context window display
 			return {
 				id,
-				info: info || undefined,
+				info:
+					info ||
+					(id
+						? {
+								maxTokens: 8192,
+								contextWindow: 8192,
+								supportsImages: false,
+								supportsPromptCache: true,
+							}
+						: undefined),
 			}
 		}
 		case "lmstudio": {
 			const id = apiConfiguration.lmStudioModelId ?? ""
 			const info = lmStudioModels && lmStudioModels[apiConfiguration.lmStudioModelId!]
+			// Provide fallback values when info is undefined to fix context window display
 			return {
 				id,
-				info: info || undefined,
+				info:
+					info ||
+					(id
+						? {
+								maxTokens: 8192,
+								contextWindow: 8192,
+								supportsImages: false,
+								supportsPromptCache: false,
+							}
+						: undefined),
 			}
 		}
 		case "vscode-lm": {
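The fix is the same pattern applied to both providers: when a model id is selected but the router returned no metadata, substitute a conservative 8192-token ModelInfo instead of undefined. Below is a standalone sketch of that pattern, with the ModelInfo shape inferred from the fields used in the diff; the real type in the repo may carry more members.

// Same assumed ModelInfo shape as in the sketch above.
interface ModelInfo {
	maxTokens?: number
	contextWindow?: number
	supportsImages?: boolean
	supportsPromptCache?: boolean
}

// Hypothetical helper extracted from the pattern in the diff:
// synthesize conservative defaults only when a model id exists,
// so an empty selection still yields undefined.
function withFallbackInfo(
	id: string,
	info: ModelInfo | undefined,
	supportsPromptCache: boolean,
): ModelInfo | undefined {
	if (info) return info
	return id
		? {
				maxTokens: 8192,
				contextWindow: 8192,
				supportsImages: false,
				supportsPromptCache,
			}
		: undefined
}

// Per the diff, the Ollama branch passes supportsPromptCache: true,
// while the LM Studio branch passes false.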
