Skip to content

Commit ecedf08

Browse files
committed
fix: address race condition, add proper solution for marking cached info instead of description hack
1 parent 56493ef commit ecedf08

File tree

1 file changed

+23
-8
lines changed

1 file changed

+23
-8
lines changed

src/api/providers/lm-studio.ts

Lines changed: 23 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -14,10 +14,19 @@ import type { ApiHandlerCreateMessageMetadata, SingleCompletionHandler } from ".
1414
import { BaseProvider } from "./base-provider"
1515
import { flushModels, getModels } from "./fetchers/modelCache"
1616

17+
type ModelInfoCaching = {
18+
modelInfo: ModelInfo
19+
cached: boolean
20+
}
21+
1722
export class LmStudioHandler extends BaseProvider implements SingleCompletionHandler {
1823
protected options: ApiHandlerOptions
1924
private client: OpenAI
20-
private cachedModelInfo: ModelInfo = openAiModelInfoSaneDefaults
25+
private cachedModelInfo: ModelInfoCaching = {
26+
modelInfo: openAiModelInfoSaneDefaults,
27+
cached: false,
28+
}
29+
private lastRecacheTime: number = -1
2130

2231
constructor(options: ApiHandlerOptions) {
2332
super()
@@ -127,20 +136,26 @@ export class LmStudioHandler extends BaseProvider implements SingleCompletionHan
127136
outputTokens = 0
128137
}
129138

130-
if (this.cachedModelInfo === openAiModelInfoSaneDefaults) {
139+
if (
140+
!this.cachedModelInfo.cached &&
141+
(this.lastRecacheTime < 0 || Date.now() - this.lastRecacheTime > 30 * 1000)
142+
) {
143+
// assume the previous fetch failed if we didn't get a response within 30 seconds, and allow a retry
144+
this.lastRecacheTime = Date.now() // Update last recache time to avoid race condition
145+
131146
// We need to fetch the model info every time we open a new session
132147
// to ensure we have the latest context window and other details
133148
// since LM Studio models can change their context windows on reload
134149
await flushModels("lmstudio")
135150
const models = await getModels({ provider: "lmstudio", baseUrl: this.getBaseUrl() })
136151
if (models && models[this.getModel().id]) {
137-
this.cachedModelInfo = models[this.getModel().id]
138-
} else {
139-
// If model info is not found, use sane defaults
140152
this.cachedModelInfo = {
141-
...openAiModelInfoSaneDefaults,
142-
description: "Fake description to avoid recache",
153+
modelInfo: models[this.getModel().id],
154+
cached: true,
143155
}
156+
} else {
157+
// if model info is not found, still mark the result as cached to avoid retries on every chunk
158+
this.cachedModelInfo.cached = true
144159
}
145160
}
146161

@@ -159,7 +174,7 @@ export class LmStudioHandler extends BaseProvider implements SingleCompletionHan
159174
override getModel(): { id: string; info: ModelInfo } {
160175
return {
161176
id: this.options.lmStudioModelId || "",
162-
info: this.cachedModelInfo,
177+
info: this.cachedModelInfo.modelInfo,
163178
}
164179
}
165180

0 commit comments

Comments
 (0)