1 change: 1 addition & 0 deletions packages/types/src/provider-settings.ts
@@ -276,6 +276,7 @@ const lmStudioSchema = baseProviderSettingsSchema.extend({
 	lmStudioBaseUrl: z.string().optional(),
 	lmStudioDraftModelId: z.string().optional(),
 	lmStudioSpeculativeDecodingEnabled: z.boolean().optional(),
+	lmStudioShowDebugThoughts: z.boolean().optional(),
 })
 
 const geminiSchema = apiModelIdProviderModelSchema.extend({
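Reviewer note: the new flag is optional, so existing saved settings keep validating without it. Below is a minimal standalone sketch of that behaviour, assuming plain zod and a schema trimmed down to the fields visible in this hunk (the real `lmStudioSchema` extends `baseProviderSettingsSchema`, which is not shown here):

```ts
import { z } from "zod"

// Trimmed stand-in for lmStudioSchema: only the fields visible in the hunk above.
const lmStudioSchemaSketch = z.object({
	lmStudioBaseUrl: z.string().optional(),
	lmStudioDraftModelId: z.string().optional(),
	lmStudioSpeculativeDecodingEnabled: z.boolean().optional(),
	lmStudioShowDebugThoughts: z.boolean().optional(),
})

// Older configs that omit the flag still parse, so no migration is needed.
lmStudioSchemaSketch.parse({}) // ok
lmStudioSchemaSketch.parse({ lmStudioShowDebugThoughts: true }) // ok
// lmStudioSchemaSketch.parse({ lmStudioShowDebugThoughts: "yes" }) // would throw a ZodError
```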
11 changes: 11 additions & 0 deletions src/api/providers/lm-studio.ts
@@ -118,6 +118,17 @@ export class LmStudioHandler extends BaseProvider implements SingleCompletionHan
 					yield processedChunk
 				}
 			}
+
+			if ("reasoning" in delta && delta.reasoning && typeof delta.reasoning === "string") {
+				const reasoning = delta.reasoning
+				assistantText += reasoning
+				if (this.options.lmStudioShowDebugThoughts === true) {
+					yield {
+						type: "reasoning",
+						text: reasoning,
+					}
+				}
+			}
 		}
 
 		for (const processedChunk of matcher.final()) {
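For reference, here is the gating logic above isolated into a self-contained sketch; `DeltaLike`, `handleReasoningDelta`, and the `state` holder are illustrative names, not part of the handler. The key point is that reasoning text is always accumulated into `assistantText`, but only surfaced as a `reasoning` chunk when the new setting is enabled:

```ts
// Illustrative sketch of the behaviour added to LmStudioHandler above; not the real handler.
type DeltaLike = { content?: string; reasoning?: unknown }
type ReasoningChunk = { type: "reasoning"; text: string }

function* handleReasoningDelta(
	delta: DeltaLike,
	options: { lmStudioShowDebugThoughts?: boolean },
	state: { assistantText: string },
): Generator<ReasoningChunk> {
	if ("reasoning" in delta && delta.reasoning && typeof delta.reasoning === "string") {
		const reasoning = delta.reasoning
		// Always keep the reasoning in the accumulated assistant text...
		state.assistantText += reasoning
		// ...but only emit it to the UI when the debug setting is explicitly enabled.
		if (options.lmStudioShowDebugThoughts === true) {
			yield { type: "reasoning", text: reasoning }
		}
	}
}

// With the flag off, nothing is yielded but the text is still recorded.
const state = { assistantText: "" }
const offChunks = [...handleReasoningDelta({ reasoning: "thinking…" }, {}, state)]
// offChunks: []  state.assistantText: "thinking…"
const onChunks = [...handleReasoningDelta({ reasoning: "thinking…" }, { lmStudioShowDebugThoughts: true }, state)]
// onChunks: [{ type: "reasoning", text: "thinking…" }]
```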
10 changes: 10 additions & 0 deletions webview-ui/src/components/settings/providers/LMStudio.tsx
@@ -207,6 +207,16 @@ export const LMStudio = ({ apiConfiguration, setApiConfigurationField }: LMStudi
 					)}
 				</>
 			)}
+			<Checkbox
+				checked={apiConfiguration?.lmStudioShowDebugThoughts === true}
+				onChange={(checked) => {
+					setApiConfigurationField("lmStudioShowDebugThoughts", checked)
+				}}>
+				{t("settings:providers.lmStudio.showDebugThoughts")}
+			</Checkbox>
+			<div className="text-sm text-vscode-descriptionForeground mb-2">
+				{t("settings:providers.lmStudio.showDebugThoughtsDesc")}
+			</div>
 			<div className="text-sm text-vscode-descriptionForeground">
 				<Trans
 					i18nKey="settings:providers.lmStudio.description"
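One note on the UI wiring: `checked` uses a strict `=== true` comparison, so an unset flag renders as unchecked rather than indeterminate. A rough sketch of that round trip with a stand-in `setApiConfigurationField` (the real setter comes from the settings props and is not shown here):

```ts
// Stand-in types and state for illustration only; not the extension's real settings plumbing.
type ApiConfiguration = { lmStudioShowDebugThoughts?: boolean }

let apiConfiguration: ApiConfiguration = {}

function setApiConfigurationField(field: "lmStudioShowDebugThoughts", value: boolean): void {
	// Persist the toggled value without mutating the previous configuration object.
	apiConfiguration = { ...apiConfiguration, [field]: value }
}

// Unset flag → checkbox renders unchecked.
console.log(apiConfiguration?.lmStudioShowDebugThoughts === true) // false

// Toggling the checkbox writes the boolean through the field setter.
setApiConfigurationField("lmStudioShowDebugThoughts", true)
console.log(apiConfiguration?.lmStudioShowDebugThoughts === true) // true
```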
2 changes: 2 additions & 0 deletions webview-ui/src/i18n/locales/en/settings.json
@@ -372,6 +372,8 @@
 			"draftModelDesc": "Draft model must be from the same model family for speculative decoding to work correctly.",
 			"selectDraftModel": "Select Draft Model",
 			"noModelsFound": "No draft models found. Please ensure LM Studio is running with Server Mode enabled.",
+			"showDebugThoughts": "Show Debug Thoughts",
+			"showDebugThoughtsDesc": "Display debug reasoning output from models that support it. Note: this is different from standard <think> tags which are always processed.",
 			"description": "LM Studio allows you to run models locally on your computer. For instructions on how to get started, see their <a>quickstart guide</a>. You will also need to start LM Studio's <b>local server</b> feature to use it with this extension. <span>Note:</span> Roo Code uses complex prompts and works best with Claude models. Less capable models may not work as expected."
 		},
 		"ollama": {
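Minor note on the i18n wiring: the component references these strings as `settings:providers.lmStudio.showDebugThoughts` and `…Desc`, i.e. the `settings` namespace maps to this file and the remainder is the nested key path. A rough sketch of that lookup, assuming the usual namespace-plus-dot-path resolution (the real resolver is the project's i18n layer, not shown here):

```ts
// Stand-in for the relevant slice of en/settings.json; illustration only.
const settingsNamespace = {
	providers: {
		lmStudio: {
			showDebugThoughts: "Show Debug Thoughts",
			showDebugThoughtsDesc:
				"Display debug reasoning output from models that support it. Note: this is different from standard <think> tags which are always processed.",
		},
	},
}

// t("settings:providers.lmStudio.showDebugThoughts") resolves roughly like this:
function resolve(keyPath: string): string | undefined {
	return keyPath.split(".").reduce<any>((node, key) => (node == null ? undefined : node[key]), settingsNamespace)
}

console.log(resolve("providers.lmStudio.showDebugThoughts")) // "Show Debug Thoughts"
```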
2 changes: 2 additions & 0 deletions webview-ui/src/i18n/locales/ru/settings.json

Some generated files are not rendered by default.