diff --git a/packages/types/src/provider-settings.ts b/packages/types/src/provider-settings.ts
index fd327657b6..55f4cc2d93 100644
--- a/packages/types/src/provider-settings.ts
+++ b/packages/types/src/provider-settings.ts
@@ -276,6 +276,7 @@ const lmStudioSchema = baseProviderSettingsSchema.extend({
lmStudioBaseUrl: z.string().optional(),
lmStudioDraftModelId: z.string().optional(),
lmStudioSpeculativeDecodingEnabled: z.boolean().optional(),
+ lmStudioShowDebugThoughts: z.boolean().optional(),
})
const geminiSchema = apiModelIdProviderModelSchema.extend({
diff --git a/src/api/providers/lm-studio.ts b/src/api/providers/lm-studio.ts
index 6c58a96ae1..24c252f961 100644
--- a/src/api/providers/lm-studio.ts
+++ b/src/api/providers/lm-studio.ts
@@ -118,6 +118,21 @@ export class LmStudioHandler extends BaseProvider implements SingleCompletionHan
yield processedChunk
}
}
+
+ // This is a hack to show the debug reasoning in the UI.
+ if (
+ this.options.lmStudioShowDebugThoughts === true &&
+ "reasoning" in delta &&
+ delta.reasoning &&
+ typeof delta.reasoning === "string"
+ ) {
+ const reasoning = delta.reasoning
+ assistantText += reasoning
+ yield {
+ type: "reasoning",
+ text: reasoning,
+ }
+ }
}
for (const processedChunk of matcher.final()) {
diff --git a/webview-ui/src/components/settings/providers/LMStudio.tsx b/webview-ui/src/components/settings/providers/LMStudio.tsx
index e3401aa62c..4b3a80d9e1 100644
--- a/webview-ui/src/components/settings/providers/LMStudio.tsx
+++ b/webview-ui/src/components/settings/providers/LMStudio.tsx
@@ -207,6 +207,16 @@ export const LMStudio = ({ apiConfiguration, setApiConfigurationField }: LMStudi
)}
>
)}
+ {
+ setApiConfigurationField("lmStudioShowDebugThoughts", checked)
+ }}>
+ {t("settings:providers.lmStudio.showDebugThoughts")}
+
+
+ {t("settings:providers.lmStudio.showDebugThoughtsDesc")}
+
+		"showDebugThoughts": "Mostra els pensaments de depuració",
+		"showDebugThoughtsDesc": "Mostra la sortida de raonament de depuració dels models que ho suporten. Nota: això és diferent de les etiquetes <think> estàndard que sempre es processen.",
"description": "LM Studio permet executar models localment al vostre ordinador. Per a instruccions sobre com començar, consulteu la seva Guia d'inici ràpid. També necessitareu iniciar la funció de Servidor Local de LM Studio per utilitzar-la amb aquesta extensió. Nota: Roo Code utilitza prompts complexos i funciona millor amb models Claude. Els models menys capaços poden no funcionar com s'espera."
},
"ollama": {
diff --git a/webview-ui/src/i18n/locales/de/settings.json b/webview-ui/src/i18n/locales/de/settings.json
index 4e75f6af2a..37a8b94a11 100644
--- a/webview-ui/src/i18n/locales/de/settings.json
+++ b/webview-ui/src/i18n/locales/de/settings.json
@@ -373,6 +373,8 @@
"draftModelDesc": "Das Entwurfsmodell muss aus derselben Modellfamilie stammen, damit das spekulative Dekodieren korrekt funktioniert.",
"selectDraftModel": "Entwurfsmodell auswählen",
"noModelsFound": "Keine Entwurfsmodelle gefunden. Bitte stelle sicher, dass LM Studio mit aktiviertem Servermodus läuft.",
+ "showDebugThoughts": "Debug-Gedanken anzeigen",
+ "showDebugThoughtsDesc": "Debug-Reasoning-Ausgabe von Modellen anzeigen, die dies unterstützen. Hinweis: Dies unterscheidet sich von Standard-<think>-Tags, die immer verarbeitet werden.",
"description": "LM Studio ermöglicht es dir, Modelle lokal auf deinem Computer auszuführen. Eine Anleitung zum Einstieg findest du in ihrem Schnellstart-Guide. Du musst auch die lokale Server-Funktion von LM Studio starten, um es mit dieser Erweiterung zu verwenden. Hinweis: Roo Code verwendet komplexe Prompts und funktioniert am besten mit Claude-Modellen. Weniger leistungsfähige Modelle funktionieren möglicherweise nicht wie erwartet."
},
"ollama": {
diff --git a/webview-ui/src/i18n/locales/en/settings.json b/webview-ui/src/i18n/locales/en/settings.json
index 1be824b37e..b9ef80025c 100644
--- a/webview-ui/src/i18n/locales/en/settings.json
+++ b/webview-ui/src/i18n/locales/en/settings.json
@@ -372,6 +372,8 @@
"draftModelDesc": "Draft model must be from the same model family for speculative decoding to work correctly.",
"selectDraftModel": "Select Draft Model",
"noModelsFound": "No draft models found. Please ensure LM Studio is running with Server Mode enabled.",
+ "showDebugThoughts": "Show Debug Thoughts",
+ "showDebugThoughtsDesc": "Display debug reasoning output from models that support it. Note: this is different from standard <think> tags which are always processed.",
"description": "LM Studio allows you to run models locally on your computer. For instructions on how to get started, see their quickstart guide. You will also need to start LM Studio's local server feature to use it with this extension. Note: Roo Code uses complex prompts and works best with Claude models. Less capable models may not work as expected."
},
"ollama": {
diff --git a/webview-ui/src/i18n/locales/es/settings.json b/webview-ui/src/i18n/locales/es/settings.json
index deb2bc7a22..6cf61acdc3 100644
--- a/webview-ui/src/i18n/locales/es/settings.json
+++ b/webview-ui/src/i18n/locales/es/settings.json
@@ -373,6 +373,8 @@
"draftModelDesc": "El modelo borrador debe ser de la misma familia de modelos para que la decodificación especulativa funcione correctamente.",
"selectDraftModel": "Seleccionar modelo borrador",
"noModelsFound": "No se encontraron modelos borrador. Asegúrese de que LM Studio esté ejecutándose con el Modo Servidor habilitado.",
+ "showDebugThoughts": "Mostrar pensamientos de depuración",
+ "showDebugThoughtsDesc": "Mostrar la salida de razonamiento de depuración de modelos que lo soporten. Nota: esto es diferente de las etiquetas <think> estándar que siempre se procesan.",
"description": "LM Studio le permite ejecutar modelos localmente en su computadora. Para obtener instrucciones sobre cómo comenzar, consulte su guía de inicio rápido. También necesitará iniciar la función de servidor local de LM Studio para usarlo con esta extensión. Nota: Roo Code utiliza prompts complejos y funciona mejor con modelos Claude. Los modelos menos capaces pueden no funcionar como se espera."
},
"ollama": {
diff --git a/webview-ui/src/i18n/locales/fr/settings.json b/webview-ui/src/i18n/locales/fr/settings.json
index ccb8e61d7a..33f11d5974 100644
--- a/webview-ui/src/i18n/locales/fr/settings.json
+++ b/webview-ui/src/i18n/locales/fr/settings.json
@@ -373,6 +373,8 @@
"draftModelDesc": "Le modèle brouillon doit être de la même famille de modèles pour que le décodage spéculatif fonctionne correctement.",
"selectDraftModel": "Sélectionner le modèle brouillon",
"noModelsFound": "Aucun modèle brouillon trouvé. Veuillez vous assurer que LM Studio est en cours d'exécution avec le mode serveur activé.",
+ "showDebugThoughts": "Afficher les pensées de débogage",
+ "showDebugThoughtsDesc": "Afficher la sortie de raisonnement de débogage des modèles qui le supportent. Note : ceci est différent des balises <think> standard qui sont toujours traitées.",
"description": "LM Studio vous permet d'exécuter des modèles localement sur votre ordinateur. Pour obtenir des instructions sur la mise en route, consultez leur guide de démarrage rapide. Vous devrez également démarrer la fonction serveur local de LM Studio pour l'utiliser avec cette extension. Remarque : Roo Code utilise des prompts complexes et fonctionne mieux avec les modèles Claude. Les modèles moins performants peuvent ne pas fonctionner comme prévu."
},
"ollama": {
diff --git a/webview-ui/src/i18n/locales/hi/settings.json b/webview-ui/src/i18n/locales/hi/settings.json
index 3d879e2ca7..524a5ee692 100644
--- a/webview-ui/src/i18n/locales/hi/settings.json
+++ b/webview-ui/src/i18n/locales/hi/settings.json
@@ -373,6 +373,8 @@
"draftModelDesc": "स्पेक्युलेटिव डिकोडिंग के सही काम करने के लिए ड्राफ्ट मॉडल को समान मॉडल परिवार से होना चाहिए।",
"selectDraftModel": "ड्राफ्ट मॉडल चुनें",
"noModelsFound": "कोई ड्राफ्ट मॉडल नहीं मिला। कृपया सुनिश्चित करें कि LM Studio सर्वर मोड सक्षम के साथ चल रहा है।",
+ "showDebugThoughts": "डिबग विचार दिखाएं",
+ "showDebugThoughtsDesc": "इसे समर्थन करने वाले मॉडलों से डिबग तर्क आउटपुट दिखाएं। नोट: यह हमेशा प्रोसेस किए जाने वाले मानक <think> टैग से अलग है।",
"description": "LM Studio आपको अपने कंप्यूटर पर स्थानीय रूप से मॉडल चलाने की अनुमति देता है। आरंभ करने के निर्देशों के लिए, उनकी क्विकस्टार्ट गाइड देखें। आपको इस एक्सटेंशन के साथ उपयोग करने के लिए LM Studio की स्थानीय सर्वर सुविधा भी शुरू करनी होगी। नोट: Roo Code जटिल प्रॉम्प्ट्स का उपयोग करता है और Claude मॉडल के साथ सबसे अच्छा काम करता है। कम क्षमता वाले मॉडल अपेक्षित रूप से काम नहीं कर सकते हैं।"
},
"ollama": {
diff --git a/webview-ui/src/i18n/locales/id/settings.json b/webview-ui/src/i18n/locales/id/settings.json
index 8138726c33..4e4a032548 100644
--- a/webview-ui/src/i18n/locales/id/settings.json
+++ b/webview-ui/src/i18n/locales/id/settings.json
@@ -377,6 +377,8 @@
"draftModelDesc": "Draft model harus dari keluarga model yang sama agar speculative decoding bekerja dengan benar.",
"selectDraftModel": "Pilih Draft Model",
"noModelsFound": "Tidak ada draft model ditemukan. Pastikan LM Studio berjalan dengan Server Mode diaktifkan.",
+ "showDebugThoughts": "Tampilkan Pemikiran Debug",
+ "showDebugThoughtsDesc": "Tampilkan output penalaran debug dari model yang mendukungnya. Catatan: ini berbeda dari tag <think> standar yang selalu diproses.",
"description": "LM Studio memungkinkan kamu menjalankan model secara lokal di komputer. Untuk instruksi cara memulai, lihat panduan quickstart mereka. Kamu juga perlu memulai fitur local server LM Studio untuk menggunakannya dengan ekstensi ini. Catatan: Roo Code menggunakan prompt kompleks dan bekerja terbaik dengan model Claude. Model yang kurang mampu mungkin tidak bekerja seperti yang diharapkan."
},
"ollama": {
diff --git a/webview-ui/src/i18n/locales/it/settings.json b/webview-ui/src/i18n/locales/it/settings.json
index 80ff0f8a71..316ceb31a2 100644
--- a/webview-ui/src/i18n/locales/it/settings.json
+++ b/webview-ui/src/i18n/locales/it/settings.json
@@ -373,6 +373,8 @@
"draftModelDesc": "Per un corretto funzionamento della decodifica speculativa, il modello bozza deve provenire dalla stessa famiglia di modelli.",
"selectDraftModel": "Seleziona modello bozza",
"noModelsFound": "Nessun modello bozza trovato. Assicurati che LM Studio sia in esecuzione con la modalità server abilitata.",
+ "showDebugThoughts": "Mostra pensieri di debug",
+ "showDebugThoughtsDesc": "Mostra l'output di ragionamento di debug dai modelli che lo supportano. Nota: questo è diverso dai tag <think> standard che vengono sempre elaborati.",
"description": "LM Studio ti permette di eseguire modelli localmente sul tuo computer. Per iniziare, consulta la loro guida rapida. Dovrai anche avviare la funzionalità server locale di LM Studio per utilizzarlo con questa estensione. Nota: Roo Code utilizza prompt complessi e funziona meglio con i modelli Claude. I modelli con capacità inferiori potrebbero non funzionare come previsto."
},
"ollama": {
diff --git a/webview-ui/src/i18n/locales/ja/settings.json b/webview-ui/src/i18n/locales/ja/settings.json
index 264d774473..da0d542cc7 100644
--- a/webview-ui/src/i18n/locales/ja/settings.json
+++ b/webview-ui/src/i18n/locales/ja/settings.json
@@ -373,6 +373,8 @@
"draftModelDesc": "推論デコーディングが正しく機能するには、ドラフトモデルは同じモデルファミリーから選択する必要があります。",
"selectDraftModel": "ドラフトモデルを選択",
"noModelsFound": "ドラフトモデルが見つかりません。LM Studioがサーバーモードで実行されていることを確認してください。",
+ "showDebugThoughts": "デバッグ思考を表示",
+ "showDebugThoughtsDesc": "それをサポートするモデルからデバッグ推論出力を表示します。注:これは常に処理される標準の<think>タグとは異なります。",
"description": "LM Studioを使用すると、ローカルコンピューターでモデルを実行できます。始め方については、クイックスタートガイドをご覧ください。また、この拡張機能で使用するには、LM Studioのローカルサーバー機能を起動する必要があります。注意:Roo Codeは複雑なプロンプトを使用し、Claudeモデルで最適に動作します。能力の低いモデルは期待通りに動作しない場合があります。"
},
"ollama": {
diff --git a/webview-ui/src/i18n/locales/ko/settings.json b/webview-ui/src/i18n/locales/ko/settings.json
index e490e31f78..12640ac00b 100644
--- a/webview-ui/src/i18n/locales/ko/settings.json
+++ b/webview-ui/src/i18n/locales/ko/settings.json
@@ -373,6 +373,8 @@
"draftModelDesc": "추론 디코딩이 올바르게 작동하려면 초안 모델이 동일한 모델 패밀리에서 와야 합니다.",
"selectDraftModel": "초안 모델 선택",
"noModelsFound": "초안 모델을 찾을 수 없습니다. LM Studio가 서버 모드로 실행 중인지 확인하세요.",
+ "showDebugThoughts": "디버그 사고 표시",
+ "showDebugThoughtsDesc": "이를 지원하는 모델에서 디버그 추론 출력을 표시합니다. 참고: 이는 항상 처리되는 표준 <think> 태그와는 다릅니다.",
"description": "LM Studio를 사용하면 컴퓨터에서 로컬로 모델을 실행할 수 있습니다. 시작하는 방법은 빠른 시작 가이드를 참조하세요. 이 확장 프로그램과 함께 사용하려면 LM Studio의 로컬 서버 기능도 시작해야 합니다. 참고: Roo Code는 복잡한 프롬프트를 사용하며 Claude 모델에서 가장 잘 작동합니다. 덜 강력한 모델은 예상대로 작동하지 않을 수 있습니다."
},
"ollama": {
diff --git a/webview-ui/src/i18n/locales/nl/settings.json b/webview-ui/src/i18n/locales/nl/settings.json
index ee0ba193e5..588cc64acd 100644
--- a/webview-ui/src/i18n/locales/nl/settings.json
+++ b/webview-ui/src/i18n/locales/nl/settings.json
@@ -373,6 +373,8 @@
"draftModelDesc": "Draft-model moet uit dezelfde modelfamilie komen voor correcte speculatieve decodering.",
"selectDraftModel": "Selecteer draft-model",
"noModelsFound": "Geen draft-modellen gevonden. Zorg dat LM Studio draait met Server Mode ingeschakeld.",
+ "showDebugThoughts": "Debug-gedachten tonen",
+ "showDebugThoughtsDesc": "Debug-redeneeruitvoer tonen van modellen die dit ondersteunen. Opmerking: dit verschilt van standaard <think>-tags die altijd worden verwerkt.",
"description": "LM Studio laat je modellen lokaal op je computer draaien. Zie hun quickstart-gids voor instructies. Je moet ook de lokale server-functie van LM Studio starten om het met deze extensie te gebruiken. Let op: Roo Code gebruikt complexe prompts en werkt het beste met Claude-modellen. Minder krachtige modellen werken mogelijk niet zoals verwacht."
},
"ollama": {
diff --git a/webview-ui/src/i18n/locales/pl/settings.json b/webview-ui/src/i18n/locales/pl/settings.json
index 2d30547d9f..3ab043f962 100644
--- a/webview-ui/src/i18n/locales/pl/settings.json
+++ b/webview-ui/src/i18n/locales/pl/settings.json
@@ -373,6 +373,8 @@
"draftModelDesc": "Aby dekodowanie spekulacyjne działało poprawnie, model szkicu musi pochodzić z tej samej rodziny modeli.",
"selectDraftModel": "Wybierz model szkicu",
"noModelsFound": "Nie znaleziono modeli szkicu. Upewnij się, że LM Studio jest uruchomione z włączonym trybem serwera.",
+ "showDebugThoughts": "Pokaż myśli debugowania",
+ "showDebugThoughtsDesc": "Wyświetl wynik rozumowania debugowania z modeli, które to obsługują. Uwaga: to różni się od standardowych tagów <think>, które są zawsze przetwarzane.",
"description": "LM Studio pozwala na lokalne uruchamianie modeli na twoim komputerze. Aby rozpocząć, zapoznaj się z ich przewodnikiem szybkiego startu. Będziesz również musiał uruchomić funkcję serwera lokalnego LM Studio, aby używać go z tym rozszerzeniem. Uwaga: Roo Code używa złożonych podpowiedzi i działa najlepiej z modelami Claude. Modele o niższych możliwościach mogą nie działać zgodnie z oczekiwaniami."
},
"ollama": {
diff --git a/webview-ui/src/i18n/locales/pt-BR/settings.json b/webview-ui/src/i18n/locales/pt-BR/settings.json
index 338ab9f6b1..dee87cb208 100644
--- a/webview-ui/src/i18n/locales/pt-BR/settings.json
+++ b/webview-ui/src/i18n/locales/pt-BR/settings.json
@@ -373,6 +373,8 @@
"draftModelDesc": "O modelo de rascunho deve ser da mesma família de modelos para que a decodificação especulativa funcione corretamente.",
"selectDraftModel": "Selecionar Modelo de Rascunho",
"noModelsFound": "Nenhum modelo de rascunho encontrado. Certifique-se de que o LM Studio esteja em execução com o Modo Servidor ativado.",
+ "showDebugThoughts": "Mostrar pensamentos de depuração",
+ "showDebugThoughtsDesc": "Exibir saída de raciocínio de depuração de modelos que suportam isso. Nota: isso é diferente das tags <think> padrão que são sempre processadas.",
"description": "O LM Studio permite que você execute modelos localmente em seu computador. Para instruções sobre como começar, veja o guia de início rápido deles. Você também precisará iniciar o recurso de servidor local do LM Studio para usá-lo com esta extensão. Nota: O Roo Code usa prompts complexos e funciona melhor com modelos Claude. Modelos menos capazes podem não funcionar como esperado."
},
"ollama": {
diff --git a/webview-ui/src/i18n/locales/ru/settings.json b/webview-ui/src/i18n/locales/ru/settings.json
index be494c571b..a0616807b4 100644
--- a/webview-ui/src/i18n/locales/ru/settings.json
+++ b/webview-ui/src/i18n/locales/ru/settings.json
@@ -373,6 +373,8 @@
"draftModelDesc": "Черновая модель должна быть из той же семьи моделей для корректной работы speculative decoding.",
"selectDraftModel": "Выбрать черновую модель",
"noModelsFound": "Черновых моделей не найдено. Проверьте, что LM Studio запущен с включённым серверным режимом.",
+ "showDebugThoughts": "Показать отладочные мысли",
+ "showDebugThoughtsDesc": "Отображать отладочный вывод рассуждений от моделей, которые это поддерживают. Примечание: это отличается от стандартных тегов <think>, которые всегда обрабатываются.",
"description": "LM Studio позволяет запускать модели локально на вашем компьютере. Для начала ознакомьтесь с кратким руководством. Также необходимо включить локальный сервер LM Studio для работы с этим расширением. Примечание: Roo Code использует сложные подсказки и лучше всего работает с моделями Claude. Менее мощные модели могут работать некорректно."
},
"ollama": {
diff --git a/webview-ui/src/i18n/locales/tr/settings.json b/webview-ui/src/i18n/locales/tr/settings.json
index fe4508495b..e2acd24c4a 100644
--- a/webview-ui/src/i18n/locales/tr/settings.json
+++ b/webview-ui/src/i18n/locales/tr/settings.json
@@ -373,6 +373,8 @@
"draftModelDesc": "Spekülatif kod çözmenin doğru çalışması için taslak model aynı model ailesinden olmalıdır.",
"selectDraftModel": "Taslak Model Seç",
"noModelsFound": "Taslak model bulunamadı. Lütfen LM Studio'nun Sunucu Modu etkinken çalıştığından emin olun.",
+ "showDebugThoughts": "Hata Ayıklama Düşüncelerini Göster",
+ "showDebugThoughtsDesc": "Bunu destekleyen modellerden hata ayıklama mantığı çıktısını görüntüler. Not: Bu, her zaman işlenen standart <think> etiketlerinden farklıdır.",
"description": "LM Studio, modelleri bilgisayarınızda yerel olarak çalıştırmanıza olanak tanır. Başlamak için hızlı başlangıç kılavuzlarına bakın. Bu uzantıyla kullanmak için LM Studio'nun yerel sunucu özelliğini de başlatmanız gerekecektir. Not: Roo Code karmaşık istemler kullanır ve Claude modelleriyle en iyi şekilde çalışır. Daha az yetenekli modeller beklendiği gibi çalışmayabilir."
},
"ollama": {
diff --git a/webview-ui/src/i18n/locales/vi/settings.json b/webview-ui/src/i18n/locales/vi/settings.json
index 0f03de47a5..9d8d2c5ff1 100644
--- a/webview-ui/src/i18n/locales/vi/settings.json
+++ b/webview-ui/src/i18n/locales/vi/settings.json
@@ -373,6 +373,8 @@
"draftModelDesc": "Mô hình nháp phải từ cùng một họ mô hình để giải mã suy đoán hoạt động chính xác.",
"selectDraftModel": "Chọn mô hình nháp",
"noModelsFound": "Không tìm thấy mô hình nháp nào. Vui lòng đảm bảo LM Studio đang chạy với chế độ máy chủ được bật.",
+ "showDebugThoughts": "Hiển thị Suy nghĩ Debug",
+ "showDebugThoughtsDesc": "Hiển thị đầu ra suy luận debug từ các mô hình hỗ trợ. Lưu ý: điều này khác với các thẻ <think> tiêu chuẩn luôn được xử lý.",
"description": "LM Studio cho phép bạn chạy các mô hình cục bộ trên máy tính của bạn. Để biết hướng dẫn về cách bắt đầu, xem hướng dẫn nhanh của họ. Bạn cũng sẽ cần khởi động tính năng máy chủ cục bộ của LM Studio để sử dụng nó với tiện ích mở rộng này. Lưu ý: Roo Code sử dụng các lời nhắc phức tạp và hoạt động tốt nhất với các mô hình Claude. Các mô hình kém mạnh hơn có thể không hoạt động như mong đợi."
},
"ollama": {
diff --git a/webview-ui/src/i18n/locales/zh-CN/settings.json b/webview-ui/src/i18n/locales/zh-CN/settings.json
index 51db19562a..96b0b6b228 100644
--- a/webview-ui/src/i18n/locales/zh-CN/settings.json
+++ b/webview-ui/src/i18n/locales/zh-CN/settings.json
@@ -373,6 +373,8 @@
"draftModelDesc": "草稿模型必须来自相同的模型系列,推测性解码才能正常工作。",
"selectDraftModel": "选择草稿模型",
"noModelsFound": "未找到草稿模型。请确保 LM Studio 已启用服务器模式运行。",
+ "showDebugThoughts": "显示调试思考",
+ "showDebugThoughtsDesc": "显示支持此功能的模型的调试推理输出。注意:这与始终被处理的标准<think>标签不同。",
"description": "LM Studio 允许您在本地计算机上运行模型。要了解如何开始,请参阅他们的 快速入门指南。您还需要启动 LM Studio 的 本地服务器 功能,以便与此扩展一起使用。注意:Roo Code 使用复杂的提示,并且在 Claude 模型上效果最佳。功能较弱的模型可能无法正常工作。"
},
"ollama": {
diff --git a/webview-ui/src/i18n/locales/zh-TW/settings.json b/webview-ui/src/i18n/locales/zh-TW/settings.json
index 89d517f5b5..a99c8a5c42 100644
--- a/webview-ui/src/i18n/locales/zh-TW/settings.json
+++ b/webview-ui/src/i18n/locales/zh-TW/settings.json
@@ -373,6 +373,8 @@
"draftModelDesc": "草稿模型必須來自相同模型系列才能正確運作。",
"selectDraftModel": "選擇草稿模型",
"noModelsFound": "未找到草稿模型。請確保 LM Studio 以伺服器模式執行。",
+ "showDebugThoughts": "顯示除錯思維",
+ "showDebugThoughtsDesc": "顯示支援此功能模型的除錯推理輸出。注意:這與總是被處理的標準 <think> 標籤不同。",
"description": "LM Studio 允許您在本機電腦執行模型。詳細資訊請參閱快速入門指南。您需要啟動 LM Studio 的本機伺服器功能才能與此擴充功能搭配使用。注意: Roo Code 使用複雜提示,與 Claude 模型搭配最佳。功能較弱的模型可能無法正常運作。"
},
"ollama": {