Commit 37f1096

debug: say goodbye to curl -N, hello one-click raw stream
- adds a new checkbox in the WebUI to display raw LLM output without backend parsing or frontend Markdown rendering
1 parent 111381f commit 37f1096

4 files changed: 34 additions and 3 deletions
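Before the per-file diffs, a quick sketch of the workflow this checkbox replaces. Previously, inspecting the unprocessed token stream meant querying the server by hand (the `curl -N` mentioned in the commit title). The TypeScript below is only an illustration of that manual path: the endpoint, the `stream` flag, and the `reasoning_format` field are taken from the chat.ts change further down, while the function name and prompt handling are made up for the example.

// Sketch: read the raw SSE stream from the server by hand, roughly what
// `curl -N` was used for before the WebUI gained a one-click toggle.
async function inspectRawStream(prompt: string): Promise<void> {
    const response = await fetch('./v1/chat/completions', {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({
            messages: [{ role: 'user', content: prompt }],
            stream: true,
            // 'none' is what the new checkbox sends: no backend reasoning parsing.
            reasoning_format: 'none'
        })
    });

    // Log each SSE chunk exactly as it arrives, with no parsing or Markdown rendering.
    const reader = response.body!.getReader();
    const decoder = new TextDecoder();
    for (;;) {
        const { done, value } = await reader.read();
        if (done) break;
        console.log(decoder.decode(value, { stream: true }));
    }
}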

tools/server/webui/src/lib/components/app/chat/ChatMessages/ChatMessageAssistant.svelte

Lines changed: 23 additions & 1 deletion
@@ -131,7 +131,11 @@
 				</div>
 			</div>
 		{:else if message.role === 'assistant'}
-			<MarkdownContent content={messageContent || ''} />
+			{#if config().disableReasoningFormat}
+				<pre class="raw-output">{messageContent || ''}</pre>
+			{:else}
+				<MarkdownContent content={messageContent || ''} />
+			{/if}
 		{:else}
 			<div class="text-sm whitespace-pre-wrap">
 				{messageContent}
@@ -203,4 +207,22 @@
 			background-position: -200% 0;
 		}
 	}
+
+	.raw-output {
+		width: 100%;
+		max-width: 48rem;
+		margin-top: 1.5rem;
+		padding: 1rem 1.25rem;
+		border-radius: 1rem;
+		background: hsl(var(--muted) / 0.3);
+		color: var(--foreground);
+		font-family:
+			ui-monospace, SFMono-Regular, 'SF Mono', Monaco, 'Cascadia Code', 'Roboto Mono', Consolas,
+			'Liberation Mono', Menlo, monospace;
+		font-size: 0.875rem;
+		line-height: 1.6;
+		white-space: pre-wrap;
+		word-break: break-word;
+	}
+
 </style>
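A note on the styling added above: `white-space: pre-wrap` together with `word-break: break-word` keeps the raw stream's newlines and indentation intact while still letting very long unbroken tokens wrap, so the `<pre>` block never forces horizontal scrolling; the monospace font stack and muted background make it visually obvious that the text is unparsed model output rather than rendered Markdown.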

tools/server/webui/src/lib/components/app/chat/ChatSettings/ChatSettingsDialog.svelte

Lines changed: 5 additions & 0 deletions
@@ -148,6 +148,11 @@
 				key: 'showThoughtInProgress',
 				label: 'Show thought in progress',
 				type: 'checkbox'
+			},
+			{
+				key: 'disableReasoningFormat',
+				label: 'Show raw LLM output without backend parsing and frontend Markdown rendering to inspect streaming across different models.',
+				type: 'checkbox'
 			}
 		]
 	},

tools/server/webui/src/lib/constants/settings-config.ts

Lines changed: 3 additions & 0 deletions
@@ -6,6 +6,7 @@ export const SETTING_CONFIG_DEFAULT: Record<string, string | number | boolean> =
 	theme: 'system',
 	showTokensPerSecond: false,
 	showThoughtInProgress: false,
+	disableReasoningFormat: false,
 	keepStatsVisible: false,
 	askForTitleConfirmation: false,
 	pasteLongTextToFileLen: 2500,
@@ -76,6 +77,8 @@ export const SETTING_CONFIG_INFO: Record<string, string> = {
 	custom: 'Custom JSON parameters to send to the API. Must be valid JSON format.',
 	showTokensPerSecond: 'Display generation speed in tokens per second during streaming.',
 	showThoughtInProgress: 'Expand thought process by default when generating messages.',
+	disableReasoningFormat:
+		'Show raw LLM output without backend parsing and frontend Markdown rendering to inspect streaming across different models.',
 	keepStatsVisible: 'Keep processing statistics visible after generation finishes.',
 	askForTitleConfirmation:
 		'Ask for confirmation before automatically changing conversation title when editing the first message.',
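For orientation, SETTING_CONFIG_DEFAULT is the fallback used when the user has not touched a setting, and SETTING_CONFIG_INFO holds the description strings for those settings. The helper below is purely hypothetical (it does not exist in the repo) and the `$lib` import path is an assumed SvelteKit alias; it only illustrates the fallback pattern the new key participates in.

import { SETTING_CONFIG_DEFAULT } from '$lib/constants/settings-config';

// Hypothetical helper: resolve a setting value, falling back to the shipped default.
function resolveSetting(
    key: string,
    userOverrides: Record<string, string | number | boolean>
): string | number | boolean {
    return key in userOverrides ? userOverrides[key] : SETTING_CONFIG_DEFAULT[key];
}

// The key introduced by this commit defaults to false, i.e. parsed, Markdown-rendered output.
const rawOutputEnabled = resolveSetting('disableReasoningFormat', {}); // false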

tools/server/webui/src/lib/services/chat.ts

Lines changed: 3 additions & 2 deletions
@@ -78,6 +78,8 @@ export class ChatService {
 			timings_per_token
 		} = options;
 
+		const currentConfig = config();
+
 		// Cancel any ongoing request and create a new abort controller
 		this.abort();
 		this.abortController = new AbortController();
@@ -117,7 +119,7 @@
 			stream
 		};
 
-		requestBody.reasoning_format = 'auto';
+		requestBody.reasoning_format = currentConfig.disableReasoningFormat ? 'none' : 'auto';
 
 		if (temperature !== undefined) requestBody.temperature = temperature;
 		// Set max_tokens to -1 (infinite) if not provided or empty
@@ -161,7 +163,6 @@
 		}
 
 		try {
-			const currentConfig = config();
 			const apiKey = currentConfig.apiKey?.toString().trim();
 
 			const response = await fetch(`./v1/chat/completions`, {
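The net effect of the chat.ts change on the outgoing request, sketched for both states of the checkbox. Only `reasoning_format` differs; the surrounding payload here is illustrative, not the exact body built by ChatService.

// Illustrative request bodies; only `reasoning_format` changes with the checkbox.
const base = {
    messages: [{ role: 'user' as const, content: 'Why is the sky blue?' }],
    stream: true
};

// Checkbox off (default): the server applies its reasoning parsing as before.
const parsedRequest = { ...base, reasoning_format: 'auto' };

// Checkbox on: the server leaves the model output unparsed and the WebUI renders it verbatim.
const rawRequest = { ...base, reasoning_format: 'none' };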
