Skip to content

Commit ece2c99

Browse files
webui: restore OpenAI-Compatible model source of truth and unify metadata capture
This change re-establishes a single, reliable source of truth for the active model, fully aligned with the OpenAI-Compat API behavior. It introduces a unified metadata flow that captures the model field from both streaming and non-streaming responses, wiring a new onModel callback through ChatService. The model name is now resolved directly from the API payload rather than relying on server /props or UI assumptions. ChatStore records and persists the resolved model for each assistant message during streaming, ensuring consistency across the UI and database. Type definitions for API and settings were also extended to include model metadata and the onModel callback, completing the alignment with OpenAI-Compat semantics.
1 parent fc8aa51 commit ece2c99

File tree

3 files changed

+92
-3
lines changed

3 files changed

+92
-3
lines changed

tools/server/webui/src/lib/services/chat.ts

Lines changed: 83 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -52,6 +52,8 @@ export class ChatService {
5252
onChunk,
5353
onComplete,
5454
onError,
55+
onReasoningChunk,
56+
onModel,
5557
// Generation parameters
5658
temperature,
5759
max_tokens,
@@ -195,13 +197,14 @@ export class ChatService {
195197
onChunk,
196198
onComplete,
197199
onError,
198-
options.onReasoningChunk,
200+
onReasoningChunk,
201+
onModel,
199202
conversationId,
200203
abortController.signal
201204
);
202205
return;
203206
} else {
204-
return this.handleNonStreamResponse(response, onComplete, onError);
207+
return this.handleNonStreamResponse(response, onComplete, onError, onModel);
205208
}
206209
} catch (error) {
207210
if (error instanceof Error && error.name === 'AbortError') {
@@ -261,6 +264,7 @@ export class ChatService {
261264
) => void,
262265
onError?: (error: Error) => void,
263266
onReasoningChunk?: (chunk: string) => void,
267+
onModel?: (model: string) => void,
264268
conversationId?: string,
265269
abortSignal?: AbortSignal
266270
): Promise<void> {
@@ -276,6 +280,7 @@ export class ChatService {
276280
let hasReceivedData = false;
277281
let lastTimings: ChatMessageTimings | undefined;
278282
let streamFinished = false;
283+
let modelEmitted = false;
279284

280285
try {
281286
let chunk = '';
@@ -304,6 +309,12 @@ export class ChatService {
304309
try {
305310
const parsed: ApiChatCompletionStreamChunk = JSON.parse(data);
306311

312+
const chunkModel = this.extractModelName(parsed);
313+
if (chunkModel && !modelEmitted) {
314+
modelEmitted = true;
315+
onModel?.(chunkModel);
316+
}
317+
307318
const content = parsed.choices[0]?.delta?.content;
308319
const reasoningContent = parsed.choices[0]?.delta?.reasoning_content;
309320
const timings = parsed.timings;
@@ -378,7 +389,8 @@ export class ChatService {
378389
reasoningContent?: string,
379390
timings?: ChatMessageTimings
380391
) => void,
381-
onError?: (error: Error) => void
392+
onError?: (error: Error) => void,
393+
onModel?: (model: string) => void
382394
): Promise<string> {
383395
try {
384396
const responseText = await response.text();
@@ -389,6 +401,11 @@ export class ChatService {
389401
}
390402

391403
const data: ApiChatCompletionResponse = JSON.parse(responseText);
404+
const responseModel = this.extractModelName(data);
405+
if (responseModel) {
406+
onModel?.(responseModel);
407+
}
408+
392409
const content = data.choices[0]?.message?.content || '';
393410
const reasoningContent = data.choices[0]?.message?.reasoning_content;
394411

@@ -618,6 +635,69 @@ export class ChatService {
618635
}
619636
}
620637

638+
private extractModelName(data: unknown): string | undefined {
639+
if (!data || typeof data !== 'object') {
640+
return undefined;
641+
}
642+
643+
const record = data as Record<string, unknown>;
644+
const normalize = (value: unknown): string | undefined => {
645+
if (typeof value !== 'string') {
646+
return undefined;
647+
}
648+
649+
const trimmed = value.trim();
650+
651+
return trimmed.length > 0 ? trimmed : undefined;
652+
};
653+
654+
const rootModel = normalize(record['model']);
655+
if (rootModel) {
656+
return rootModel;
657+
}
658+
659+
const choices = record['choices'];
660+
if (!Array.isArray(choices) || choices.length === 0) {
661+
return undefined;
662+
}
663+
664+
const firstChoice = choices[0] as Record<string, unknown> | undefined;
665+
if (!firstChoice) {
666+
return undefined;
667+
}
668+
669+
const choiceModel = normalize(firstChoice['model']);
670+
if (choiceModel) {
671+
return choiceModel;
672+
}
673+
674+
const delta = firstChoice['delta'] as Record<string, unknown> | undefined;
675+
if (delta) {
676+
const deltaModel = normalize(delta['model']);
677+
if (deltaModel) {
678+
return deltaModel;
679+
}
680+
}
681+
682+
const message = firstChoice['message'] as Record<string, unknown> | undefined;
683+
if (message) {
684+
const messageModel = normalize(message['model']);
685+
if (messageModel) {
686+
return messageModel;
687+
}
688+
}
689+
690+
const metadata = firstChoice['metadata'] as Record<string, unknown> | undefined;
691+
if (metadata) {
692+
const metadataModel = normalize(metadata['model']);
693+
if (metadataModel) {
694+
return metadataModel;
695+
}
696+
}
697+
698+
return undefined;
699+
}
700+
621701
private updateProcessingState(
622702
timings?: ChatMessageTimings,
623703
promptProgress?: ChatMessagePromptProgress,

tools/server/webui/src/lib/types/api.d.ts

Lines changed: 8 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -186,10 +186,14 @@ export interface ApiChatCompletionRequest {
186186
}
187187

188188
export interface ApiChatCompletionStreamChunk {
189+
model?: string;
189190
choices: Array<{
191+
model?: string;
192+
metadata?: { model?: string };
190193
delta: {
191194
content?: string;
192195
reasoning_content?: string;
196+
model?: string;
193197
};
194198
}>;
195199
timings?: {
@@ -203,10 +207,14 @@ export interface ApiChatCompletionStreamChunk {
203207
}
204208

205209
export interface ApiChatCompletionResponse {
210+
model?: string;
206211
choices: Array<{
212+
model?: string;
213+
metadata?: { model?: string };
207214
message: {
208215
content: string;
209216
reasoning_content?: string;
217+
model?: string;
210218
};
211219
}>;
212220
}

tools/server/webui/src/lib/types/settings.d.ts

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -41,6 +41,7 @@ export interface SettingsChatServiceOptions {
4141
// Callbacks
4242
onChunk?: (chunk: string) => void;
4343
onReasoningChunk?: (chunk: string) => void;
44+
onModel?: (model: string) => void;
4445
onComplete?: (response: string, reasoningContent?: string, timings?: ChatMessageTimings) => void;
4546
onError?: (error: Error) => void;
4647
}

0 commit comments

Comments (0)