Skip to content

Commit 90d0b45

Browse files
webui: introduce OpenAI-compatible model selector in JSON payload
1 parent 7639bec commit 90d0b45

File tree

9 files changed

+415
-25
lines changed

9 files changed

+415
-25
lines changed

tools/server/webui/src/lib/components/app/chat/ChatSidebar/ChatSidebar.svelte

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -13,6 +13,7 @@
1313
updateConversationName
1414
} from '$lib/stores/chat.svelte';
1515
import ChatSidebarActions from './ChatSidebarActions.svelte';
16+
import ModelSelector from './ModelSelector.svelte';
1617
1718
const sidebar = Sidebar.useSidebar();
1819
@@ -110,6 +111,8 @@
110111
<h1 class="inline-flex items-center gap-1 px-2 text-xl font-semibold">llama.cpp</h1>
111112
</a>
112113

114+
<ModelSelector />
115+
113116
<ChatSidebarActions {handleMobileSidebarItemClick} bind:isSearchModeActive bind:searchQuery />
114117
</Sidebar.Header>
115118

Lines changed: 99 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,99 @@
<script lang="ts">
	import { onMount } from 'svelte';
	import { Loader2 } from '@lucide/svelte';
	import * as Select from '$lib/components/ui/select';
	import {
		fetchModels,
		modelOptions,
		modelsError,
		modelsLoading,
		modelsUpdating,
		selectModel,
		selectedModelId
	} from '$lib/stores/models.svelte';
	import type { ModelOption } from '$lib/stores/models.svelte';

	// Reactive views over the shared models store.
	let options = $derived(modelOptions());
	let loading = $derived(modelsLoading());
	let updating = $derived(modelsUpdating());
	let error = $derived(modelsError());
	let activeId = $derived(selectedModelId());

	// Gates the initial "Loading models…" placeholder so it is only shown
	// before the first fetch attempt has settled.
	let isMounted = $state(false);

	onMount(async () => {
		try {
			await fetchModels();
		} catch (err) {
			// FIX: catch variable renamed from `error` — it shadowed the derived
			// `error` store state declared above, which is confusing and trips
			// no-shadow lint rules.
			console.error('Unable to load models:', err);
		} finally {
			isMounted = true;
		}
	});

	/**
	 * Switches the active model when the user picks an entry in the dropdown.
	 * Ignores empty values and entries that have disappeared from the list
	 * (e.g. the server's model set changed between fetch and selection).
	 */
	async function handleSelect(value: string | undefined) {
		if (!value) return;

		const option = options.find((item) => item.id === value);
		if (!option) {
			console.error('Model is no longer available');
			return;
		}

		try {
			await selectModel(option.id);
		} catch (err) {
			// FIX: same shadowing issue as in onMount — renamed to `err`.
			console.error('Failed to switch model:', err);
		}
	}

	/**
	 * Resolves the option shown in the trigger: the actively selected model
	 * when one is known, otherwise the first available option as a fallback.
	 */
	function getDisplayOption(): ModelOption | undefined {
		if (activeId) {
			return options.find((option) => option.id === activeId);
		}

		return options[0];
	}
</script>

{#if loading && options.length === 0 && !isMounted}
	<div class="flex items-center gap-2 text-xs text-muted-foreground">
		<Loader2 class="h-4 w-4 animate-spin" />
		Loading models…
	</div>
{:else if options.length === 0}
	<p class="text-xs text-muted-foreground">No models available.</p>
{:else}
	{@const selectedOption = getDisplayOption()}

	<Select.Root
		type="single"
		value={selectedOption?.id ?? ''}
		onValueChange={handleSelect}
		disabled={loading || updating}
	>
		<Select.Trigger class="h-9 w-full justify-between">
			<span class="truncate text-sm font-medium">{selectedOption?.name || 'Select model'}</span>

			{#if updating}
				<Loader2 class="h-4 w-4 animate-spin text-muted-foreground" />
			{/if}
		</Select.Trigger>

		<Select.Content class="z-[100000]">
			{#each options as option (option.id)}
				<Select.Item value={option.id} label={option.name}>
					<span class="text-sm font-medium">{option.name}</span>

					{#if option.description}
						<span class="text-xs text-muted-foreground">{option.description}</span>
					{/if}
				</Select.Item>
			{/each}
		</Select.Content>
	</Select.Root>
{/if}

{#if error}
	<p class="mt-2 text-xs text-destructive">{error}</p>
{/if}

tools/server/webui/src/lib/components/app/index.ts

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -32,6 +32,7 @@ export { default as ParameterSourceIndicator } from './chat/ChatSettings/Paramet
3232
export { default as ChatSidebar } from './chat/ChatSidebar/ChatSidebar.svelte';
3333
export { default as ChatSidebarConversationItem } from './chat/ChatSidebar/ChatSidebarConversationItem.svelte';
3434
export { default as ChatSidebarSearch } from './chat/ChatSidebar/ChatSidebarSearch.svelte';
35+
export { default as ChatSidebarModelSelector } from './chat/ChatSidebar/ModelSelector.svelte';
3536

3637
export { default as ChatErrorDialog } from './dialogs/ChatErrorDialog.svelte';
3738
export { default as EmptyFileAlertDialog } from './dialogs/EmptyFileAlertDialog.svelte';

tools/server/webui/src/lib/services/chat.ts

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,5 @@
11
import { config } from '$lib/stores/settings.svelte';
2+
import { selectedModelName } from '$lib/stores/models.svelte';
23
import { slotsService } from './slots';
34
/**
45
* ChatService - Low-level API communication layer for llama.cpp server interactions
@@ -118,6 +119,11 @@ export class ChatService {
118119
stream
119120
};
120121

122+
const activeModel = selectedModelName();
123+
if (activeModel) {
124+
requestBody.model = activeModel;
125+
}
126+
121127
requestBody.reasoning_format = currentConfig.disableReasoningFormat ? 'none' : 'auto';
122128

123129
if (temperature !== undefined) requestBody.temperature = temperature;
Lines changed: 22 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,22 @@
1+
import { base } from '$app/paths';
2+
import { config } from '$lib/stores/settings.svelte';
3+
import type { ApiModelListResponse } from '$lib/types/api';
4+
5+
export class ModelsService {
6+
static async list(): Promise<ApiModelListResponse> {
7+
const currentConfig = config();
8+
const apiKey = currentConfig.apiKey?.toString().trim();
9+
10+
const response = await fetch(`${base}/v1/models`, {
11+
headers: {
12+
...(apiKey ? { Authorization: `Bearer ${apiKey}` } : {})
13+
}
14+
});
15+
16+
if (!response.ok) {
17+
throw new Error(`Failed to fetch model list (status ${response.status})`);
18+
}
19+
20+
return response.json() as Promise<ApiModelListResponse>;
21+
}
22+
}

tools/server/webui/src/lib/stores/chat.svelte.ts

Lines changed: 24 additions & 24 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,5 @@
11
import { DatabaseStore } from '$lib/stores/database';
22
import { chatService, slotsService } from '$lib/services';
3-
import { serverStore } from '$lib/stores/server.svelte';
43
import { config } from '$lib/stores/settings.svelte';
54
import { filterByLeafNodeId, findLeafNode, findDescendantMessages } from '$lib/utils/branching';
65
import { browser } from '$app/environment';
@@ -359,28 +358,29 @@ class ChatStore {
359358
): Promise<void> {
360359
let streamedContent = '';
361360
let streamedReasoningContent = '';
362-
let modelCaptured = false;
361+
let resolvedModel: string | null = null;
362+
let modelPersisted = false;
363363

364-
const captureModelIfNeeded = (updateDbImmediately = true): string | undefined => {
365-
if (!modelCaptured) {
366-
const currentModelName = serverStore.modelName;
364+
const recordModel = (modelName: string, persistImmediately = true): void => {
365+
const trimmedModel = modelName.trim();
367366

368-
if (currentModelName) {
369-
if (updateDbImmediately) {
370-
DatabaseStore.updateMessage(assistantMessage.id, { model: currentModelName }).catch(
371-
console.error
372-
);
373-
}
367+
if (!trimmedModel || trimmedModel === resolvedModel) {
368+
return;
369+
}
374370

375-
const messageIndex = this.findMessageIndex(assistantMessage.id);
371+
resolvedModel = trimmedModel;
376372

377-
this.updateMessageAtIndex(messageIndex, { model: currentModelName });
378-
modelCaptured = true;
373+
const messageIndex = this.findMessageIndex(assistantMessage.id);
379374

380-
return currentModelName;
381-
}
375+
this.updateMessageAtIndex(messageIndex, { model: trimmedModel });
376+
377+
if (persistImmediately && !modelPersisted) {
378+
modelPersisted = true;
379+
DatabaseStore.updateMessage(assistantMessage.id, { model: trimmedModel }).catch((error) => {
380+
console.error('Failed to persist model name:', error);
381+
modelPersisted = false;
382+
});
382383
}
383-
return undefined;
384384
};
385385

386386
slotsService.startStreaming();
@@ -399,7 +399,6 @@ class ChatStore {
399399
assistantMessage.id
400400
);
401401

402-
captureModelIfNeeded();
403402
const messageIndex = this.findMessageIndex(assistantMessage.id);
404403
this.updateMessageAtIndex(messageIndex, {
405404
content: streamedContent
@@ -409,13 +408,15 @@ class ChatStore {
409408
onReasoningChunk: (reasoningChunk: string) => {
410409
streamedReasoningContent += reasoningChunk;
411410

412-
captureModelIfNeeded();
413-
414411
const messageIndex = this.findMessageIndex(assistantMessage.id);
415412

416413
this.updateMessageAtIndex(messageIndex, { thinking: streamedReasoningContent });
417414
},
418415

416+
onModel: (modelName: string) => {
417+
recordModel(modelName);
418+
},
419+
419420
onComplete: async (
420421
finalContent?: string,
421422
reasoningContent?: string,
@@ -434,10 +435,9 @@ class ChatStore {
434435
timings: timings
435436
};
436437

437-
const capturedModel = captureModelIfNeeded(false);
438-
439-
if (capturedModel) {
440-
updateData.model = capturedModel;
438+
if (resolvedModel && !modelPersisted) {
439+
updateData.model = resolvedModel;
440+
modelPersisted = true;
441441
}
442442

443443
await DatabaseStore.updateMessage(assistantMessage.id, updateData);

0 commit comments

Comments (0)