
Commit 964fb7f

chore: applied suggestions
Signed-off-by: Evzen Gasta <[email protected]>
1 parent 26409bf commit 964fb7f

File tree

3 files changed: +13 −7 lines


packages/backend/package.json

Lines changed: 2 additions & 3 deletions
@@ -53,11 +53,10 @@
     },
     "ai-lab.inferenceRuntime": {
       "type": "string",
-      "default": "llama-cpp",
       "enum": [
         "llama-cpp",
-        "whisper-cpp",
-        "none"
+        "openvino",
+        "vllm"
       ],
       "description": "Choose the default inferencing runtime for AI Lab"
     },
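
For reference, the narrowed set of runtime values can be written as a union type. This is only an illustrative sketch of the enum above; the InferenceRuntime and selectedRuntime names are hypothetical and not part of this commit. Since the contribution no longer declares a "default", the stored preference may be absent:

// Illustrative only: mirrors the enum declared in package.json above.
type InferenceRuntime = 'llama-cpp' | 'openvino' | 'vllm';

// With no "default" entry in the contribution, the stored preference may be
// missing, so consumers should treat the value as possibly undefined.
let selectedRuntime: InferenceRuntime | undefined;
console.log(selectedRuntime ?? 'no runtime configured');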

packages/frontend/src/lib/select/ModelSelect.svelte

Lines changed: 9 additions & 2 deletions
@@ -47,7 +47,7 @@ function handleOnChange(nValue: (ModelInfo & { label: string; value: string }) |
   value = nValue;
 }

-let defaultRuntime: string = 'llama-cpp';
+let defaultRuntime: string | undefined = $state();

 onMount(() => {
   return configuration.subscribe(values => {
@@ -56,6 +56,13 @@ onMount(() => {
     }
   });
 });
+
+function filterModel(model: ModelInfo): boolean {
+  // If the defaultRuntime is undefined we should not filter any model
+  if (!defaultRuntime) return true;
+
+  return model.backend === defaultRuntime;
+}
 </script>

 <Select
@@ -66,7 +73,7 @@ onMount(() => {
   onchange={handleOnChange}
   placeholder="Select model to use"
   items={models
-    .filter(model => model.backend === defaultRuntime)
+    .filter(filterModel)
     .toSorted((a, b) => getModelSortingScore(a) - getModelSortingScore(b))
     .map(model => ({ ...model, value: model.id, label: model.name }))}>
   <div slot="item" let:item>
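
A minimal standalone sketch of the new filtering behaviour follows. The filterModel body is taken from the diff above; the trimmed ModelInfo shape and the sample model data are invented for illustration only:

// Sketch only: ModelInfo is reduced to the fields the filter touches,
// and the sample models below are invented for illustration.
interface ModelInfo { id: string; name: string; backend?: string }

let defaultRuntime: string | undefined = undefined;

function filterModel(model: ModelInfo): boolean {
  // If the defaultRuntime is undefined we should not filter any model
  if (!defaultRuntime) return true;
  return model.backend === defaultRuntime;
}

const models: ModelInfo[] = [
  { id: 'a', name: 'model-a', backend: 'llama-cpp' },
  { id: 'b', name: 'model-b', backend: 'openvino' },
];

// No configured runtime: every model is shown.
console.log(models.filter(filterModel).map(m => m.name)); // [ 'model-a', 'model-b' ]

// Configured runtime: only models whose backend matches remain.
defaultRuntime = 'llama-cpp';
console.log(models.filter(filterModel).map(m => m.name)); // [ 'model-a' ]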

packages/frontend/src/pages/Recipes.svelte

Lines changed: 2 additions & 2 deletions
@@ -103,12 +103,12 @@ function openContribution(): void {
   studioClient.openURL('https://github.com/containers/ai-lab-recipes/blob/main/CONTRIBUTING.md').catch(console.error);
 }

-let defaultRuntime: string = $state('llama-cpp');
+let defaultRuntime: string | undefined = $state();

 onMount(() => {
   const inferenceRuntime = $configuration?.inferenceRuntime;
   if (inferenceRuntime) defaultRuntime = inferenceRuntime;
-  onFilterChange('tools', defaultRuntime);
+  onFilterChange('tools', defaultRuntime ?? 'all');
 });
 </script>

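The Recipes page applies the same idea with a fallback: when no runtime is configured, the tools filter falls back to 'all' instead of an undefined value. A hedged sketch of just that expression, with a hypothetical helper standing in for the page's real onFilterChange call:

// Sketch only: resolveToolsFilter is a hypothetical helper that shows the
// `defaultRuntime ?? 'all'` fallback from the diff above in isolation.
function resolveToolsFilter(inferenceRuntime: string | undefined): string {
  // With no configured runtime, the recipes list is not narrowed at all.
  return inferenceRuntime ?? 'all';
}

console.log(resolveToolsFilter(undefined));   // 'all'
console.log(resolveToolsFilter('llama-cpp')); // 'llama-cpp'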
