Skip to content

Commit bd0e5d7

Browse files
authored
feat: display runtime for playgrounds (#3007)
Signed-off-by: Philippe Martin <[email protected]>
1 parent b3c5396 commit bd0e5d7

File tree

7 files changed

+136
-13
lines changed

7 files changed

+136
-13
lines changed
Lines changed: 37 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,37 @@
1+
/**********************************************************************
2+
* Copyright (C) 2025 Red Hat, Inc.
3+
*
4+
* Licensed under the Apache License, Version 2.0 (the "License");
5+
* you may not use this file except in compliance with the License.
6+
* You may obtain a copy of the License at
7+
*
8+
* http://www.apache.org/licenses/LICENSE-2.0
9+
*
10+
* Unless required by applicable law or agreed to in writing, software
11+
* distributed under the License is distributed on an "AS IS" BASIS,
12+
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13+
* See the License for the specific language governing permissions and
14+
* limitations under the License.
15+
*
16+
* SPDX-License-Identifier: Apache-2.0
17+
***********************************************************************/
18+
19+
import { test, vi, beforeEach } from 'vitest';
20+
import { render, screen } from '@testing-library/svelte';
21+
import { InferenceType } from '@shared/models/IInference';
22+
import type { ConversationWithBackend } from '/@/stores/conversations';
23+
import PlaygroundColumnRuntime from './PlaygroundColumnRuntime.svelte';
24+
25+
beforeEach(() => {
26+
vi.resetAllMocks();
27+
});
28+
29+
test('should display label for backend', async () => {
30+
render(PlaygroundColumnRuntime, {
31+
object: {
32+
backend: InferenceType.LLAMA_CPP,
33+
} as ConversationWithBackend,
34+
});
35+
36+
screen.getByText('llamacpp');
37+
});
Lines changed: 9 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,9 @@
1+
<script lang="ts">
import { inferenceTypeLabel } from '@shared/models/IInference';
import type { ConversationWithBackend } from '/@/stores/conversations';
import Badge from '../../Badge.svelte';

// Table cell renderer: the conversation enriched with its inference backend.
export let object: ConversationWithBackend;

// Human-readable name of the runtime shown in the badge.
$: label = inferenceTypeLabel(object.backend);
</script>

<Badge content={label} />

packages/frontend/src/pages/Playground.spec.ts

Lines changed: 8 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -23,12 +23,12 @@ import Playground from './Playground.svelte';
2323
import { studioClient } from '../utils/client';
2424
import type { ModelInfo } from '@shared/models/IModelInfo';
2525
import { fireEvent } from '@testing-library/dom';
26-
import type { AssistantChat, Conversation, ModelUsage, PendingChat, UserChat } from '@shared/models/IPlaygroundMessage';
26+
import type { AssistantChat, ModelUsage, PendingChat, UserChat } from '@shared/models/IPlaygroundMessage';
2727
import * as conversationsStore from '/@/stores/conversations';
2828
import * as inferenceServersStore from '/@/stores/inferenceServers';
2929
import { readable, writable } from 'svelte/store';
3030
import userEvent from '@testing-library/user-event';
31-
import type { InferenceServer } from '@shared/models/IInference';
31+
import { InferenceType, type InferenceServer } from '@shared/models/IInference';
3232

3333
vi.mock('../utils/client', async () => {
3434
return {
@@ -59,13 +59,14 @@ vi.mock('/@/stores/inferenceServers', async () => {
5959
};
6060
});
6161

62-
const customConversations = writable<Conversation[]>([
62+
const customConversations = writable<conversationsStore.ConversationWithBackend[]>([
6363
{
6464
id: 'playground-1',
6565
name: 'Playground 1',
6666
modelId: 'model-1',
6767
messages: [],
6868
usage: {} as ModelUsage,
69+
backend: InferenceType.LLAMA_CPP,
6970
},
7071
]);
7172

@@ -246,6 +247,7 @@ test('receiving complete message should enable the input element', async () => {
246247
} as AssistantChat,
247248
],
248249
usage: {} as ModelUsage,
250+
backend: InferenceType.LLAMA_CPP,
249251
},
250252
]);
251253

@@ -291,6 +293,7 @@ test('sending prompt should display the prompt and the response', async () => {
291293
} as unknown as PendingChat,
292294
],
293295
usage: {} as ModelUsage,
296+
backend: InferenceType.LLAMA_CPP,
294297
},
295298
]);
296299

@@ -319,6 +322,7 @@ test('sending prompt should display the prompt and the response', async () => {
319322
} as AssistantChat,
320323
],
321324
usage: {} as ModelUsage,
325+
backend: InferenceType.LLAMA_CPP,
322326
},
323327
]);
324328

@@ -398,6 +402,7 @@ describe('error message', () => {
398402
timestamp: 55,
399403
},
400404
],
405+
backend: InferenceType.LLAMA_CPP,
401406
},
402407
]);
403408

packages/frontend/src/pages/Playgrounds.svelte

Lines changed: 11 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -3,19 +3,24 @@ import { router } from 'tinro';
33
import PlaygroundColumnModel from '../lib/table/playground/PlaygroundColumnModel.svelte';
44
import PlaygroundColumnName from '../lib/table/playground/PlaygroundColumnName.svelte';
55
import ConversationColumnAction from '/@/lib/table/playground/ConversationColumnAction.svelte';
6-
import { conversations } from '/@/stores/conversations';
6+
import { conversations, type ConversationWithBackend } from '/@/stores/conversations';
77
import PlaygroundColumnIcon from '/@/lib/table/playground/PlaygroundColumnIcon.svelte';
88
import { Button, EmptyScreen, Table, TableColumn, TableRow, NavPage } from '@podman-desktop/ui-svelte';
9-
import type { Conversation } from '@shared/models/IPlaygroundMessage';
109
import { faMessage, faPlusCircle } from '@fortawesome/free-solid-svg-icons';
10+
import PlaygroundColumnRuntime from '../lib/table/playground/PlaygroundColumnRuntime.svelte';
1111
1212
const columns = [
1313
new TableColumn<unknown>('', { width: '40px', renderer: PlaygroundColumnIcon }),
14-
new TableColumn<Conversation>('Name', { width: '1fr', renderer: PlaygroundColumnName }),
15-
new TableColumn<Conversation>('Model', { width: '1fr', renderer: PlaygroundColumnModel }),
16-
new TableColumn<Conversation>('Actions', { width: '80px', renderer: ConversationColumnAction, align: 'right' }),
14+
new TableColumn<ConversationWithBackend>('Name', { width: '1fr', renderer: PlaygroundColumnName }),
15+
new TableColumn<ConversationWithBackend>('Model', { width: '1fr', renderer: PlaygroundColumnModel }),
16+
new TableColumn<ConversationWithBackend>('Runtime', { width: '90px', renderer: PlaygroundColumnRuntime }),
17+
new TableColumn<ConversationWithBackend>('Actions', {
18+
width: '80px',
19+
renderer: ConversationColumnAction,
20+
align: 'right',
21+
}),
1722
];
18-
const row = new TableRow<Conversation>({});
23+
const row = new TableRow<ConversationWithBackend>({});
1924
2025
function createNewPlayground(): void {
2126
router.goto('/playground/create');

packages/frontend/src/stores/conversations.ts

Lines changed: 31 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -21,21 +21,48 @@ import { readable } from 'svelte/store';
2121
import { MSG_CONVERSATIONS_UPDATE } from '@shared/Messages';
2222
import { rpcBrowser, studioClient } from '/@/utils/client';
2323
import type { Conversation } from '@shared/models/IPlaygroundMessage';
24+
import type { ModelInfo } from '@shared/models/IModelInfo';
25+
import type { InferenceType } from '@shared/models/IInference';
26+
import { toInferenceType } from '@shared/models/IInference';
27+
28+
export interface ConversationWithBackend extends Conversation {
29+
backend: InferenceType;
30+
}
2431

2532
// RPCReadable cannot be used here, as it is doing some debouncing, and we want
2633
// to get the conversation as soon as the tokens arrive here, instead getting them by packets
27-
export const conversations: Readable<Conversation[]> = readable<Conversation[]>([], set => {
28-
const sub = rpcBrowser.subscribe(MSG_CONVERSATIONS_UPDATE, msg => {
29-
set(msg);
34+
export const conversations: Readable<ConversationWithBackend[]> = readable<ConversationWithBackend[]>([], set => {
35+
const sub = rpcBrowser.subscribe(MSG_CONVERSATIONS_UPDATE, conversations => {
36+
setWithBackend(set, conversations);
3037
});
3138
// Initialize the store manually
3239
studioClient
3340
.getPlaygroundConversations()
3441
.then(state => {
35-
set(state);
42+
setWithBackend(set, state);
3643
})
3744
.catch((err: unknown) => console.error(`Error getting playground conversations:`, err));
3845
return () => {
3946
sub.unsubscribe();
4047
};
4148
});
49+
50+
function setWithBackend(set: (value: ConversationWithBackend[]) => void, conversations: Conversation[]): void {
51+
studioClient
52+
.getModelsInfo()
53+
.then(modelsInfo => {
54+
const conversationsWithBackend: ConversationWithBackend[] = conversations.map(conversation => ({
55+
...conversation,
56+
backend: getModelBackend(modelsInfo, conversation.modelId),
57+
}));
58+
set(conversationsWithBackend);
59+
})
60+
.catch((err: unknown) => {
61+
console.error('error getting models info', String(err));
62+
});
63+
}
64+
65+
function getModelBackend(modelsInfo: ModelInfo[], modelId: string): InferenceType {
66+
const backend = modelsInfo.find(modelInfo => modelInfo.id === modelId)?.backend;
67+
return toInferenceType(backend);
68+
}
Lines changed: 28 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,28 @@
1+
/**********************************************************************
2+
* Copyright (C) 2025 Red Hat, Inc.
3+
*
4+
* Licensed under the Apache License, Version 2.0 (the "License");
5+
* you may not use this file except in compliance with the License.
6+
* You may obtain a copy of the License at
7+
*
8+
* http://www.apache.org/licenses/LICENSE-2.0
9+
*
10+
* Unless required by applicable law or agreed to in writing, software
11+
* distributed under the License is distributed on an "AS IS" BASIS,
12+
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13+
* See the License for the specific language governing permissions and
14+
* limitations under the License.
15+
*
16+
* SPDX-License-Identifier: Apache-2.0
17+
***********************************************************************/
18+
19+
import { expect, test } from 'vitest';
20+
import { InferenceType, toInferenceType } from './IInference';
21+
22+
test('toInferenceType', () => {
23+
expect(toInferenceType('llama-cpp')).toEqual(InferenceType.LLAMA_CPP);
24+
expect(toInferenceType(InferenceType.LLAMA_CPP)).toEqual(InferenceType.LLAMA_CPP);
25+
expect(toInferenceType('not-known')).toEqual(InferenceType.NONE);
26+
expect(toInferenceType('')).toEqual(InferenceType.NONE);
27+
expect(toInferenceType(undefined)).toEqual(InferenceType.NONE);
28+
});

packages/shared/src/models/IInference.ts

Lines changed: 12 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -29,6 +29,18 @@ const InferenceTypeLabel = {
2929
none: 'None',
3030
};
3131

32+
// toInferenceType casts a string to InferenceType
33+
// returns NONE value if input value is undefined or unknown
34+
export function toInferenceType(type: string | undefined): InferenceType {
35+
if (!type) {
36+
return InferenceType.NONE;
37+
}
38+
if (Object.values(InferenceType).includes(type as InferenceType)) {
39+
return type as InferenceType;
40+
}
41+
return InferenceType.NONE;
42+
}
43+
3244
export function inferenceTypeLabel(type: InferenceType): string {
3345
if (type in InferenceTypeLabel) {
3446
return InferenceTypeLabel[type];

0 commit comments

Comments
 (0)