From 95a0f61c434a804472fdced2cd380c7fa241f66e Mon Sep 17 00:00:00 2001
From: ankit
Date: Sat, 17 May 2025 18:25:18 +0530
Subject: [PATCH] feat: save model name with conversation history in llama-server

---
 tools/server/webui/src/components/ChatMessage.tsx | 7 ++++++-
 1 file changed, 6 insertions(+), 1 deletion(-)

diff --git a/tools/server/webui/src/components/ChatMessage.tsx b/tools/server/webui/src/components/ChatMessage.tsx
index ee59de450d1ff..e40893f0a2a20 100644
--- a/tools/server/webui/src/components/ChatMessage.tsx
+++ b/tools/server/webui/src/components/ChatMessage.tsx
@@ -37,7 +37,7 @@ export default function ChatMessage({
   onChangeSibling(sibling: Message['id']): void;
   isPending?: boolean;
 }) {
-  const { viewingChat, config } = useAppContext();
+  const { viewingChat, config, serverProps } = useAppContext();
   const [editingContent, setEditingContent] = useState<string | null>(null);
   const timings = useMemo(
     () =>
@@ -175,6 +175,11 @@ export default function ChatMessage({
               >
                 Speed: {timings.predicted_per_second.toFixed(1)} t/s
               </div>
+              <div>
+                <span>
+                  Model: {serverProps?.model_path?.split(/(\\|\/)/).pop()}
+                </span>
+              </div>
               <div className="dropdown-content bg-base-100 z-10 w-64 p-2 shadow mt-4">
                 <b>Prompt</b>
                 <br />- Tokens: {timings.prompt_n}
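
The display name is derived client-side: the added JSX takes serverProps.model_path (populated from the server's /props response) and keeps only the final path segment. Below is a minimal standalone sketch of that expression; the helper name and the example paths are illustrative and not part of the patch.

// Sketch of the basename extraction used in the JSX above.
// Assumes modelPath looks like the model_path string reported by llama-server,
// e.g. "/models/foo.gguf" or "C:\\models\\foo.gguf"; undefined passes through.
function getModelDisplayName(modelPath?: string): string | undefined {
  // Splitting on a capturing group keeps the separators in the result array;
  // pop() then returns the last segment, i.e. the model file name.
  return modelPath?.split(/(\\|\/)/).pop();
}

// Example usage (paths are made up):
console.log(getModelDisplayName('/models/Llama-3.2-3B-Instruct-Q4_K_M.gguf')); // "Llama-3.2-3B-Instruct-Q4_K_M.gguf"
console.log(getModelDisplayName('C:\\models\\model.gguf'));                    // "model.gguf"
console.log(getModelDisplayName(undefined));                                   // undefined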