Skip to content

Commit 266a9f8

Browse files
author
Marvin Zhang
committed
refactor: consolidate refs and enhance focus handling in AssistantConsole and useAssistantConsole
- Moved messageListRef and chatInputRef to useAssistantConsole for better state management.
- Improved focus handling for chat input on conversation selection and component mount.
- Cleaned up code formatting for better readability and consistency.
- Updated AI model options in getLLMProviderItems for expanded functionality.
1 parent 337e965 commit 266a9f8

File tree

4 files changed

+81
-21
lines changed

4 files changed

+81
-21
lines changed

src/components/core/ai/AssistantConsole.vue

Lines changed: 2 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -19,6 +19,8 @@ const { t } = useI18n();
1919
const router = useRouter();
2020
2121
const {
22+
messageListRef,
23+
chatInputRef,
2224
currentConversationId,
2325
conversations,
2426
chatHistory,
@@ -44,10 +46,6 @@ const {
4446
extractErrorMessage,
4547
} = useAssistantConsole();
4648
47-
// Add ref for message list component
48-
const messageListRef = ref<{ scrollToBottom: () => Promise<void> } | null>(null);
49-
const chatInputRef = ref<InstanceType<typeof ClChatInput> | null>(null);
50-
5149
// Message handling
5250
const sendMessage = async (message: string) => {
5351
if (!message.trim()) return;

src/components/core/ai/useAssistantConsole.ts

Lines changed: 40 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -1,15 +1,22 @@
1-
import { computed, ref, watch, reactive } from 'vue';
1+
import { computed, ref, watch, reactive, onMounted } from 'vue';
22
import { useI18n } from 'vue-i18n';
33
import useRequest from '@/services/request';
44
import { getRequestBaseUrl } from '@/utils';
55
import { debounce } from 'lodash';
66
import { ElMessage } from 'element-plus';
77
import { AxiosError } from 'axios';
8+
import { ClChatInput } from '@/components';
89

910
const useAssistantConsole = () => {
1011
const { t } = useI18n();
1112
const { get } = useRequest();
1213

14+
// Refs
15+
const messageListRef = ref<{ scrollToBottom: () => Promise<void> } | null>(
16+
null
17+
);
18+
const chatInputRef = ref<InstanceType<typeof ClChatInput> | null>(null);
19+
1320
// State management
1421
const currentConversation = ref<ChatConversation | null>(null);
1522
const currentConversationId = ref<string>('');
@@ -27,7 +34,9 @@ const useAssistantConsole = () => {
2734
// Computed properties
2835
const currentConversationTitle = computed(() => {
2936
if (!currentConversationId.value) return t('components.ai.chatbot.newChat');
30-
return currentConversation.value?.title || t('components.ai.chatbot.newChat');
37+
return (
38+
currentConversation.value?.title || t('components.ai.chatbot.newChat')
39+
);
3140
});
3241

3342
const chatbotConfig = ref<ChatbotConfig>({
@@ -60,7 +69,9 @@ const useAssistantConsole = () => {
6069

6170
isLoadingMessages.value = true;
6271
try {
63-
const res = await get(`/ai/chat/conversations/${conversationId}/messages`);
72+
const res = await get(
73+
`/ai/chat/conversations/${conversationId}/messages`
74+
);
6475
const messages = (res.data || []).map((msg: any) => {
6576
const message: ChatMessage = {
6677
...msg,
@@ -171,10 +182,13 @@ const useAssistantConsole = () => {
171182

172183
// Message stream handling
173184
const createChatRequest = (message: string): ChatRequest => {
174-
const { provider, model, systemPrompt, temperature, maxTokens } = chatbotConfig.value;
185+
const { provider, model, systemPrompt, temperature, maxTokens } =
186+
chatbotConfig.value;
175187

176188
if (!provider || !model) {
177-
throw new Error('Please select a provider and model before sending a message');
189+
throw new Error(
190+
'Please select a provider and model before sending a message'
191+
);
178192
}
179193

180194
return {
@@ -220,7 +234,9 @@ const useAssistantConsole = () => {
220234
}
221235

222236
const contentKey = chunk.key || '';
223-
const contentIndex = currentMessage.contents.findIndex(c => c.key === contentKey);
237+
const contentIndex = currentMessage.contents.findIndex(
238+
c => c.key === contentKey
239+
);
224240

225241
if (contentIndex >= 0) {
226242
// Update existing content
@@ -316,7 +332,7 @@ const useAssistantConsole = () => {
316332
const chatRequest = createChatRequest(message);
317333
const baseUrl = getRequestBaseUrl();
318334
const url = `${baseUrl}/ai/chat/stream`;
319-
335+
320336
const response = await fetch(url, {
321337
method: 'POST',
322338
headers: getStreamHeaders(),
@@ -329,7 +345,11 @@ const useAssistantConsole = () => {
329345
throw new Error(extractErrorMessage(text));
330346
}
331347

332-
await processStreamData(response.body!.getReader(), responseIndex, onMessageUpdate);
348+
await processStreamData(
349+
response.body!.getReader(),
350+
responseIndex,
351+
onMessageUpdate
352+
);
333353
} catch (error) {
334354
if (error instanceof Error) {
335355
throw error;
@@ -342,6 +362,7 @@ const useAssistantConsole = () => {
342362
const selectConversation = async (conversationId: string) => {
343363
if (currentConversationId.value === conversationId) return;
344364

365+
chatInputRef.value?.focus();
345366
currentConversationId.value = conversationId;
346367
streamError.value = '';
347368
await loadConversationMessages(conversationId);
@@ -352,6 +373,7 @@ const useAssistantConsole = () => {
352373
localStorage.removeItem('currentConversationId');
353374
streamError.value = '';
354375
chatHistory.splice(0, chatHistory.length);
376+
chatInputRef.value?.focus();
355377
};
356378

357379
// Watch conversation ID changes
@@ -370,7 +392,16 @@ const useAssistantConsole = () => {
370392
}
371393
});
372394

395+
onMounted(() => {
396+
setTimeout(() => {
397+
chatInputRef.value?.focus();
398+
}, 200);
399+
});
400+
373401
return {
402+
// Refs
403+
messageListRef,
404+
chatInputRef,
374405
// State
375406
currentConversation,
376407
currentConversationId,
@@ -402,4 +433,4 @@ const useAssistantConsole = () => {
402433
};
403434
};
404435

405-
export default useAssistantConsole;
436+
export default useAssistantConsole;

src/utils/ai.ts

Lines changed: 38 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -20,10 +20,11 @@ export const getLLMProviderItems = (): LLMProviderItem[] => {
2020
defaultModels: [
2121
'gpt-4o',
2222
'gpt-4o-mini',
23-
'o1-mini',
2423
'o1',
24+
'o1-mini',
25+
'o1-pro',
2526
'o3-mini',
26-
'o3',
27+
'gpt-4.5-preview',
2728
],
2829
defaultApiVersions: ['2025-02-01-preview'],
2930
},
@@ -32,36 +33,67 @@ export const getLLMProviderItems = (): LLMProviderItem[] => {
3233
name: 'Anthropic',
3334
icon: ['svg', 'anthropic'],
3435
defaultModels: [
35-
'claude-3.7-sonnet',
36-
'claude-3.7-sonnet-thinking',
37-
'claude-3.5-sonnet',
36+
'claude-3-7-sonnet-latest',
37+
'claude-3-5-haiku-latest',
38+
'claude-3-5-sonnet-latest',
39+
'claude-3-opus-latest',
3840
],
3941
},
4042
{
4143
key: 'gemini',
4244
name: 'Gemini',
4345
icon: ['svg', 'gemini'],
46+
defaultModels: [
47+
'gemini-2.5-pro-preview-03-25',
48+
'gemini-2.0-flash',
49+
'gemini-2.0-flash-lite',
50+
'gemini-1.5-flash',
51+
'gemini-1.5-flash-8b',
52+
'gemini-1.5-pro',
53+
],
4454
},
4555
{
4656
key: 'grok',
4757
name: 'Grok',
4858
icon: ['svg', 'grok'],
59+
defaultModels: [
60+
'grok-3',
61+
'grok-3-fast',
62+
'grok-3-mini',
63+
'grok-3-mini-fast',
64+
],
4965
},
5066
{
5167
key: 'qwen',
5268
name: 'Qwen',
5369
icon: ['svg', 'qwen'],
54-
defaultModels: ['qwen-max', 'qwen-plus', 'qwen-turbo'],
70+
defaultModels: [
71+
'qwen-max',
72+
'qwen-plus',
73+
'qwen-turbo',
74+
'qwq-plus',
75+
'qwq-32b',
76+
'qwen-omni-turbo',
77+
],
5578
},
5679
{
5780
key: 'mistral',
5881
name: 'Mistral',
5982
icon: ['svg', 'mistral'],
83+
defaultModels: [
84+
'codestral-latest',
85+
'mistral-large-latest',
86+
'pixtral-large-latest',
87+
'mistral-saba-latest',
88+
'ministral-3b-latest',
89+
'ministral-8b-latest',
90+
],
6091
},
6192
{
6293
key: 'deepseek',
6394
name: 'DeepSeek',
6495
icon: ['svg', 'deepseek'],
96+
defaultModels: ['deepseek-chat', 'deepseek-reasoner'],
6597
},
6698
{
6799
key: 'openai-compatible',

src/views/system/detail/tabs/SystemDetailTabAi.vue

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -336,7 +336,7 @@ defineOptions({ name: 'ClSystemDetailTabAi' });
336336
<el-input
337337
v-model="customModelInput"
338338
:placeholder="$t('views.system.ai.addCustomModel')"
339-
@keyup.enter="addCustomModel"
339+
@keyup.enter.prevent="addCustomModel"
340340
>
341341
<template #append>
342342
<cl-fa-icon-button
@@ -436,7 +436,6 @@ defineOptions({ name: 'ClSystemDetailTabAi' });
436436
}
437437
438438
.custom-models {
439-
margin-top: 12px;
440439
441440
.add-model {
442441
margin-bottom: 12px;

0 commit comments

Comments (0)