Skip to content

Commit 948d57e

Browse files
authored
Merge branch 'master' into vsIcons
2 parents 5ea6b00 + de845d8 commit 948d57e

File tree

7 files changed

+74
-5
lines changed

7 files changed

+74
-5
lines changed

libs/remix-ai-core/src/helpers/streamHandler.ts

Lines changed: 28 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -68,6 +68,7 @@ export const HandleOpenAIResponse = async (aiResponse: IAIStreamResponse | any,
6868
// Handle both IAIStreamResponse format and plain response for backward compatibility
6969
const streamResponse = aiResponse?.streamResponse || aiResponse
7070
const tool_callback = aiResponse?.callback
71+
const toolExecutionStatusCallback = aiResponse?.toolExecutionStatusCallback
7172
const reader = streamResponse.body?.getReader();
7273
const decoder = new TextDecoder("utf-8");
7374
let buffer = "";
@@ -143,7 +144,13 @@ export const HandleOpenAIResponse = async (aiResponse: IAIStreamResponse | any,
143144

144145
// Check if this is the finish reason for tool calls
145146
if (json.choices?.[0]?.finish_reason === "tool_calls" && tool_callback && toolCalls.size > 0) {
147+
toolExecutionStatusCallback?.(true);
146148
const response = await tool_callback(Array.from(toolCalls.values()))
149+
toolExecutionStatusCallback?.(false);
150+
// Keep the callback attached for recursive calls
151+
if (response && typeof response === 'object') {
152+
response.toolExecutionStatusCallback = toolExecutionStatusCallback;
153+
}
147154
cb("\n\n");
148155
HandleOpenAIResponse(response, cb, done_cb)
149156
return;
@@ -191,6 +198,7 @@ export const HandleMistralAIResponse = async (aiResponse: IAIStreamResponse | an
191198
// Handle both IAIStreamResponse format and plain response for backward compatibility
192199
const streamResponse = aiResponse?.streamResponse || aiResponse
193200
const tool_callback = aiResponse?.callback
201+
const toolExecutionStatusCallback = aiResponse?.toolExecutionStatusCallback
194202
const reader = streamResponse.body?.getReader();
195203
const decoder = new TextDecoder("utf-8");
196204
let buffer = "";
@@ -234,7 +242,13 @@ export const HandleMistralAIResponse = async (aiResponse: IAIStreamResponse | an
234242
const json = JSON.parse(jsonStr);
235243
threadId = json?.id || threadId;
236244
if (json.choices[0].delta.tool_calls && tool_callback){
245+
toolExecutionStatusCallback?.(true);
237246
const response = await tool_callback(json.choices[0].delta.tool_calls)
247+
// Keep the callback attached for recursive calls
248+
if (response && typeof response === 'object') {
249+
response.toolExecutionStatusCallback = toolExecutionStatusCallback;
250+
}
251+
toolExecutionStatusCallback?.(false);
238252
cb("\n\n");
239253
HandleMistralAIResponse(response, cb, done_cb)
240254
} else if (json.choices[0].delta.content){
@@ -259,6 +273,7 @@ export const HandleAnthropicResponse = async (aiResponse: IAIStreamResponse | an
259273
// Handle both IAIStreamResponse format and plain response for backward compatibility
260274
const streamResponse = aiResponse?.streamResponse || aiResponse
261275
const tool_callback = aiResponse?.callback
276+
const toolExecutionStatusCallback = aiResponse?.toolExecutionStatusCallback
262277
const reader = streamResponse.body?.getReader();
263278
const decoder = new TextDecoder("utf-8");
264279
let buffer = "";
@@ -334,7 +349,13 @@ export const HandleAnthropicResponse = async (aiResponse: IAIStreamResponse | an
334349
}));
335350

336351
if (toolCalls.length > 0) {
352+
toolExecutionStatusCallback?.(true);
337353
const response = await tool_callback(toolCalls)
354+
toolExecutionStatusCallback?.(false);
355+
// Keep the callback attached for recursive calls
356+
if (response && typeof response === 'object') {
357+
response.toolExecutionStatusCallback = toolExecutionStatusCallback;
358+
}
338359
cb("\n\n");
339360
HandleAnthropicResponse(response, cb, done_cb)
340361
return;
@@ -361,6 +382,7 @@ export const HandleOllamaResponse = async (aiResponse: IAIStreamResponse | any,
361382
// Handle both IAIStreamResponse format and plain response for backward compatibility
362383
const streamResponse = aiResponse?.streamResponse || aiResponse
363384
const tool_callback = aiResponse?.callback
385+
const toolExecutionStatusCallback = aiResponse?.toolExecutionStatusCallback
364386
const reader = streamResponse.body?.getReader();
365387
const decoder = new TextDecoder("utf-8");
366388
let resultText = "";
@@ -395,7 +417,13 @@ export const HandleOllamaResponse = async (aiResponse: IAIStreamResponse | any,
395417

396418
// Handle tool calls in Ollama format
397419
if (parsed.message?.tool_calls && tool_callback) {
420+
toolExecutionStatusCallback?.(true);
398421
const response = await tool_callback(parsed.message.tool_calls)
422+
toolExecutionStatusCallback?.(false);
423+
// Keep the callback attached for recursive calls
424+
if (response && typeof response === 'object') {
425+
response.toolExecutionStatusCallback = toolExecutionStatusCallback;
426+
}
399427
cb("\n\n");
400428
HandleOllamaResponse(response, cb, done_cb, reasoning_cb)
401429
return;

libs/remix-ai-core/src/inferencers/mcp/mcpInferencer.ts

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -385,7 +385,7 @@ export class MCPInferencer extends RemoteInferencer implements ICompletions, IGe
385385
const response = await super.answer(enrichedPrompt, enhancedOptions);
386386
let toolExecutionCount = 0;
387387

388-
const toolExecutionCallback = async (tool_calls) => {
388+
const toolExecutionStatusCallback = async (tool_calls) => {
389389

390390
// avoid circular tooling
391391
if (toolExecutionCount >= this.MAX_TOOL_EXECUTIONS) {
@@ -503,13 +503,13 @@ export class MCPInferencer extends RemoteInferencer implements ICompletions, IGe
503503

504504
// Send empty prompt - the tool results are in toolsMessages
505505
// Don't add extra prompts as they cause Anthropic to summarize instead of using full tool results
506-
if (options.provider === 'openai' || options.provider === 'mistralai') return { streamResponse: await super.answer(prompt, followUpOptions), callback: toolExecutionCallback } as IAIStreamResponse;
507-
else return { streamResponse: await super.answer("", followUpOptions), callback: toolExecutionCallback } as IAIStreamResponse;
506+
if (options.provider === 'openai' || options.provider === 'mistralai') return { streamResponse: await super.answer(prompt, followUpOptions), callback: toolExecutionStatusCallback } as IAIStreamResponse;
507+
else return { streamResponse: await super.answer("", followUpOptions), callback: toolExecutionStatusCallback } as IAIStreamResponse;
508508
}
509509
}
510510
}
511511

512-
return { streamResponse: response, callback:toolExecutionCallback } as IAIStreamResponse;
512+
return { streamResponse: response, callback:toolExecutionStatusCallback } as IAIStreamResponse;
513513
} catch (error) {
514514
return { streamResponse: await super.answer(enrichedPrompt, options) };
515515
}

libs/remix-ai-core/src/types/types.ts

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -111,7 +111,8 @@ export interface IParams {
111111

112112
export interface IAIStreamResponse{
113113
streamResponse: any,
114-
callback?: any
114+
callback?: any,
115+
toolExecutionStatusCallback?: (isExecuting: boolean) => void
115116
}
116117

117118
export enum AIRequestType {

libs/remix-ui/remix-ai-assistant/src/components/chat.tsx

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -116,6 +116,12 @@ export const ChatHistoryComponent: React.FC<ChatHistoryComponentProps> = ({
116116
)}
117117
</div>
118118
</div>
119+
{msg.role === 'assistant' && msg.isExecutingTools && (
120+
<div className="tool-execution-indicator mt-2 text-muted">
121+
<i className="fa fa-spinner fa-spin me-2"></i>
122+
<span>Executing tools...</span>
123+
</div>
124+
)}
119125

120126
{/* Feedback buttons */}
121127
{msg.role === 'assistant' && (

libs/remix-ui/remix-ai-assistant/src/components/remix-ui-remix-ai-assistant.tsx

Lines changed: 13 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -427,6 +427,19 @@ export const RemixUiRemixAiAssistant = React.forwardRef<
427427
...prev,
428428
{ id: assistantId, role: 'assistant', content: '', timestamp: Date.now(), sentiment: 'none' }
429429
])
430+
431+
// Add tool execution callback to response object
432+
const toolExecutionStatusCallback = (isExecuting: boolean) => {
433+
setMessages(prev =>
434+
prev.map(m => (m.id === assistantId ? { ...m, isExecutingTools: isExecuting } : m))
435+
)
436+
}
437+
438+
// Attach the callback to the response if it's an object
439+
if (response && typeof response === 'object') {
440+
response.toolExecutionStatusCallback = toolExecutionStatusCallback
441+
}
442+
430443
switch (assistantChoice) {
431444
case 'openai':
432445
HandleOpenAIResponse(

libs/remix-ui/remix-ai-assistant/src/css/remix-ai-assistant.css

Lines changed: 20 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -675,3 +675,23 @@ mark,
675675
.aiMarkup :where(h3) { margin-top: .85rem; }
676676

677677
.aiMarkup hr { margin: .95rem 0; }
678+
679+
/* Tool execution indicator */
680+
.tool-execution-indicator {
681+
display: flex;
682+
align-items: center;
683+
font-size: 0.9rem;
684+
padding: 8px 12px;
685+
border-radius: 6px;
686+
background-color: rgba(88, 166, 255, 0.1);
687+
border-left: 3px solid rgba(88, 166, 255, 0.5);
688+
}
689+
690+
.tool-execution-indicator i {
691+
color: #58a6ff;
692+
font-size: 1rem;
693+
}
694+
695+
.tool-execution-indicator span {
696+
color: inherit;
697+
}

libs/remix-ui/remix-ai-assistant/src/lib/types.ts

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -4,6 +4,7 @@ export type ChatMessage = {
44
content: string
55
timestamp: number
66
sentiment?: 'none' | 'like' | 'dislike'
7+
isExecutingTools?: boolean
78
}
89

910
export const assistantAvatar = 'assets/img/remixai-logoDefault.webp'//'assets/img/aiLogo.svg'

0 commit comments

Comments (0)