
Commit ce8d3a9

wanhakim and cheehook committed
agentqna patches (#53)
* patching to latest agent output format for openwebui compatible
* change source of agent-tools
* removed agentqna tools from app-backend templates
* remove tool mount for sql agent as it cause confusion to react agent
* change token counting for latest agent changes; fix text area not grey out during agent is generating response
* update for sql agent

Signed-off-by: cheehook <chee.hoo.kok@intel.com>
Signed-off-by: wwanrif <wan.abdul.hakim.b.wan.arif@intel.com>
Co-authored-by: cheehook <chee.hoo.kok@intel.com>
1 parent 0cddbe0 commit ce8d3a9

17 files changed (+342, -281 lines)


.gitignore

Lines changed: 2 additions & 1 deletion
@@ -1,3 +1,4 @@
 test-results/
 **.log
-**/report.html
+**/report.html
+docker-compose

app-backend/templates/tools/supervisor_agent_tools.yaml

Lines changed: 0 additions & 20 deletions
This file was deleted.

app-backend/templates/tools/tools.py

Lines changed: 0 additions & 35 deletions
This file was deleted.

app-backend/templates/tools/worker_agent_tools.py

Lines changed: 0 additions & 40 deletions
This file was deleted.

app-backend/templates/tools/worker_agent_tools.yaml

Lines changed: 0 additions & 11 deletions
This file was deleted.

app-frontend/react/src/components/Conversation/Conversation.tsx

Lines changed: 59 additions & 41 deletions
@@ -41,6 +41,7 @@ const Conversation = ({ title, enabledUiFeatures }: ConversationProps) => {
   const [startTime, setStartTime] = useState<number | null>(null);
   const [isAssistantTyping, setIsAssistantTyping] = useState<boolean>(false);
   const [showInferenceParams, setShowInferenceParams] = useState<boolean>(true);
+  // const [isInThinkMode, setIsInThinkMode] = useState<boolean>(false);

   const toSend = "Enter";

@@ -74,6 +75,7 @@ const Conversation = ({ title, enabledUiFeatures }: ConversationProps) => {
       maxTokens: tokenLimit,
       temperature: temperature,
       model: "Intel/neural-chat-7b-v3-3",
+      // setIsInThinkMode
     });
     setPrompt("");
     setStartTime(Date.now());
@@ -89,24 +91,38 @@ const Conversation = ({ title, enabledUiFeatures }: ConversationProps) => {
       let tokenLength: number;
       if (isAgent) {
         const currentSteps = getCurrentAgentSteps();
-        const allContent = currentSteps.flatMap(step => step.content).join(" ");
-        tokenLength = allContent.split(" ").length;
+        const stepsContent = currentSteps.flatMap(step => step.content).join(" ");
+        const stepsSource = currentSteps.flatMap(step => step.source).join(" ");
+        const allContent = [stepsContent, stepsSource, onGoingResult].filter(str => str.trim()).join(" ");
+        let prevTokenLen = messageTokenData[`${selectedConversationId}-${currentMessageIndex}`]?.tokens || 0;
+        tokenLength = allContent.split(/\s+/).filter(token => token.length > 0).length + prevTokenLen;
+
+        console.log("Agent Token Calc:", {
+          stepsContent,
+          stepsSource,
+          onGoingResult,
+          tokenLength
+        });
       } else {
-        tokenLength = onGoingResult.split(" ").length;
+        tokenLength = onGoingResult.split(/\s+/).filter(token => token.length > 0).length;
       }
-
+
       const currentTimestamp = Date.now();
       const elapsedTime = (currentTimestamp - startTime) / 1000;
       const tokenRate = elapsedTime > 0 ? tokenLength / elapsedTime : 0;
-
-      setMessageTokenData((prev) => ({
-        ...prev,
-        [`${selectedConversationId}-${currentMessageIndex}`]: { tokens: tokenLength, rate: tokenRate, time: elapsedTime },
-      }));
-
+
+
+      setMessageTokenData((prev) => {
+        const updatedData = {
+          ...prev,
+          [`${selectedConversationId}-${currentMessageIndex}`]: { tokens: tokenLength, rate: tokenRate, time: elapsedTime },
+        };
+        console.log("Updated token data:", updatedData);
+        return updatedData;
+      });
+
       setIsAssistantTyping(false);
     }
-
+
     scrollToBottom();
   }, [onGoingResult, startTime, selectedConversation?.Messages, currentMessageIndex, isAgent]);

@@ -180,6 +196,7 @@ const Conversation = ({ title, enabledUiFeatures }: ConversationProps) => {
               tokenCount={message.role === MessageRole.Assistant ? tokens : undefined}
               tokenRate={message.role === MessageRole.Assistant ? rate : undefined}
               agentSteps={message.agentSteps || []}
+              // isInThink={isInThinkMode}
             />
           );
         })}
@@ -194,6 +211,7 @@ const Conversation = ({ title, enabledUiFeatures }: ConversationProps) => {
             tokenCount={0}
             tokenRate={0}
             agentSteps={getCurrentAgentSteps()}
+            // isInThink={isInThinkMode}
           />
         )}

@@ -207,40 +225,40 @@ const Conversation = ({ title, enabledUiFeatures }: ConversationProps) => {
             tokenCount={messageTokenData[`${selectedConversationId}-${currentMessageIndex}`]?.tokens}
             tokenRate={messageTokenData[`${selectedConversationId}-${currentMessageIndex}`]?.rate}
             agentSteps={getCurrentAgentSteps()}
+            // isInThink={isInThinkMode}
           />
         )}
       </div>

       <div className={styleClasses.conversatioSliders}>
-      <Button
-        variant="light"
-        size="xs"
-        radius="xl"
-        onClick={() => setShowInferenceParams(!showInferenceParams)}
-        rightSection={showInferenceParams ? <IconChevronDown size={14} /> : <IconChevronUp size={14} />}
-        mb="xs"
-      >
-        {showInferenceParams ? "Hide Inference Settings" : "Show Inference Settings"}
-      </Button>
-      <Collapse in={showInferenceParams} mb="md">
-        <Stack style={{ marginLeft: '10px' }}>
-          <Title size="sm">Inference Settings</Title>
-          <Text size="sm">Token Limit: {tokenLimit}</Text>
-          <Slider value={tokenLimit} onChange={setTokenLimit} min={10} max={500} step={1} />
-          <Text size="sm">Temperature: {temperature.toFixed(2)}</Text>
-          <Slider value={temperature} onChange={setTemperature} min={0.10} max={1.00} step={0.01} />
-          <Textarea
-            label="System Prompt"
-            placeholder="Set system prompt"
-            value={systemPrompt}
-            onChange={(e) => setSystemPrompt(e.target.value)}
-            size="sm"
-            mb="sm"
-          />
-        </Stack>
-      </Collapse>
-    </div>
-
+        <Button
+          variant="light"
+          size="xs"
+          radius="xl"
+          onClick={() => setShowInferenceParams(!showInferenceParams)}
+          rightSection={showInferenceParams ? <IconChevronDown size={14} /> : <IconChevronUp size={14} />}
+          mb="xs"
+        >
+          {showInferenceParams ? "Hide Inference Settings" : "Show Inference Settings"}
+        </Button>
+        <Collapse in={showInferenceParams} mb="md">
+          <Stack style={{ marginLeft: '10px' }}>
+            <Title size="sm">Inference Settings</Title>
+            <Text size="sm">Token Limit: {tokenLimit}</Text>
+            <Slider value={tokenLimit} onChange={setTokenLimit} min={10} max={500} step={1} />
+            <Text size="sm">Temperature: {temperature.toFixed(2)}</Text>
+            <Slider value={temperature} onChange={setTemperature} min={0.10} max={1.00} step={0.01} />
+            <Textarea
+              label="System Prompt"
+              placeholder="Set system prompt"
+              value={systemPrompt}
+              onChange={(e) => setSystemPrompt(e.target.value)}
+              size="sm"
+              mb="sm"
+            />
+          </Stack>
+        </Collapse>
+      </div>

       <div className={styleClasses.conversationActions}>
         <Tooltip
@@ -261,7 +279,7 @@ const Conversation = ({ title, enabledUiFeatures }: ConversationProps) => {
               <IconArrowRight style={{ width: rem(18), height: rem(18) }} stroke={1.5} />
             </ActionIcon>
           }
-          disabled={!enabledUiFeatures.chat || !!onGoingResult}
+          disabled={!enabledUiFeatures.chat || !!onGoingResult || isAssistantTyping}
         />
       </Tooltip>
     </div>
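The token-counting hunks above replace the old split(" ") count with a whitespace regex and fold in the agent step sources, the in-flight streaming result, and the previously stored count, and the send button now also stays disabled while the assistant is typing. As a minimal standalone sketch of that counting logic (the AgentStepLike interface, the countWhitespaceTokens helper, and the sample data are illustrative stand-ins, not code from the repository):

// Illustrative sketch only; mirrors the whitespace-based token counting
// added to Conversation.tsx above. AgentStepLike stands in for the app's
// AgentStep type.
interface AgentStepLike {
  content: string[];
  source: string[];
}

// Count "tokens" by splitting on whitespace and dropping empty strings.
function countWhitespaceTokens(text: string): number {
  return text.split(/\s+/).filter((token) => token.length > 0).length;
}

// Combine step content, step sources, and the in-flight result, then add the
// previously recorded count, as the updated effect does for agent responses.
function agentTokenCount(
  steps: AgentStepLike[],
  onGoingResult: string,
  prevTokens: number,
): number {
  const stepsContent = steps.flatMap((step) => step.content).join(" ");
  const stepsSource = steps.flatMap((step) => step.source).join(" ");
  const allContent = [stepsContent, stepsSource, onGoingResult]
    .filter((str) => str.trim())
    .join(" ");
  return countWhitespaceTokens(allContent) + prevTokens;
}

// 3 tokens from content + 2 from source + 2 from the streaming result,
// plus 10 previously recorded tokens = 17.
console.log(
  agentTokenCount(
    [{ content: ["calling", "search tool"], source: ["wiki page"] }],
    "final answer",
    10,
  ),
);

The rate shown in the UI is then this token count divided by the elapsed seconds, guarded against a zero elapsed time, as in the hunk above.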

app-frontend/react/src/components/Message/conversationMessage.tsx

Lines changed: 1 addition & 0 deletions
@@ -19,6 +19,7 @@ export interface ConversationMessageProps {
   tokenRate?: number;
   elapsedTime?: number;
   agentSteps: AgentStep[];
+  // isInThink: boolean;
 }

 export function ConversationMessage({ human, message, date, elapsedTime, tokenCount, tokenRate, agentSteps }: ConversationMessageProps) {

app-frontend/react/src/redux/Conversation/Conversation.ts

Lines changed: 1 addition & 0 deletions
@@ -8,6 +8,7 @@ export type ConversationRequest = {
   model: string;
   maxTokens: number;
   temperature: number;
+  // setIsInThinkMode: (isInThinkMode: boolean) => void;
 };

 export enum MessageRole {
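For reference, a hedged sketch of a request object matching the ConversationRequest fields visible in this commit; the model string and slider ranges come from the Conversation.tsx hunks above, while the concrete numeric values are placeholders and any fields outside these hunks are omitted:

// Illustrative only; fields mirror the ConversationRequest hunk above.
// Values are placeholders, and fields not shown in the diff are omitted.
type ConversationRequestSketch = {
  model: string;
  maxTokens: number;
  temperature: number;
  // setIsInThinkMode stays commented out in this commit, so it is left out here.
};

const request: ConversationRequestSketch = {
  model: "Intel/neural-chat-7b-v3-3", // model string hard-coded in Conversation.tsx
  maxTokens: 500,                     // Token Limit slider ranges 10-500 in the UI
  temperature: 0.3,                   // Temperature slider ranges 0.10-1.00 in the UI
};

console.log(request);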
