Commit 7a7f197

minor tweaks
1 parent 32812bc commit 7a7f197

3 files changed: +162 -17 lines

3 files changed

+162
-17
lines changed

src/agent/grok-agent.ts

Lines changed: 46 additions & 10 deletions
@@ -172,6 +172,34 @@ Current working directory: ${process.cwd()}`,
     return currentModel.toLowerCase().includes("grok");
   }
 
+  // Heuristic: enable web search only when likely needed
+  private shouldUseSearchFor(message: string): boolean {
+    const q = message.toLowerCase();
+    const keywords = [
+      "today",
+      "latest",
+      "news",
+      "trending",
+      "breaking",
+      "current",
+      "now",
+      "recent",
+      "x.com",
+      "twitter",
+      "tweet",
+      "what happened",
+      "as of",
+      "update on",
+      "release notes",
+      "changelog",
+      "price",
+    ];
+    if (keywords.some((k) => q.includes(k))) return true;
+    // crude date pattern (e.g., 2024/2025) may imply recency
+    if (/(20\d{2})/.test(q)) return true;
+    return false;
+  }
+
   async processUserMessage(message: string): Promise<ChatEntry[]> {
     // Add user message to conversation
     const userEntry: ChatEntry = {
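The heuristic is keyword-and-year based: cheap to evaluate, but coarse. A quick spot-check of how it classifies a few prompts (a hypothetical standalone snippet that re-inlines a subset of the logic, since `shouldUseSearchFor` is private):

```ts
// Hypothetical spot-check of the recency heuristic (subset of keywords).
const keywords = ["today", "latest", "news", "breaking", "price"];
const needsSearch = (msg: string): boolean => {
  const q = msg.toLowerCase();
  return keywords.some((k) => q.includes(k)) || /(20\d{2})/.test(q);
};

console.log(needsSearch("What's the latest on the xAI API?")); // true ("latest")
console.log(needsSearch("Refactor this function to be async")); // false
console.log(needsSearch("Did anything change in 2025?"));       // true (year pattern)
```

Note that the year regex also fires on prompts that merely mention a year (e.g. in a filename or copyright header), so some non-recency prompts will still enable search.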
@@ -192,7 +220,9 @@ Current working directory: ${process.cwd()}`,
       this.messages,
       tools,
       undefined,
-      this.isGrokModel() ? { search_parameters: { mode: "auto" } } : undefined
+      this.isGrokModel() && this.shouldUseSearchFor(message)
+        ? { search_parameters: { mode: "auto" } }
+        : { search_parameters: { mode: "off" } }
     );
 
     // Agent loop - continue until no more tool calls or max rounds reached
@@ -286,9 +316,9 @@ Current working directory: ${process.cwd()}`,
           this.messages,
           tools,
           undefined,
-          this.isGrokModel()
+          this.isGrokModel() && this.shouldUseSearchFor(message)
             ? { search_parameters: { mode: "auto" } }
-            : undefined
+            : { search_parameters: { mode: "off" } }
         );
       } else {
         // No more tool calls, add final response
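Besides gating on the heuristic, this changes the fallback shape: non-Grok models and non-recency prompts now send an explicit `{ mode: "off" }` instead of `undefined`. A condensed sketch of the ternary that now appears at all three call sites (a hypothetical helper, not code from this commit):

```ts
// Hypothetical extraction of the repeated search-gating ternary.
type SearchMode = "auto" | "off";

function searchOptions(isGrok: boolean, needsSearch: boolean) {
  const mode: SearchMode = isGrok && needsSearch ? "auto" : "off";
  return { search_parameters: { mode } };
}
```

This assumes the backend tolerates `search_parameters` on models that never supported search; if it does not, keeping `undefined` for non-Grok models would be the safer fallback.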
@@ -389,6 +419,7 @@ Current working directory: ${process.cwd()}`,
     const maxToolRounds = this.maxToolRounds; // Prevent infinite loops
     let toolRounds = 0;
     let totalOutputTokens = 0;
+    let lastTokenUpdate = 0;
 
     try {
       // Agent loop - continue until no more tool calls or max rounds reached
@@ -409,9 +440,9 @@ Current working directory: ${process.cwd()}`,
         this.messages,
         tools,
         undefined,
-        this.isGrokModel()
+        this.isGrokModel() && this.shouldUseSearchFor(message)
           ? { search_parameters: { mode: "auto" } }
-          : undefined
+          : { search_parameters: { mode: "off" } }
       );
       let accumulatedMessage: any = {};
       let accumulatedContent = "";
@@ -468,12 +499,16 @@ Current working directory: ${process.cwd()}`,
             };
 
             // Emit token count update
-            yield {
-              type: "token_count",
-              tokenCount: inputTokens + totalOutputTokens,
-            };
-          }
+            const now = Date.now();
+            if (now - lastTokenUpdate > 250) {
+              lastTokenUpdate = now;
+              yield {
+                type: "token_count",
+                tokenCount: inputTokens + totalOutputTokens,
+              };
+            }
           }
+        }
 
         // Add assistant entry to history
         const assistantEntry: ChatEntry = {
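The `lastTokenUpdate` check is a leading-edge throttle: at most one `token_count` event every 250 ms while chunks stream, with the final update after tool execution (next hunk) ensuring the displayed count still converges. The same pattern as a small reusable helper (a sketch, not part of this commit):

```ts
// Hypothetical helper capturing the same leading-edge throttle pattern.
function makeThrottleGate(intervalMs: number): () => boolean {
  let last = 0;
  return () => {
    const now = Date.now();
    if (now - last <= intervalMs) return false; // too soon, skip this emit
    last = now;
    return true; // caller may emit an update
  };
}

// Usage inside the stream loop:
//   const gate = makeThrottleGate(250);
//   if (gate()) yield { type: "token_count", tokenCount: ... };
```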
@@ -548,6 +583,7 @@ Current working directory: ${process.cwd()}`,
       inputTokens = this.tokenCounter.countMessageTokens(
         this.messages as any
       );
+      // Final token update after tools processed
       yield {
         type: "token_count",
         tokenCount: inputTokens + totalOutputTokens,

src/grok/client.ts

Lines changed: 5 additions & 2 deletions
@@ -48,13 +48,16 @@ export interface GrokResponse {
 export class GrokClient {
   private client: OpenAI;
   private currentModel: string = "grok-code-fast-1";
+  private defaultMaxTokens: number;
 
   constructor(apiKey: string, model?: string, baseURL?: string) {
     this.client = new OpenAI({
       apiKey,
       baseURL: baseURL || process.env.GROK_BASE_URL || "https://api.x.ai/v1",
       timeout: 360000,
     });
+    const envMax = Number(process.env.GROK_MAX_TOKENS);
+    this.defaultMaxTokens = Number.isFinite(envMax) && envMax > 0 ? envMax : 1536;
     if (model) {
       this.currentModel = model;
     }
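The default cap now falls back to 1536 tokens unless `GROK_MAX_TOKENS` parses to a positive number; that is a behavior change from the previous hard-coded 4000 for anyone who never sets the variable. An illustrative sketch of how the constructor's validation treats typical values:

```ts
// Illustrative: mirrors the constructor's env parsing above.
function resolveMaxTokens(raw: string | undefined): number {
  const envMax = Number(raw);
  return Number.isFinite(envMax) && envMax > 0 ? envMax : 1536;
}

resolveMaxTokens(undefined); // 1536 (unset, use default)
resolveMaxTokens("8192");    // 8192 (valid override)
resolveMaxTokens("0");       // 1536 (rejected, must be > 0)
resolveMaxTokens("abc");     // 1536 (NaN, rejected)
```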
@@ -81,7 +84,7 @@
       tools: tools || [],
       tool_choice: tools && tools.length > 0 ? "auto" : undefined,
       temperature: 0.7,
-      max_tokens: 4000,
+      max_tokens: this.defaultMaxTokens,
     };
 
     // Add search parameters if specified
@@ -111,7 +114,7 @@
       tools: tools || [],
       tool_choice: tools && tools.length > 0 ? "auto" : undefined,
       temperature: 0.7,
-      max_tokens: 4000,
+      max_tokens: this.defaultMaxTokens,
       stream: true,
     };
src/ui/components/chat-interface.tsx

Lines changed: 111 additions & 5 deletions
@@ -108,23 +108,129 @@ function ChatInterfaceWithAgent({
     setChatHistory([]);
   }, []);
 
-  // Process initial message if provided
+  // Process initial message if provided (streaming for faster feedback)
   useEffect(() => {
     if (initialMessage && agent) {
-      // First, immediately add the user message to chat history
       const userEntry: ChatEntry = {
         type: "user",
         content: initialMessage,
         timestamp: new Date(),
       };
       setChatHistory([userEntry]);
 
-      // Then process the message asynchronously
       const processInitialMessage = async () => {
         setIsProcessing(true);
-        const entries = await agent.processUserMessage(initialMessage);
-        setChatHistory(entries);
+        setIsStreaming(true);
+
+        try {
+          let streamingEntry: ChatEntry | null = null;
+          for await (const chunk of agent.processUserMessageStream(initialMessage)) {
+            switch (chunk.type) {
+              case "content":
+                if (chunk.content) {
+                  if (!streamingEntry) {
+                    const newStreamingEntry = {
+                      type: "assistant" as const,
+                      content: chunk.content,
+                      timestamp: new Date(),
+                      isStreaming: true,
+                    };
+                    setChatHistory((prev) => [...prev, newStreamingEntry]);
+                    streamingEntry = newStreamingEntry;
+                  } else {
+                    setChatHistory((prev) =>
+                      prev.map((entry, idx) =>
+                        idx === prev.length - 1 && entry.isStreaming
+                          ? { ...entry, content: entry.content + chunk.content }
+                          : entry
+                      )
+                    );
+                  }
+                }
+                break;
+              case "token_count":
+                if (chunk.tokenCount !== undefined) {
+                  setTokenCount(chunk.tokenCount);
+                }
+                break;
+              case "tool_calls":
+                if (chunk.toolCalls) {
+                  // Stop streaming for the current assistant message
+                  setChatHistory((prev) =>
+                    prev.map((entry) =>
+                      entry.isStreaming
+                        ? {
+                            ...entry,
+                            isStreaming: false,
+                            toolCalls: chunk.toolCalls,
+                          }
+                        : entry
+                    )
+                  );
+                  streamingEntry = null;
+
+                  // Add individual tool call entries to show tools are being executed
+                  chunk.toolCalls.forEach((toolCall) => {
+                    const toolCallEntry: ChatEntry = {
+                      type: "tool_call",
+                      content: "Executing...",
+                      timestamp: new Date(),
+                      toolCall: toolCall,
+                    };
+                    setChatHistory((prev) => [...prev, toolCallEntry]);
+                  });
+                }
+                break;
+              case "tool_result":
+                if (chunk.toolCall && chunk.toolResult) {
+                  setChatHistory((prev) =>
+                    prev.map((entry) => {
+                      if (entry.isStreaming) {
+                        return { ...entry, isStreaming: false };
+                      }
+                      if (
+                        entry.type === "tool_call" &&
+                        entry.toolCall?.id === chunk.toolCall?.id
+                      ) {
+                        return {
+                          ...entry,
+                          type: "tool_result",
+                          content: chunk.toolResult.success
+                            ? chunk.toolResult.output || "Success"
+                            : chunk.toolResult.error || "Error occurred",
+                          toolResult: chunk.toolResult,
+                        };
+                      }
+                      return entry;
+                    })
+                  );
+                  streamingEntry = null;
+                }
+                break;
+              case "done":
+                if (streamingEntry) {
+                  setChatHistory((prev) =>
+                    prev.map((entry) =>
+                      entry.isStreaming ? { ...entry, isStreaming: false } : entry
+                    )
+                  );
+                }
+                setIsStreaming(false);
+                break;
+            }
+          }
+        } catch (error: any) {
+          const errorEntry: ChatEntry = {
+            type: "assistant",
+            content: `Error: ${error.message}`,
+            timestamp: new Date(),
+          };
+          setChatHistory((prev) => [...prev, errorEntry]);
+          setIsStreaming(false);
+        }
+
         setIsProcessing(false);
+        processingStartTime.current = 0;
       };
 
       processInitialMessage();
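Since the whole stream handler runs inside one `useEffect` closure, every history mutation goes through the functional form of `setChatHistory`; reading `chatHistory` directly would observe a stale snapshot. A condensed sketch of the append step for `content` chunks (hypothetical, assuming the repo's `ChatEntry` type):

```ts
// Hypothetical reduction of the "content" case: only the last entry,
// and only while it is still flagged as streaming, receives the chunk.
function appendChunk(prev: ChatEntry[], text: string): ChatEntry[] {
  return prev.map((entry, idx) =>
    idx === prev.length - 1 && entry.isStreaming
      ? { ...entry, content: entry.content + text }
      : entry
  );
}

// Usage: setChatHistory((prev) => appendChunk(prev, chunk.content));
```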
