Commit acd4767

feat(server): add basic js tool call support
1 parent 814f795 commit acd4767

6 files changed: 161 additions, 25 deletions

tools/server/webui/package.json

Lines changed: 1 addition & 0 deletions
@@ -33,6 +33,7 @@
     "remark-math": "^6.0.0",
     "tailwindcss": "^4.1.1",
     "textlinestream": "^1.1.1",
+    "unist-util-visit": "^5.0.0",
     "vite-plugin-singlefile": "^2.0.3"
   },
   "devDependencies": {

tools/server/webui/src/Config.ts

Lines changed: 2 additions & 3 deletions
@@ -4,9 +4,7 @@ import { isNumeric } from './utils/misc';
 export const isDev = import.meta.env.MODE === 'development';

 // constants
-export const BASE_URL = new URL('.', document.baseURI).href
-  .toString()
-  .replace(/\/$/, '');
+export const BASE_URL = 'http://127.0.0.1:8080';

 export const CONFIG_DEFAULT = {
   // Note: in order not to introduce breaking changes, please keep the same data type (number, string, etc) if you want to change the default value. Do not use null or undefined for default value.
@@ -39,6 +37,7 @@ export const CONFIG_DEFAULT = {
   custom: '', // custom json-stringified object
   // experimental features
   pyIntepreterEnabled: false,
+  jsInterpreterToolUse: false,
 };
 export const CONFIG_INFO: Record<string, string> = {
   apiKey: 'Set the API Key if you are using --api-key option for the server.',

tools/server/webui/src/components/SettingDialog.tsx

Lines changed: 15 additions & 0 deletions
@@ -254,6 +254,21 @@ const SETTING_SECTIONS: SettingSection[] = [
         ),
         key: 'pyIntepreterEnabled',
       },
+      {
+        type: SettingInputType.CHECKBOX,
+        label: (
+          <>
+            <b>Enable JavaScript tool use</b>
+            <br />
+            <small className="text-xs">
+              This allows the LLM to use your browser console as a tool. If
+              the model supports function calling, it can use the console to
+              do e.g. data analysis by itself.
+            </small>
+          </>
+        ),
+        key: 'jsInterpreterToolUse',
+      },
     ],
   },
 ];

tools/server/webui/src/utils/app.context.tsx

Lines changed: 116 additions & 22 deletions
@@ -5,6 +5,7 @@ import {
   Conversation,
   Message,
   PendingMessage,
+  ToolCall,
   ViewingChat,
 } from './types';
 import StorageUtils from './storage';
@@ -15,6 +16,7 @@ import {
 } from './misc';
 import { BASE_URL, CONFIG_DEFAULT, isDev } from '../Config';
 import { matchPath, useLocation, useNavigate } from 'react-router';
+import { JS_TOOL_CALL_SPEC } from './js_tool_call';

 interface AppContextValue {
   // conversations and messages
@@ -181,10 +183,13 @@ export const AppContextProvider = ({
     }
     if (isDev) console.log({ messages });

+    // stream does not support tool-use
+    const streamResponse = !config.jsInterpreterToolUse;
+
     // prepare params
     const params = {
       messages,
-      stream: true,
+      stream: streamResponse,
       cache_prompt: true,
       samplers: config.samplers,
       temperature: config.temperature,
@@ -206,6 +211,7 @@ export const AppContextProvider = ({
       dry_penalty_last_n: config.dry_penalty_last_n,
       max_tokens: config.max_tokens,
       timings_per_token: !!config.showTokensPerSecond,
+      tools: config.jsInterpreterToolUse ? [JS_TOOL_CALL_SPEC] : undefined,
       ...(config.custom.length ? JSON.parse(config.custom) : {}),
     };

@@ -221,36 +227,124 @@ export const AppContextProvider = ({
        body: JSON.stringify(params),
        signal: abortController.signal,
      });
+
      if (fetchResponse.status !== 200) {
        const body = await fetchResponse.json();
        throw new Error(body?.error?.message || 'Unknown error');
      }
-      const chunks = getSSEStreamAsync(fetchResponse);
-      for await (const chunk of chunks) {
-        // const stop = chunk.stop;
-        if (chunk.error) {
-          throw new Error(chunk.error?.message || 'Unknown error');
+
+      if (streamResponse) {
+        const chunks = getSSEStreamAsync(fetchResponse);
+        for await (const chunk of chunks) {
+          // const stop = chunk.stop;
+          if (chunk.error) {
+            throw new Error(chunk.error?.message || 'Unknown error');
+          }
+          const addedContent = chunk.choices[0].delta.content;
+          const lastContent = pendingMsg.content || '';
+          if (addedContent) {
+            pendingMsg = {
+              ...pendingMsg,
+              content: lastContent + addedContent,
+            };
+          }
+          const timings = chunk.timings;
+          if (timings && config.showTokensPerSecond) {
+            // only extract what's really needed, to save some space
+            pendingMsg.timings = {
+              prompt_n: timings.prompt_n,
+              prompt_ms: timings.prompt_ms,
+              predicted_n: timings.predicted_n,
+              predicted_ms: timings.predicted_ms,
+            };
+          }
+          setPending(convId, pendingMsg);
+          onChunk(); // don't need to switch node for pending message
        }
-        const addedContent = chunk.choices[0].delta.content;
-        const lastContent = pendingMsg.content || '';
-        if (addedContent) {
+      } else {
+        const responseData = await fetchResponse.json();
+        if (isDev) console.log({ responseData });
+
+        const choice = responseData.choices?.[0];
+        if (choice) {
+          const messageFromAPI = choice.message;
+          let newContent = '';
+
+          if (messageFromAPI.content) {
+            newContent = messageFromAPI.content;
+          }
+
+          if (
+            messageFromAPI.tool_calls &&
+            messageFromAPI.tool_calls.length > 0
+          ) {
+            console.log(messageFromAPI.tool_calls[0]);
+            for (let i = 0; i < messageFromAPI.tool_calls.length; i++) {
+              console.log('Tool use #' + i);
+              const tc = messageFromAPI.tool_calls[i] as ToolCall;
+              console.log(tc);
+
+              if (tc) {
+                if (
+                  tc.function.name === 'javascript_interpreter' &&
+                  config.jsInterpreterToolUse
+                ) {
+                  // Execute code provided
+                  const args = JSON.parse(tc.function.arguments);
+                  console.log('Arguments for tool call:');
+                  console.log(args);
+                  const result = eval(args.code);
+                  console.log(result);
+
+                  newContent += `<tool_result>${result}</tool_result>`;
+                }
+              }
+            }
+
+            const toolCallsInfo = messageFromAPI.tool_calls
+              .map(
+                (
+                  tc: any // Use 'any' for tc temporarily if type is not imported/defined here
+                ) =>
+                  `Tool Call Invoked: ${tc.function.name}\nArguments: ${tc.function.arguments}`
+              )
+              .join('\n\n');
+
+            if (newContent.length > 0) {
+              newContent += `\n\n${toolCallsInfo}`;
+            } else {
+              newContent = toolCallsInfo;
+            }
+            // TODO: Ideally, store structured tool_calls in pendingMsg if its type supports it.
+            // pendingMsg.tool_calls = messageFromAPI.tool_calls;
+          }
+
          pendingMsg = {
            ...pendingMsg,
-            content: lastContent + addedContent,
-          };
-        }
-        const timings = chunk.timings;
-        if (timings && config.showTokensPerSecond) {
-          // only extract what's really needed, to save some space
-          pendingMsg.timings = {
-            prompt_n: timings.prompt_n,
-            prompt_ms: timings.prompt_ms,
-            predicted_n: timings.predicted_n,
-            predicted_ms: timings.predicted_ms,
+            content: newContent,
          };
+
+          // Handle timings from the non-streaming response
+          // The exact location of 'timings' in responseData might vary by API.
+          // Assuming responseData.timings similar to streaming chunk for now.
+          const apiTimings = responseData.timings;
+          if (apiTimings && config.showTokensPerSecond) {
+            pendingMsg.timings = {
+              prompt_n: apiTimings.prompt_n,
+              prompt_ms: apiTimings.prompt_ms,
+              predicted_n: apiTimings.predicted_n,
+              predicted_ms: apiTimings.predicted_ms,
+            };
+          }
+          setPending(convId, pendingMsg);
+          onChunk(); // Update UI to show the processed message
+        } else {
+          console.error(
+            'API response missing choices or message:',
+            responseData
+          );
+          throw new Error('Invalid API response structure');
        }
-        setPending(convId, pendingMsg);
-        onChunk(); // don't need to switch node for pending message
      }
    } catch (err) {
      setPending(convId, null);
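
For reference, the non-streaming branch added above assumes an OpenAI-compatible chat completion body in which each entry of message.tool_calls carries its function arguments as a JSON-encoded string. A minimal sketch of the response shape the new code reads (only the fields it actually touches; the timings object is a llama.cpp server extension and may be absent):

// Sketch of the non-streaming response consumed by the else-branch above.
interface ChatCompletionResponse {
  choices: {
    message: {
      content: string | null;
      tool_calls?: ToolCall[]; // ToolCall as declared in utils/types.ts below
    };
  }[];
  timings?: {
    prompt_n: number;
    prompt_ms: number;
    predicted_n: number;
    predicted_ms: number;
  };
}
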
tools/server/webui/src/utils/js_tool_call.ts

Lines changed: 18 additions & 0 deletions
@@ -0,0 +1,18 @@
+export const JS_TOOL_CALL_SPEC = {
+  type: 'function',
+  function: {
+    name: 'javascript_interpreter',
+    description:
+      'Executes JavaScript code in the browser console and returns the output or error. The code should be self-contained. Use JSON.stringify for complex return objects.',
+    parameters: {
+      type: 'object',
+      properties: {
+        code: {
+          type: 'string',
+          description: 'The JavaScript code to execute.',
+        },
+      },
+      required: ['code'],
+    },
+  },
+};
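
The description in JS_TOOL_CALL_SPEC promises that errors are returned to the model, but the eval call in app.context.tsx above is not wrapped in a try/catch, so a throwing snippet aborts the whole response handler. A minimal sketch of an executor that keeps the spec's contract (runJsTool is a hypothetical helper, not part of this commit):

// Hypothetical helper: always returns a string, even when the code throws,
// matching the "returns the output or error" wording of the tool description.
export function runJsTool(argsJson: string): string {
  try {
    const { code } = JSON.parse(argsJson) as { code: string };
    // eslint-disable-next-line no-eval -- the tool is an explicit JS interpreter
    const result = eval(code);
    return typeof result === 'object' ? JSON.stringify(result) : String(result);
  } catch (err) {
    return `Error: ${err instanceof Error ? err.message : String(err)}`;
  }
}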

tools/server/webui/src/utils/types.ts

Lines changed: 9 additions & 0 deletions
@@ -89,3 +89,12 @@ export interface CanvasPyInterpreter {
 }

 export type CanvasData = CanvasPyInterpreter;
+
+export interface ToolCall {
+  id: string;
+  type: 'function';
+  function: {
+    name: string;
+    arguments: string; // JSON string of arguments
+  };
+}
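
Note that function.arguments is a JSON string rather than a parsed object, so consumers of ToolCall must JSON.parse it before use. A small usage sketch (the id and code values are illustrative, and the argument shape shown is the one javascript_interpreter expects):

// Illustrative ToolCall value and argument parsing.
const call: ToolCall = {
  id: 'call_0',
  type: 'function',
  function: {
    name: 'javascript_interpreter',
    arguments: '{"code":"[1, 2, 3].reduce((a, b) => a + b, 0)"}',
  },
};
const { code } = JSON.parse(call.function.arguments) as { code: string };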
