Merged
1 change: 1 addition & 0 deletions .gitignore
@@ -32,4 +32,5 @@ package-lock.json

/lib/
tsconfig.tsbuildinfo
tsconfig.build.tsbuildinfo
*storybook.log
119 changes: 77 additions & 42 deletions bin/chat.js
@@ -3,17 +3,14 @@ import { tools } from './tools.js'
/** @type {'text' | 'tool'} */
let outputMode = 'text' // default output mode

function systemPrompt() {
return 'You are a machine learning web application named "Hyperparam" running on a CLI terminal.'
const instructions =
'You are a machine learning web application named "Hyperparam" running on a CLI terminal.'
+ '\nYou assist users with analyzing and exploring datasets, particularly in parquet format.'
+ ' The website and api are available at hyperparam.app.'
+ ' The Hyperparam CLI tool can list and explore local parquet files.'
+ '\nYou are on a terminal and can only output: text, emojis, terminal colors, and terminal formatting.'
+ ' Don\'t add additional markdown or html formatting unless requested.'
+ (process.stdout.isTTY ? ` The terminal width is ${process.stdout.columns} characters.` : '')
}
/** @type {Message} */
const systemMessage = { role: 'system', content: systemPrompt() }

const colors = {
system: '\x1b[36m', // cyan
@@ -24,12 +21,13 @@ const colors = {
}

/**
* @import { Message } from './types.d.ts'
* @param {Object} chatInput
* @returns {Promise<Message>}
* @import { ResponsesInput, ResponseInputItem } from './types.d.ts'
* @param {ResponsesInput} chatInput
* @returns {Promise<ResponseInputItem[]>}
*/
async function sendToServer(chatInput) {
const response = await fetch('https://hyperparam.app/api/functions/openai/chat', {
// Send the request to the server
const response = await fetch('https://hyperparam.app/api/functions/openai/responses', {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify(chatInput),
@@ -40,8 +38,8 @@ async function sendToServer(chatInput) {
}

// Process the streaming response
/** @type {Message} */
const streamResponse = { role: 'assistant', content: '' }
/** @type {ResponseInputItem[]} */
const incoming = []
const reader = response.body?.getReader()
if (!reader) throw new Error('No response body')
const decoder = new TextDecoder()
@@ -66,14 +64,31 @@ async function sendToServer(chatInput) {
write('\n')
}
outputMode = 'text'
streamResponse.content += chunk.delta

// Append to incoming message
const last = incoming[incoming.length - 1]
if (last && 'role' in last && last.role === 'assistant' && last.id === chunk.item_id) {
// Append to existing assistant message
last.content += chunk.delta
} else {
// Create a new incoming message
incoming.push({ role: 'assistant', content: chunk.delta, id: chunk.item_id })
}

write(chunk.delta)
} else if (error) {
console.error(error)
throw new Error(error)
} else if (chunk.function) {
streamResponse.tool_calls ??= []
streamResponse.tool_calls.push(chunk)
} else if (type === 'function_call') {
incoming.push(chunk)
} else if (type === 'response.output_item.done' && chunk.item.type === 'reasoning') {
/** @type {import('./types.d.ts').ReasoningItem} */
const reasoningItem = {
type: 'reasoning',
id: chunk.item.id,
summary: chunk.item.summary,
}
incoming.push(reasoningItem)
} else if (!chunk.key) {
console.log('Unknown chunk', chunk)
}
@@ -82,53 +97,66 @@ async function sendToServer(chatInput) {
}
}
}
return streamResponse
return incoming
}

/**
* Send messages to the server and handle tool calls.
* Will mutate the messages array!
*
* @import { ToolCall, ToolHandler } from './types.d.ts'
* @param {Message[]} messages
* @import { ResponseFunctionToolCall, ToolHandler } from './types.d.ts'
* @param {ResponseInputItem[]} input
* @returns {Promise<void>}
*/
async function sendMessages(messages) {
async function sendMessages(input) {
/** @type {ResponsesInput} */
const chatInput = {
model: 'gpt-4o',
messages,
model: 'gpt-5',
instructions,
input,
reasoning: {
effort: 'low',
},
tools: tools.map(tool => tool.tool),
}
const response = await sendToServer(chatInput)
messages.push(response)
// handle tool results
if (response.tool_calls?.length) {
/** @type {{ toolCall: ToolCall, tool: ToolHandler, result: Promise<string> }[]} */
const toolResults = []
for (const toolCall of response.tool_calls) {
const tool = tools.find(tool => tool.tool.function.name === toolCall.function.name)
const incoming = await sendToServer(chatInput)

// handle tool calls
/** @type {{ toolCall: ResponseFunctionToolCall, tool: ToolHandler, result: Promise<string> }[]} */
const toolResults = []

// start handling tool calls
for (const message of incoming) {
if (message.type === 'function_call') {
const tool = tools.find(tool => tool.tool.name === message.name)
if (tool) {
const args = JSON.parse(toolCall.function?.arguments ?? '{}')
const args = JSON.parse(message.arguments ?? '{}')
const result = tool.handleToolCall(args)
toolResults.push({ toolCall, tool, result })
toolResults.push({ toolCall: message, tool, result })
} else {
throw new Error(`Unknown tool: ${toolCall.function.name}`)
throw new Error(`Unknown tool: ${message.name}`)
}
}
// tool mode
}

// tool mode
if (toolResults.length > 0) {
if (outputMode === 'text') {
write('\n')
}
outputMode = 'tool' // switch to tool output mode

// Wait for pending tool calls and process results
for (const toolResult of toolResults) {
const { toolCall, tool } = toolResult
const { call_id } = toolCall
try {
const content = await toolResult.result
const output = await toolResult.result

// Construct function call message
const args = JSON.parse(toolCall.function?.arguments ?? '{}')
const args = JSON.parse(toolCall.arguments)
const entries = Object.entries(args)
let func = toolCall.function.name
let func = toolCall.name
if (entries.length === 0) {
func += '()'
} else {
@@ -137,15 +165,22 @@ async function sendMessages(messages) {
func += `(${pairs.join(', ')})`
}
write(colors.tool, `${tool.emoji} ${func}`, colors.normal, '\n')
messages.push({ role: 'tool', content, tool_call_id: toolCall.id })
incoming.push({ type: 'function_call_output', output, call_id })
} catch (error) {
write(colors.error, `\nError calling tool ${toolCall.function.name}: ${error.message}`, colors.normal)
messages.push({ role: 'tool', content: `Error calling tool ${toolCall.function.name}: ${error.message}`, tool_call_id: toolCall.id })
const message = error instanceof Error ? error.message : String(error)
const toolName = toolCall.name ?? toolCall.id
write(colors.error, `\nError calling tool ${toolName}: ${message}`, colors.normal)
incoming.push({ type: 'function_call_output', output: `Error calling tool ${toolName}: ${message}`, call_id })
}
}

input.push(...incoming)

// send messages with tool results
await sendMessages(messages)
await sendMessages(input)
} else {
// no tool calls, just append incoming messages
input.push(...incoming)
}
}

@@ -196,8 +231,8 @@ function writeWithColor() {
}

export function chat() {
/** @type {Message[]} */
const messages = [systemMessage]
/** @type {ResponseInputItem[]} */
const messages = []
process.stdin.setEncoding('utf-8')

write(colors.system, 'question: ', colors.normal)
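
Reviewer note (not part of the diff): the chat.js changes above switch from a chat-completions payload ({ model, messages }) to a Responses-style payload ({ model, instructions, input, reasoning, tools }), and tool results are now returned as function_call_output items keyed by call_id instead of role: 'tool' messages keyed by tool_call_id. Below is a minimal, hedged sketch of one round trip using only the field names visible in the diff; the real type definitions live in bin/types.d.ts (not shown), and the example user message is hypothetical.

import { tools } from './tools.js'

/** @type {import('./types.d.ts').ResponseInputItem[]} */
const input = [
  // Assumed user-message shape; the authoritative shape is in bin/types.d.ts.
  { role: 'user', content: 'What parquet files are in this directory?' },
]

/** @type {import('./types.d.ts').ResponsesInput} */
const chatInput = {
  model: 'gpt-5',
  instructions: 'You are a machine learning web application named "Hyperparam" running on a CLI terminal.',
  input,
  reasoning: { effort: 'low' },
  tools: tools.map(tool => tool.tool),
}

// sendToServer (above) streams the reply and folds it into ResponseInputItem entries:
//   assistant text    -> { role: 'assistant', content, id }
//   reasoning summary -> { type: 'reasoning', id, summary }
//   tool call         -> { type: 'function_call', name, arguments, call_id, ... }
// sendMessages then answers each tool call with
//   { type: 'function_call_output', call_id, output }
// and recurses until the model stops requesting tools.
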
82 changes: 41 additions & 41 deletions bin/tools.js
@@ -12,24 +12,22 @@ export const tools = [
emoji: '📂',
tool: {
type: 'function',
function: {
name: 'list_files',
description: `List the files in a directory. Files are listed recursively up to ${fileLimit} per page.`,
parameters: {
type: 'object',
properties: {
path: {
type: 'string',
description: 'The path to list files from. Optional, defaults to the current directory.',
},
filetype: {
type: 'string',
description: 'Optional file type to filter by, e.g. "parquet", "csv". If not provided, all files are listed.',
},
offset: {
type: 'number',
description: 'Skip offset number of files in the listing. Defaults to 0. Optional.',
},
name: 'list_files',
description: `List the files in a directory. Files are listed recursively up to ${fileLimit} per page.`,
parameters: {
type: 'object',
properties: {
path: {
type: 'string',
description: 'The path to list files from. Optional, defaults to the current directory.',
},
filetype: {
type: 'string',
description: 'Optional file type to filter by, e.g. "parquet", "csv". If not provided, all files are listed.',
},
offset: {
type: 'number',
description: 'Skip offset number of files in the listing. Defaults to 0. Optional.',
},
},
},
@@ -61,31 +59,29 @@
emoji: '📄',
tool: {
type: 'function',
function: {
name: 'parquet_get_rows',
description: 'Get up to 5 rows of data from a parquet file.',
parameters: {
type: 'object',
properties: {
filename: {
type: 'string',
description: 'The name of the parquet file to read.',
},
offset: {
type: 'number',
description: 'The starting row index to fetch (0-indexed).',
},
limit: {
type: 'number',
description: 'The number of rows to fetch. Default 5. Maximum 5.',
},
orderBy: {
type: 'string',
description: 'The column name to sort by.',
},
name: 'parquet_get_rows',
description: 'Get up to 5 rows of data from a parquet file.',
parameters: {
type: 'object',
properties: {
filename: {
type: 'string',
description: 'The name of the parquet file to read.',
},
offset: {
type: 'number',
description: 'The starting row index to fetch (0-indexed).',
},
limit: {
type: 'number',
description: 'The number of rows to fetch. Default 5. Maximum 5.',
},
orderBy: {
type: 'string',
description: 'The column name to sort by.',
},
required: ['filename'],
},
required: ['filename'],
},
},
/**
@@ -133,6 +129,10 @@ function validateInteger(name, value, min, max) {
return value
}

/**
* @param {unknown} obj
* @param {number} [limit=1000]
*/
function stringify(obj, limit = 1000) {
const str = JSON.stringify(toJson(obj))
return str.length <= limit ? str : str.slice(0, limit) + '…'
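
Side note on the tools.js reshuffle (an observation, not part of the diff): the Responses-style tool schema used here hoists name, description, and parameters to the top level of each tool object instead of nesting them under a function key, which is why chat.js now matches on tool.tool.name rather than tool.tool.function.name. A minimal sketch of one tool entry in the new shape follows; the tool name and parameters are copied from the diff, while the handleToolCall body is purely illustrative.

// Illustrative tool entry in the flattened shape used by bin/tools.js after this change.
export const exampleTool = {
  emoji: '📂',
  tool: {
    type: 'function',
    name: 'list_files',
    description: 'List the files in a directory.',
    parameters: {
      type: 'object',
      properties: {
        path: {
          type: 'string',
          description: 'The path to list files from. Optional, defaults to the current directory.',
        },
      },
    },
  },
  // chat.js parses message.arguments with JSON.parse and awaits this handler;
  // the resolved string becomes the function_call_output's output field.
  handleToolCall: async ({ path = '.' } = {}) => {
    return JSON.stringify({ path, files: [] }) // placeholder result
  },
}
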