Skip to content

Commit 0d4c50d

Browse files
committed
add log to llm/agent, fix clear to keep prompt
add AiAgent.LoadConversation builtin
1 parent 0157338 commit 0d4c50d

File tree

4 files changed

+469
-13
lines changed

4 files changed

+469
-13
lines changed

.gitignore

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -33,4 +33,5 @@ server.key
3333
*.in
3434
sujs_darwin_*
3535
sujs_linux_*
36-
sujs_windows_*
36+
sujs_windows_*
37+
.ai/

builtin/aiagent.go

Lines changed: 11 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -98,6 +98,17 @@ func agent_ClearHistory(this Value) Value {
9898
return nil
9999
}
100100

101+
var _ = method(agent_LoadConversation, "(filename)")
102+
103+
func agent_LoadConversation(th *Thread, this Value, args []Value) Value {
104+
err := this.(*suAgent).agent.LoadConversation(ToStr(args[0]))
105+
if err != nil {
106+
th.ReturnThrow = true
107+
return SuStr("LoadConversation: " + err.Error())
108+
}
109+
return True
110+
}
111+
101112
var _ = method(agent_Close, "()")
102113

103114
func agent_Close(this Value) Value {

llm/agent.go

Lines changed: 183 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -5,36 +5,195 @@ package llm
55

66
import (
77
"context"
8+
"encoding/json"
9+
"fmt"
10+
"log"
11+
"os"
12+
"path/filepath"
813
"strings"
14+
"time"
915
)
1016

1117
// Agent holds a chat conversation against an OpenAI-compatible client,
// optionally dispatching tool calls through an MCP client, and mirrors
// the conversation to a markdown log file under aiDir.
type Agent struct {
	client    *OpenAIClient
	mcpClient *MCPClient // optional; nil disables tool calls
	model     string
	prompt    string // system prompt; re-inserted by resetHistory
	history   []Message
	outfn     OutFn
	cancel    context.CancelFunc
	logFile   *os.File // nil until first log write; stays nil if the file can't be created
}

// aiDir is the directory where conversation log files are written.
var aiDir = ".ai"
// OutFn is the push callback for streaming output.
2131
// what is one of "think", "output", "tool", "complete"
2232
type OutFn func(what, data string)
2333

2434
// NewAgent creates an agent.
2535
// prompt and mcpClient are optional.
2636
func NewAgent(baseURL, apiKey, model, prompt string, mcpClient *MCPClient, outfn OutFn) *Agent {
27-
history := []Message{}
28-
if prompt != "" {
29-
history = append(history, Message{Role: "system", Content: prompt})
30-
}
31-
return &Agent{
37+
agent := &Agent{
3238
client: NewOpenAIClient(baseURL, apiKey),
3339
mcpClient: mcpClient,
3440
model: model,
35-
history: history,
41+
prompt: prompt,
3642
outfn: outfn,
3743
}
44+
agent.resetHistory()
45+
return agent
46+
}
47+
48+
func (agent *Agent) resetHistory() {
49+
agent.history = nil
50+
if agent.prompt != "" {
51+
agent.history = append(agent.history, Message{Role: "system", Content: agent.prompt})
52+
}
53+
}
54+
55+
func (agent *Agent) ensureLogFile() {
56+
if agent.logFile != nil {
57+
return
58+
}
59+
if err := os.MkdirAll(aiDir, 0755); err != nil {
60+
return
61+
}
62+
filename := fmt.Sprintf("ai%s.md", time.Now().Format("20060102_150405"))
63+
path := filepath.Join(aiDir, filename)
64+
f, err := os.Create(path)
65+
if err != nil {
66+
return
67+
}
68+
agent.logFile = f
69+
// Log the model at the start
70+
if agent.model != "" {
71+
agent.logWrite("## Model\n\n" + agent.model + "\n\n")
72+
}
73+
}
74+
75+
func (agent *Agent) logMessage(role, content string) {
76+
agent.ensureLogFile()
77+
if agent.logFile == nil {
78+
return
79+
}
80+
var marker string
81+
switch role {
82+
case "user":
83+
marker = "## User\n\n"
84+
case "assistant":
85+
marker = "## Assistant\n\n"
86+
case "system":
87+
marker = "## Prompt\n\n"
88+
default:
89+
marker = "## " + role + "\n\n"
90+
}
91+
agent.logWrite(marker + content + "\n\n")
92+
}
93+
94+
func (agent *Agent) logAssistantToolCalls(msg Message) {
95+
agent.ensureLogFile()
96+
if agent.logFile == nil {
97+
return
98+
}
99+
b, _ := json.Marshal(msg)
100+
agent.logWrite("## AssistantTool\n\n" + string(b) + "\n\n")
101+
}
102+
103+
func (agent *Agent) logToolResult(msg Message) {
104+
agent.ensureLogFile()
105+
if agent.logFile == nil {
106+
return
107+
}
108+
b, _ := json.Marshal(msg)
109+
agent.logWrite("## ToolResult\n\n" + string(b) + "\n\n")
110+
}
111+
112+
func (agent *Agent) closeLogFile() {
113+
if agent.logFile != nil {
114+
agent.logFile.Close()
115+
agent.logFile = nil
116+
}
117+
}
118+
119+
func (agent *Agent) logWrite(s string) {
120+
if _, err := agent.logFile.WriteString(s); err != nil {
121+
log.Println("log write error:", err)
122+
}
123+
}
124+
125+
// LoadConversation loads a conversation from a markdown file.
126+
// The file should be in the format created by the logging.
127+
// The current prompt is used, not the one from the file.
128+
// The loaded conversation is copied to a new log file and logging continues there.
129+
func (agent *Agent) LoadConversation(path string) error {
130+
data, err := os.ReadFile(path)
131+
if err != nil {
132+
return err
133+
}
134+
agent.closeLogFile()
135+
agent.resetHistory()
136+
if err := agent.parseConversation(string(data)); err != nil {
137+
return err
138+
}
139+
// Copy the loaded conversation to a new log file
140+
agent.copyToNewLogFile(string(data))
141+
return nil
142+
}
143+
144+
// copyToNewLogFile creates a new log file and copies existing conversation content
145+
func (agent *Agent) copyToNewLogFile(content string) {
146+
if err := os.MkdirAll(aiDir, 0755); err != nil {
147+
return
148+
}
149+
filename := fmt.Sprintf("ai%s.md", time.Now().Format("20060102_150405"))
150+
path := filepath.Join(aiDir, filename)
151+
f, err := os.Create(path)
152+
if err != nil {
153+
return
154+
}
155+
agent.logFile = f
156+
agent.logWrite(content)
157+
agent.logWrite("\n## Continued\n\n---\n\n")
158+
}
159+
160+
func (agent *Agent) parseConversation(content string) error {
161+
for section := range strings.SplitSeq(content, "## ") {
162+
if section == "" {
163+
continue
164+
}
165+
lines := strings.SplitN(section, "\n", 2)
166+
if len(lines) < 2 {
167+
continue
168+
}
169+
role := strings.TrimSpace(lines[0])
170+
body := strings.TrimSpace(lines[1])
171+
switch role {
172+
case "Model":
173+
agent.model = body
174+
case "User":
175+
agent.history = append(agent.history, Message{Role: "user", Content: body})
176+
case "Assistant":
177+
agent.history = append(agent.history, Message{Role: "assistant", Content: body})
178+
case "Prompt", "Continued":
179+
// skip - use current prompt, Continued is just a marker
180+
case "AssistantTool":
181+
var msg Message
182+
if err := json.Unmarshal([]byte(body), &msg); err == nil {
183+
agent.history = append(agent.history, msg)
184+
} else {
185+
log.Println("parseConversation AssistantTool:", err)
186+
}
187+
case "ToolResult":
188+
var msg Message
189+
if err := json.Unmarshal([]byte(body), &msg); err == nil {
190+
agent.history = append(agent.history, msg)
191+
} else {
192+
log.Println("parseConversation ToolResult:", err)
193+
}
194+
}
195+
}
196+
return nil
38197
}
39198

40199
func (agent *Agent) Input(input string) {
@@ -50,12 +209,17 @@ func (agent *Agent) Interrupt() {
50209

51210
// SetModel sets the model to use for requests
52211
func (agent *Agent) SetModel(model string) {
212+
if agent.logFile != nil && model != agent.model {
213+
agent.logWrite("## Model\n\n" + model + "\n\n")
214+
}
53215
agent.model = model
54216
}
55217

56-
// ClearHistory clears the conversation history
218+
// ClearHistory clears the conversation history and starts a new log file
219+
// The prompt is retained/restored.
57220
func (agent *Agent) ClearHistory() {
58-
agent.history = []Message{}
221+
agent.closeLogFile()
222+
agent.resetHistory()
59223
}
60224

61225
// request sends the request and streams the response to outfn
@@ -65,6 +229,7 @@ func (agent *Agent) request(input string) {
65229
defer cancel()
66230

67231
agent.history = append(agent.history, Message{Role: "user", Content: input})
232+
agent.logMessage("user", input)
68233

69234
for {
70235
req := &ChatRequest{
@@ -161,11 +326,13 @@ func (agent *Agent) request(input string) {
161326
// Handle tool calls
162327
if len(toolCallsList) > 0 && agent.mcpClient != nil {
163328
// Add assistant message with tool calls to history
164-
agent.history = append(agent.history, Message{
329+
assistantMsg := Message{
165330
Role: "assistant",
166331
Content: content.String(),
167332
ToolCalls: toolCallsList,
168-
})
333+
}
334+
agent.history = append(agent.history, assistantMsg)
335+
agent.logAssistantToolCalls(assistantMsg)
169336

170337
// Process each tool call
171338
for _, tc := range toolCallsList {
@@ -174,13 +341,16 @@ func (agent *Agent) request(input string) {
174341
result, err := agent.mcpClient.CallToolFromLLM(ctx, tc)
175342
if err != nil {
176343
agent.emit("tool", "**Error:** "+err.Error()+"<br>")
344+
result = "Error: " + err.Error()
177345
}
178346
// Add tool result to history
179-
agent.history = append(agent.history, Message{
347+
toolMsg := Message{
180348
Role: "tool",
181349
Content: result,
182350
ToolCallID: tc.ID,
183-
})
351+
}
352+
agent.history = append(agent.history, toolMsg)
353+
agent.logToolResult(toolMsg)
184354
}
185355

186356
// Continue the loop to get next response
@@ -189,6 +359,7 @@ func (agent *Agent) request(input string) {
189359

190360
// No tool calls, we're done
191361
agent.history = append(agent.history, Message{Role: "assistant", Content: content.String()})
362+
agent.logMessage("assistant", content.String())
192363
agent.emit("complete", "")
193364
return
194365
}

0 commit comments

Comments
 (0)