Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
21 commits
Select commit Hold shift + click to select a range
0e921d2
WIP for Session Tests
nisranjan Aug 18, 2025
0f65d98
Sessions Test Cases Complete #466
nisranjan Aug 19, 2025
a1c80ac
Changes as per comments #497
nisranjan Sep 5, 2025
671311a
Merge branch 'main' into session_persistance
nisranjan Sep 5, 2025
68bb9c9
WIP for Adding Session Init() for Azure Open AI #459
nisranjan Sep 5, 2025
8f7862b
Merge branch 'session_persistance' of https://github.com/nisranjan/ku…
nisranjan Sep 5, 2025
cd4a59c
WIP OpenAI Session Initialization and persistance #465
nisranjan Sep 10, 2025
4333acd
WIP Added Initialize() method for Grok, enabling support for session …
nisranjan Sep 10, 2025
4e22973
WIP for Session Tests
nisranjan Aug 18, 2025
98db964
Sessions Test Cases Complete #466
nisranjan Aug 19, 2025
3e43875
Changes as per comments #497
nisranjan Sep 5, 2025
9ecc7a6
WIP for Adding Session Init() for Azure Open AI #459
nisranjan Sep 5, 2025
3d96b24
WIP OpenAI Session Initialization and persistance #465
nisranjan Sep 10, 2025
267b13e
WIP Added Initialize() method for Grok, enabling support for session …
nisranjan Sep 10, 2025
025a036
Removed the ListSession() call in GetLatestSession(), so #518 now loo…
nisranjan Sep 10, 2025
123954a
chore(format): add license headers and gofmt via dev/tasks/format.sh
nisranjan Oct 6, 2025
325734a
Revert "chore(format): add license headers and gofmt via dev/tasks/fo…
nisranjan Oct 6, 2025
68b0066
Reapply "chore(format): add license headers and gofmt via dev/tasks/f…
nisranjan Oct 6, 2025
5cfe3fe
Revert "Reapply "chore(format): add license headers and gofmt via dev…
nisranjan Oct 6, 2025
b996f0c
Merge branch 'session_persistance' of https://github.com/nisranjan/ku…
nisranjan Oct 8, 2025
00e681b
WIP #463 llama.cpp Session Init() implement #464 Ollama Session Init(…
nisranjan Oct 13, 2025
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 4 additions & 0 deletions go.mod
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@ require (
github.com/mark3labs/mcp-go v0.31.0
github.com/spf13/cobra v1.9.1
github.com/spf13/pflag v1.0.6
github.com/stretchr/testify v1.10.0
go.uber.org/mock v0.6.0
golang.org/x/sync v0.16.0
golang.org/x/term v0.31.0
Expand Down Expand Up @@ -61,6 +62,7 @@ require (
github.com/charmbracelet/x/cellbuf v0.0.13 // indirect
github.com/charmbracelet/x/exp/slice v0.0.0-20250327172914-2fdc97757edf // indirect
github.com/charmbracelet/x/term v0.2.1 // indirect
github.com/davecgh/go-spew v1.1.1 // indirect
github.com/dlclark/regexp2 v1.11.4 // indirect
github.com/erikgeiser/coninput v0.0.0-20211004153227-1c3628e74d0f // indirect
github.com/felixge/httpsnoop v1.0.4 // indirect
Expand All @@ -87,6 +89,7 @@ require (
github.com/ollama/ollama v0.6.5 // indirect
github.com/openai/openai-go v1.11.0 // indirect
github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c // indirect
github.com/pmezard/go-difflib v1.0.0 // indirect
github.com/rivo/uniseg v0.4.7 // indirect
github.com/sahilm/fuzzy v0.1.1 // indirect
github.com/spf13/cast v1.7.1 // indirect
Expand All @@ -111,4 +114,5 @@ require (
google.golang.org/genproto/googleapis/rpc v0.0.0-20250219182151-9fdb1cabc7b2 // indirect
google.golang.org/grpc v1.70.0 // indirect
google.golang.org/protobuf v1.36.5 // indirect
gopkg.in/yaml.v3 v3.0.1 // indirect
)
47 changes: 46 additions & 1 deletion gollm/azopenai.go
Original file line number Diff line number Diff line change
Expand Up @@ -271,10 +271,55 @@ func (c *AzureOpenAIChat) IsRetryableError(err error) bool {
}

// Initialize seeds the in-memory chat history from previously persisted
// session messages, enabling session restoration for the Azure OpenAI
// provider. Messages that cannot be converted are logged and skipped so a
// single malformed record does not abort restoring the rest of the history.
// It always returns nil.
func (c *AzureOpenAIChat) Initialize(messages []*api.Message) error {
	klog.Info("Initializing Azure OpenAI chat with history")
	c.history = make([]azopenai.ChatRequestMessageClassification, 0, len(messages))
	for _, msg := range messages {
		content, err := c.messageToAzureContent(msg)
		if err != nil {
			// Don't swallow the failure silently: record which message was dropped.
			klog.Warningf("skipping malformed session message: %v", err)
			continue
		}
		c.history = append(c.history, content)
	}
	return nil
}

// messageToAzureContent maps a persisted api.Message onto the Azure OpenAI
// request-message type used for chat history. User and agent messages become
// user messages, model messages become assistant messages; any other source
// is an error. Function call results and unknown payload types are rendered
// as user-visible text.
func (c *AzureOpenAIChat) messageToAzureContent(msg *api.Message) (azopenai.ChatRequestMessageClassification, error) {
	var fromModel bool
	switch msg.Source {
	case api.MessageSourceUser, api.MessageSourceAgent:
		// Agent messages are treated as user messages.
		fromModel = false
	case api.MessageSourceModel:
		fromModel = true
	default:
		return nil, fmt.Errorf("unknown message source: %s", msg.Source)
	}

	asUser := func(text string) azopenai.ChatRequestMessageClassification {
		return &azopenai.ChatRequestUserMessage{
			Content: azopenai.NewChatRequestUserMessageContent(text),
		}
	}

	switch v := msg.Payload.(type) {
	case string:
		if fromModel {
			return &azopenai.ChatRequestAssistantMessage{
				Content: azopenai.NewChatRequestAssistantMessageContent(v),
			}, nil
		}
		return asUser(v), nil
	case FunctionCallResult:
		// Function call results are surfaced as plain user-visible text.
		return asUser(fmt.Sprintf("Function call result: %s", v.Result)), nil
	default:
		// Unknown payload types fall back to their string rendering.
		return asUser(fmt.Sprintf("%v", v)), nil
	}
}

func (c *AzureOpenAIChat) SendStreaming(ctx context.Context, contents ...any) (ChatResponseIterator, error) {
// TODO: Implement streaming
response, err := c.Send(ctx, contents...)
Expand Down
59 changes: 58 additions & 1 deletion gollm/grok.go
Original file line number Diff line number Diff line change
Expand Up @@ -381,10 +381,67 @@ func (cs *grokChatSession) IsRetryableError(err error) bool {
}

// Initialize seeds the in-memory chat history from previously persisted
// session messages, enabling session restoration for the Grok provider.
// Messages that cannot be converted are logged and skipped so a single
// malformed record does not abort restoring the rest of the history.
// It always returns nil.
func (cs *grokChatSession) Initialize(messages []*api.Message) error {
	klog.Info("Initializing Grok chat with history")
	cs.history = make([]openai.ChatCompletionMessageParamUnion, 0, len(messages))
	for _, msg := range messages {
		content, err := cs.messageToGrokProvider(msg)
		if err != nil {
			// Don't swallow the failure silently: record which message was dropped.
			klog.Warningf("skipping malformed session message: %v", err)
			continue
		}
		cs.history = append(cs.history, content)
	}
	return nil
}

// messageToGrokProvider converts a persisted api.Message into the
// OpenAI-compatible message union used by the Grok client. User messages map
// to user, model messages to assistant, and agent messages to system; any
// other source is an error. Function call results are replayed as tool
// messages keyed by their call ID, and unknown payload types are rendered
// via their string representation.
func (cs *grokChatSession) messageToGrokProvider(msg *api.Message) (openai.ChatCompletionMessageParamUnion, error) {
	// Reject unrecognized sources up front, before looking at the payload.
	switch msg.Source {
	case api.MessageSourceUser, api.MessageSourceModel, api.MessageSourceAgent:
		// Known source; conversion continues below.
	default:
		return openai.UserMessage(""), fmt.Errorf("unknown message source: %s", msg.Source)
	}

	// Tool results carry their own role regardless of the message source.
	if result, ok := msg.Payload.(FunctionCallResult); ok {
		resultJSON, err := json.Marshal(result.Result)
		if err != nil {
			return openai.UserMessage(""), fmt.Errorf("failed to marshal function call result: %w", err)
		}
		return openai.ToolMessage(string(resultJSON), result.ID), nil
	}

	// Everything else is carried as text; unknown types fall back to %v.
	text, isString := msg.Payload.(string)
	if !isString {
		text = fmt.Sprintf("%v", msg.Payload)
	}

	switch msg.Source {
	case api.MessageSourceModel:
		return openai.AssistantMessage(text), nil
	case api.MessageSourceAgent:
		// Grok treats agent messages as system messages.
		return openai.SystemMessage(text), nil
	default:
		return openai.UserMessage(text), nil
	}
}

// --- Helper structs for ChatResponse interface ---

type grokChatResponse struct {
Expand Down
42 changes: 41 additions & 1 deletion gollm/llamacpp.go
Original file line number Diff line number Diff line change
Expand Up @@ -296,10 +296,50 @@ func (c *LlamaCppChat) IsRetryableError(err error) bool {
}

// Initialize seeds the in-memory chat history from previously persisted
// session messages, enabling session restoration for the llama.cpp provider.
// Messages that cannot be converted are logged and skipped so a single
// malformed record does not abort restoring the rest of the history.
// It always returns nil.
func (c *LlamaCppChat) Initialize(messages []*api.Message) error {
	klog.Info("Initializing llama.cpp chat with history")
	c.history = make([]llamacppChatMessage, 0, len(messages))
	for _, msg := range messages {
		content, err := c.messageToLlamaCppContent(msg)
		if err != nil {
			// Don't swallow the failure silently: record which message was dropped.
			klog.Warningf("skipping malformed session message: %v", err)
			continue
		}
		c.history = append(c.history, content)
	}
	return nil
}

// messageToLlamaCppContent converts a persisted api.Message into the
// llama.cpp chat-message wire format. User messages map to "user", model
// messages to "assistant", and agent messages to "system"; any other source
// is an error. Function call results become "tool" messages with JSON
// content, and unknown payload types are rendered via their string form.
func (c *LlamaCppChat) messageToLlamaCppContent(msg *api.Message) (llamacppChatMessage, error) {
	var role string
	switch msg.Source {
	case api.MessageSourceUser:
		role = "user"
	case api.MessageSourceModel:
		role = "assistant"
	case api.MessageSourceAgent:
		// Agent messages seed the context as system messages.
		role = "system"
	default:
		return llamacppChatMessage{}, fmt.Errorf("unknown message source: %s", msg.Source)
	}

	// Tool results override the source-derived role.
	if result, ok := msg.Payload.(FunctionCallResult); ok {
		resultJSON, err := json.Marshal(result.Result)
		if err != nil {
			return llamacppChatMessage{}, fmt.Errorf("failed to marshal function call result: %w", err)
		}
		return llamacppChatMessage{Role: "tool", Content: ptrTo(string(resultJSON)), ToolCallID: result.ID}, nil
	}

	// Everything else is carried as text; unknown types fall back to %v.
	text, isString := msg.Payload.(string)
	if !isString {
		text = fmt.Sprintf("%v", msg.Payload)
	}
	return llamacppChatMessage{Role: role, Content: ptrTo(text)}, nil
}

// ptrTo returns a pointer to a copy of the supplied value. It is a small
// convenience helper for building struct literals with pointer fields.
func ptrTo[T any](t T) *T {
	v := t
	return &v
}
Expand Down
43 changes: 42 additions & 1 deletion gollm/ollama.go
Original file line number Diff line number Diff line change
Expand Up @@ -210,10 +210,51 @@ func (c *OllamaChat) SendStreaming(ctx context.Context, contents ...any) (ChatRe
}

// Initialize seeds the in-memory chat history from previously persisted
// session messages, enabling session restoration for the Ollama provider.
// Messages that cannot be converted are logged and skipped so a single
// malformed record does not abort restoring the rest of the history.
// It always returns nil.
func (c *OllamaChat) Initialize(messages []*kctlApi.Message) error {
	klog.Info("Initializing ollama chat with history")
	c.history = make([]api.Message, 0, len(messages))
	for _, msg := range messages {
		content, err := c.messageToOllamaContent(msg)
		if err != nil {
			// Don't swallow the failure silently: record which message was dropped.
			klog.Warningf("skipping malformed session message: %v", err)
			continue
		}
		c.history = append(c.history, content)
	}
	return nil
}

// messageToOllamaContent converts a persisted kubectl-ai message into the
// Ollama chat-message format. User messages map to "user", model messages to
// "assistant", and agent messages to "system"; any other source is an error.
// Function call results are embedded as user-role JSON text, and unknown
// payload types are rendered via their string form.
func (c *OllamaChat) messageToOllamaContent(msg *kctlApi.Message) (api.Message, error) {
	var role string
	switch msg.Source {
	case kctlApi.MessageSourceUser:
		role = "user"
	case kctlApi.MessageSourceModel:
		role = "assistant"
	case kctlApi.MessageSourceAgent:
		// Agent messages seed the context as system messages.
		role = "system"
	default:
		return api.Message{}, fmt.Errorf("unknown message source: %s", msg.Source)
	}

	// Ollama's history API has no distinct tool role, so tool output is
	// embedded as user-role JSON text to provide context.
	if result, ok := msg.Payload.(FunctionCallResult); ok {
		resultJSON, err := json.Marshal(result.Result)
		if err != nil {
			return api.Message{}, fmt.Errorf("failed to marshal function call result: %w", err)
		}
		return api.Message{Role: "user", Content: string(resultJSON)}, nil
	}

	// Everything else is carried as text; unknown types fall back to %v.
	text, isString := msg.Payload.(string)
	if !isString {
		text = fmt.Sprintf("%v", msg.Payload)
	}
	return api.Message{Role: role, Content: text}, nil
}

type OllamaChatResponse struct {
candidates []*OllamaCandidate
ollamaResponse api.ChatResponse
Expand Down
48 changes: 47 additions & 1 deletion gollm/openai.go
Original file line number Diff line number Diff line change
Expand Up @@ -424,10 +424,56 @@ func (cs *openAIChatSession) IsRetryableError(err error) bool {
}

// Initialize seeds the in-memory chat history from previously persisted
// session messages, enabling session restoration for the OpenAI provider.
// Messages that cannot be converted are logged and skipped so a single
// malformed record does not abort restoring the rest of the history.
// It always returns nil.
func (cs *openAIChatSession) Initialize(messages []*api.Message) error {
	klog.Info("Initializing OpenAI chat with history")
	cs.history = make([]openai.ChatCompletionMessageParamUnion, 0, len(messages))
	for _, msg := range messages {
		content, err := cs.messageToOpenAIContent(msg)
		if err != nil {
			// Don't swallow the failure silently: record which message was dropped.
			klog.Warningf("skipping malformed session message: %v", err)
			continue
		}
		cs.history = append(cs.history, content)
	}
	return nil
}

// messageToOpenAIContent converts a persisted api.Message into an OpenAI
// chat-completion message. User messages map to user, model messages to
// assistant, and agent messages to system (matching the Grok provider); any
// other source is an error. Function call results are replayed as tool
// messages keyed by their call ID, and unknown payload types are rendered
// via their string representation.
func (cs *openAIChatSession) messageToOpenAIContent(msg *api.Message) (openai.ChatCompletionMessageParamUnion, error) {
	var role string
	switch msg.Source {
	case api.MessageSourceUser:
		role = "user"
	case api.MessageSourceModel:
		role = "assistant"
	case api.MessageSourceAgent:
		// Fix: the previous "agent" role string was never matched by the
		// payload handling below, so agent messages were silently emitted as
		// assistant messages. Treat them as system messages, consistent with
		// the Grok provider in this package.
		role = "system"
	default:
		return openai.UserMessage(""), fmt.Errorf("unknown message source: %s", msg.Source)
	}

	// toMessage wraps text in the constructor matching the resolved role.
	toMessage := func(text string) openai.ChatCompletionMessageParamUnion {
		switch role {
		case "assistant":
			return openai.AssistantMessage(text)
		case "system":
			return openai.SystemMessage(text)
		default:
			return openai.UserMessage(text)
		}
	}

	switch v := msg.Payload.(type) {
	case string:
		return toMessage(v), nil
	case FunctionCallResult:
		// Handle function call results as tool messages tied to the call ID.
		resultJSON, err := json.Marshal(v.Result)
		if err != nil {
			return openai.UserMessage(""), fmt.Errorf("failed to marshal function call result: %w", err)
		}
		return openai.ToolMessage(string(resultJSON), v.ID), nil
	default:
		// Convert unknown payload types to their string representation.
		return toMessage(fmt.Sprintf("%v", v)), nil
	}
}

// Helper structs for ChatResponse interface

type openAIChatResponse struct {
Expand Down
16 changes: 12 additions & 4 deletions pkg/sessions/session.go
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@
package sessions

import (
"bufio"
"encoding/json"
"fmt"
"os"
Expand Down Expand Up @@ -146,11 +147,18 @@ func (s *Session) ChatMessages() []*api.Message {
}
defer f.Close()

scanner := json.NewDecoder(f)
for scanner.More() {
// Read file line by line instead of using json.Decoder
scanner := bufio.NewScanner(f)
for scanner.Scan() {
line := scanner.Text()
if line == "" {
continue
}

var message api.Message
if err := scanner.Decode(&message); err != nil {
continue // skip malformed messages
if err := json.Unmarshal([]byte(line), &message); err != nil {
// Skip malformed messages
continue
}
messages = append(messages, &message)
}
Expand Down
Loading