Skip to content
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
Show all changes
35 commits
Select commit Hold shift + click to select a range
d50af9e
feat: support agui
Flash-LHR Sep 18, 2025
c4ec52f
refactor
Flash-LHR Sep 19, 2025
f6d0c59
Merge branch 'main' into support-agui
Flash-LHR Sep 19, 2025
067afd5
fix
Flash-LHR Sep 19, 2025
b83f997
feat: support DefaultNewService
Flash-LHR Sep 22, 2025
d1bf559
Merge branch 'main' into support-agui
Flash-LHR Sep 22, 2025
ba011eb
feat: adapter
Flash-LHR Sep 25, 2025
dc7c5af
feat: support non-stream message
Flash-LHR Sep 25, 2025
b939e00
fix
Flash-LHR Sep 25, 2025
dad2ebf
example: add copilokit
Flash-LHR Sep 25, 2025
376680b
docs
Flash-LHR Sep 25, 2025
8f7861d
doc
Flash-LHR Sep 25, 2025
e1214a4
front
Flash-LHR Sep 25, 2025
e5bdc2d
doc
Flash-LHR Sep 25, 2025
2d90ab0
doc
Flash-LHR Sep 25, 2025
e9a15c6
Merge branch 'main' into support-agui
Flash-LHR Sep 25, 2025
6b246c2
feat: use Handler
Flash-LHR Sep 26, 2025
5361c06
refactor
Flash-LHR Sep 26, 2025
d05620b
only post
Flash-LHR Sep 26, 2025
cbe1424
fix
Flash-LHR Sep 26, 2025
13f40ec
test
Flash-LHR Sep 26, 2025
5e2e01e
fix
Flash-LHR Sep 26, 2025
3113f81
doc
Flash-LHR Sep 26, 2025
a31d209
Merge branch 'main' into support-agui
Flash-LHR Sep 26, 2025
be3e701
typo
Flash-LHR Sep 26, 2025
6333979
fix
Flash-LHR Sep 26, 2025
a4cd818
docs
Flash-LHR Sep 26, 2025
e95347d
fix
Flash-LHR Sep 26, 2025
afec996
refactor: runner
Flash-LHR Sep 26, 2025
f5fa1a4
Revert "refactor: runner"
Flash-LHR Sep 26, 2025
613c887
docs
Flash-LHR Sep 28, 2025
390ccce
doc
Flash-LHR Sep 28, 2025
783b880
example
Flash-LHR Sep 28, 2025
2a3ed72
refactor: runner instead of agent
Flash-LHR Sep 28, 2025
2fa5e13
test
Flash-LHR Sep 28, 2025
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
8 changes: 8 additions & 0 deletions examples/agui/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
# AG-UI Examples

This folder collects runnable demos that showcase how to integrate the
`tRPC-Agent-Go` AG-UI server and various clients.

- [`bubbletea/`](bubbletea/) – Terminal chat experience with an AG-UI SSE server using `llmagent` and a minimal CLI client.

Each subdirectory contains its own instructions.
59 changes: 59 additions & 0 deletions examples/agui/bubbletea/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,59 @@
# Bubbletea AG-UI Demo

This example pairs a minimal AG-UI SSE server powered by `llmagent` with a
lightweight terminal client. The demo streams real model responses via the
AG-UI protocol.

## Layout

- `server/` – AG-UI SSE server backed by an OpenAI-compatible LLM agent.
- `client/` – Simple terminal client that reads a prompt, streams AG-UI events,
and prints the assistant output.

## Prerequisites

- Go 1.24+
- An OpenAI-compatible API key (export `OPENAI_API_KEY`).

## Run the server

```bash
export OPENAI_API_KEY=sk-...
cd examples/agui
GO111MODULE=on go run ./bubbletea/server/cmd
```

The server listens on `http://localhost:8080/agui/run` by default. Adjust the
model name in `server/cmd/main.go` if you prefer a different backend.

## Run the client

In a second terminal:

```bash
cd examples/agui
GO111MODULE=on go run ./bubbletea/client
```

Type a message and press Enter. The client streams AG-UI events from the server
and prints the assistant response. Press Ctrl+C or Esc to exit.

Example session:

```
╭──────────────────────────────────────────────────────────────╮
│ │
│ Simple AG-UI Client. Press Ctrl+C to quit. │
│ You> calculate 1.25^0.42 │
│ Agent> [RUN_STARTED] │
│ Agent> [TOOL_CALL_START] tool call 'calculator' started, │
│ Agent> [TOOL_CALL_ARGS] tool args: {"a":1.25,"b":0.42,"operation":"power"}
│ Agent> [TOOL_CALL_END] tool call completed, id: call_00_... │
│ Agent> [TOOL_CALL_RESULT] tool result: {"result":1.09825...}│
│ Agent> [TEXT_MESSAGE_START] │
│ Agent> [TEXT_MESSAGE_CONTENT] The result of 1.25^0.42 is... │
│ Agent> [TEXT_MESSAGE_END] │
│ Agent> [RUN_FINISHED] │
│ │
╰──────────────────────────────────────────────────────────────╯
```
292 changes: 292 additions & 0 deletions examples/agui/bubbletea/client/main.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,292 @@
package main

import (
"context"
"flag"
"fmt"
"log"
"strings"
"time"

"github.com/ag-ui-protocol/ag-ui/sdks/community/go/pkg/client/sse"
"github.com/ag-ui-protocol/ag-ui/sdks/community/go/pkg/core/events"
"github.com/charmbracelet/bubbles/spinner"
"github.com/charmbracelet/bubbles/textinput"
"github.com/charmbracelet/bubbles/viewport"
tea "github.com/charmbracelet/bubbletea"
"github.com/charmbracelet/lipgloss"
"github.com/sirupsen/logrus"
)

// Shared lipgloss styles that define the TUI layout.
var (
	docStyle    = lipgloss.NewStyle().Margin(1, 2)                                   // outer margin around the whole app
	headerStyle = lipgloss.NewStyle().Foreground(lipgloss.Color("205")).Bold(true)   // title bar
	bodyStyle   = lipgloss.NewStyle().Border(lipgloss.RoundedBorder()).Padding(1, 2) // bordered chat transcript
	inputStyle  = lipgloss.NewStyle().MarginTop(1)                                   // prompt row below the body
)

// headerText is the title rendered above the chat viewport.
const headerText = "AG-UI Demo"

// main parses the endpoint flag and runs the Bubble Tea program in
// full-screen (alt-screen) mode until the user quits.
func main() {
	var endpoint string
	flag.StringVar(&endpoint, "endpoint", "http://localhost:8080/agui/run", "AG-UI SSE endpoint")
	flag.Parse()

	program := tea.NewProgram(initialModel(endpoint), tea.WithAltScreen())
	if _, err := program.Run(); err != nil {
		log.Fatalf("bubbletea program failed: %v", err)
	}
}

// model is the Bubble Tea state for the chat client.
type model struct {
	endpoint string          // AG-UI SSE endpoint to post prompts to
	history  []string        // transcript lines shown in the viewport
	input    textinput.Model // single-line prompt input
	viewport viewport.Model  // scrollable chat body
	spinner  spinner.Model   // activity indicator shown while busy
	busy     bool            // true while a request is in flight
	ready    bool            // set after the first WindowSizeMsg arrives
}

// initialModel builds the starting UI state pointed at the given endpoint.
func initialModel(endpoint string) model {
	ti := textinput.New()
	ti.Placeholder = "Ask something..."
	ti.Prompt = "You> "
	ti.Focus()

	sp := spinner.New()
	sp.Spinner = spinner.Dot

	return model{
		endpoint: endpoint,
		history:  []string{"Simple AG-UI Client. Press Ctrl+C to quit."},
		input:    ti,
		spinner:  sp,
	}
}

// Init starts the spinner's tick loop when the program boots.
func (m model) Init() tea.Cmd {
	return m.spinner.Tick
}

// Update is the Bubble Tea state transition: it handles resizes, key
// presses, request results, and spinner ticks, returning the next model
// and any command to run.
func (m model) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
	switch msg := msg.(type) {
	case tea.WindowSizeMsg:
		// First WindowSizeMsg marks the UI as ready; later ones resize it.
		m.ready = true
		m.configureViewport(msg.Width, msg.Height)
		m.refreshViewport()
		return m, nil

	case tea.KeyMsg:
		switch msg.Type {
		case tea.KeyCtrlC, tea.KeyEsc:
			return m, tea.Quit
		case tea.KeyEnter:
			trimmed := strings.TrimSpace(m.input.Value())
			// Ignore empty submissions and re-submissions while busy.
			if trimmed == "" || m.busy {
				return m, nil
			}
			m.input.Reset()
			m.busy = true
			m.history = append(m.history, fmt.Sprintf("You> %s", trimmed))
			m.refreshViewport()
			// Keep the spinner animating while the request runs off-thread.
			return m, tea.Batch(m.spinner.Tick, startChatCmd(trimmed, m.endpoint))
		default:
			// Typing is only forwarded to the input while idle.
			if !m.busy {
				var cmd tea.Cmd
				m.input, cmd = m.input.Update(msg)
				return m, cmd
			}
			return m, nil
		}

	case chatResultMsg:
		// A request finished: append its transcript and unlock the input.
		m.history = append(m.history, msg.lines...)
		m.busy = false
		m.refreshViewport()
		m.input.Focus()
		return m, nil

	case errMsg:
		// A request failed: show the error inline and unlock the input.
		m.history = append(m.history, fmt.Sprintf("Error: %v", msg.error))
		m.busy = false
		m.refreshViewport()
		m.input.Focus()
		return m, nil

	case spinner.TickMsg:
		// Only keep ticking while busy; otherwise let the animation stop.
		if !m.busy {
			return m, nil
		}
		var cmd tea.Cmd
		m.spinner, cmd = m.spinner.Update(msg)
		return m, cmd
	}

	// Any other message (blink cursors, etc.) goes to the input when idle.
	if !m.busy {
		var cmd tea.Cmd
		m.input, cmd = m.input.Update(msg)
		return m, cmd
	}
	return m, nil
}

// View renders the header, scrollback body, and input row as one frame.
func (m model) View() string {
	if !m.ready {
		return "Loading..."
	}

	hFrame, vFrame := bodyStyle.GetFrameSize()
	body := bodyStyle.
		Width(m.viewport.Width + hFrame).
		Height(m.viewport.Height + vFrame).
		Render(m.viewport.View())

	inputView := m.input.View()
	if m.busy {
		// Append an animated spinner while a request is in flight.
		inputView += " " + lipgloss.NewStyle().Foreground(lipgloss.Color("63")).Render(m.spinner.View())
	}

	return docStyle.Render(lipgloss.JoinVertical(
		lipgloss.Left,
		headerStyle.Render(headerText),
		body,
		inputStyle.Render(inputView),
	))
}

// refreshViewport pushes the chat history into the viewport and scrolls
// to the newest line. It is a no-op until the first WindowSizeMsg.
func (m *model) refreshViewport() {
	if !m.ready {
		return
	}
	m.viewport.SetContent(strings.Join(m.history, "\n"))
	m.viewport.GotoBottom()
}

// configureViewport recomputes viewport dimensions from the terminal size,
// reserving room for the document margins, body border, header, and input
// row, and clamping to a usable minimum.
func (m *model) configureViewport(width, height int) {
	docW, docH := docStyle.GetFrameSize()
	bodyW, bodyH := bodyStyle.GetFrameSize()
	_, inputFrameH := inputStyle.GetFrameSize()
	headerH := lipgloss.Height(headerStyle.Render(headerText))

	w := width - docW - bodyW
	if w < 20 {
		w = 20
	}

	// One text row for the input plus its vertical frame.
	h := height - docH - bodyH - headerH - (1 + inputFrameH)
	if h < 5 {
		h = 5
	}

	if m.viewport.Width == w && m.viewport.Height == h {
		m.viewport.Width = w
		m.viewport.Height = h
	} else {
		// Size changed: rebuild the viewport at the new dimensions.
		m.viewport = viewport.New(w, h)
	}
	m.input.Width = w
}

// chatResultMsg delivers the rendered transcript lines for one request.
type chatResultMsg struct{ lines []string }

// errMsg wraps a request failure so Update can display it.
type errMsg struct{ error }

// startChatCmd returns a tea.Cmd that performs the blocking AG-UI request
// off the UI goroutine and reports the outcome back as a message.
func startChatCmd(prompt, endpoint string) tea.Cmd {
	return func() tea.Msg {
		lines, err := fetchResponse(prompt, endpoint)
		if err == nil {
			return chatResultMsg{lines: lines}
		}
		return errMsg{err}
	}
}

// fetchResponse posts the prompt to the AG-UI SSE endpoint, drains the
// event stream until it completes, and returns the rendered transcript
// lines. It bounds the whole exchange with a two-minute context.
func fetchResponse(prompt, endpoint string) ([]string, error) {
	ctx, cancel := context.WithTimeout(context.Background(), 2*time.Minute)
	defer cancel()

	// Silence the SDK's internal logging; failures surface via errCh.
	logger := logrus.New()
	logger.SetLevel(logrus.FatalLevel)

	client := sse.NewClient(sse.Config{
		Endpoint:       endpoint,
		ConnectTimeout: 30 * time.Second,
		ReadTimeout:    5 * time.Minute,
		BufferSize:     100,
		Logger:         logger,
	})
	defer client.Close()

	payload := map[string]any{
		"threadId": "demo-thread",
		"runId":    fmt.Sprintf("run-%d", time.Now().UnixNano()),
		"messages": []map[string]any{{"role": "user", "content": prompt}},
	}

	frames, errCh, err := client.Stream(sse.StreamOptions{Context: ctx, Payload: payload})
	if err != nil {
		return nil, fmt.Errorf("failed to start SSE stream: %w", err)
	}

	var collected []events.Event
	for {
		select {
		case frame, ok := <-frames:
			if !ok {
				// Stream finished; render everything we received.
				return renderEvents(collected), nil
			}
			evt, err := events.EventFromJSON(frame.Data)
			if err != nil {
				return nil, fmt.Errorf("parse event: %w", err)
			}
			collected = append(collected, evt)
		case err, ok := <-errCh:
			if !ok {
				// A closed channel is always ready to receive; nil it out so
				// this case is disabled and the select does not busy-spin
				// while waiting for the frames channel to close.
				errCh = nil
				continue
			}
			if err != nil {
				return nil, err
			}
		case <-ctx.Done():
			return nil, ctx.Err()
		}
	}
}

// renderEvents flattens a sequence of AG-UI events into display lines,
// falling back to a placeholder when nothing printable arrived.
func renderEvents(evts []events.Event) []string {
	var output []string
	for _, evt := range evts {
		output = append(output, formatEvent(evt)...)
	}
	if len(output) == 0 {
		// Use the same "Agent>" prefix as formatEvent; the previous "Bot>"
		// prefix was inconsistent with every other transcript line.
		output = append(output, "Agent> (no response)")
	}
	return output
}

// formatEvent converts a single AG-UI event into zero or more display
// lines prefixed with "Agent>". Unknown event types yield nothing.
func formatEvent(evt events.Event) []string {
	label := fmt.Sprintf("[%s]", evt.Type())
	agentLine := func(format string, args ...any) []string {
		return []string{"Agent> " + fmt.Sprintf(format, args...)}
	}

	switch e := evt.(type) {
	case *events.RunErrorEvent:
		return agentLine("%s: %s", label, e.Message)
	case *events.TextMessageContentEvent:
		// Skip whitespace-only deltas to keep the transcript readable.
		if strings.TrimSpace(e.Delta) == "" {
			return nil
		}
		return agentLine("%s %s", label, e.Delta)
	case *events.ToolCallStartEvent:
		return agentLine("%s tool call '%s' started, id: %s", label, e.ToolCallName, e.ToolCallID)
	case *events.ToolCallArgsEvent:
		return agentLine("%s tool args: %s", label, e.Delta)
	case *events.ToolCallEndEvent:
		return agentLine("%s tool call completed, id: %s", label, e.ToolCallID)
	case *events.ToolCallResultEvent:
		return agentLine("%s tool result: %s", label, e.Content)
	case *events.RunStartedEvent, *events.RunFinishedEvent,
		*events.TextMessageStartEvent, *events.TextMessageEndEvent:
		// Lifecycle markers carry no payload; print the label alone.
		return agentLine("%s", label)
	default:
		return nil
	}
}
Loading
Loading