A simple and powerful Go library for building AI agents with multi-LLM provider support, tool integration, and structured outputs.
- Multi-LLM Provider Support: OpenAI, Google Gemini, Anthropic Claude, and Ollama
- Tool/Function Calling: Add custom tools that agents can use
- MCP (Model Context Protocol) Support: Connect to MCP servers for external tool integration
- Structured Outputs: Define JSON schemas for structured responses
- Conversation History: Automatic message history management
- Thread-Safe: Concurrent operations with proper synchronization
- Recursion Protection: Prevents infinite tool call loops
- AttachInterNetAccess: This feature is currently in beta. Please do not use it in production as it may be unstable and subject to change.
go get github.com/4nkitd/sapiens

package main
import (
"context"
"fmt"
"log"
"os"
"github.com/4nkitd/sapiens"
)
func main() {
// Initialize LLM provider
llm := sapiens.NewGemini(os.Getenv("GEMINI_API_KEY"))
// Create agent
agent := sapiens.NewAgent(
context.Background(),
llm.Client(),
llm.GetDefaultModel(),
"You are a helpful assistant",
)
// Create message
message := sapiens.NewMessages()
// Ask a question
resp, err := agent.Ask(message.MergeMessages(
message.UserMessage("Hello! How are you today?"),
))
if err != nil {
log.Fatalf("Error: %v", err)
}
fmt.Println("Response:", resp.Choices[0].Message.Content)
}Tools allow your agent to perform specific functions:
// Add a weather tool.
// Parameters: name, description, JSON-schema parameter definitions,
// the list of required parameter names, and the callback invoked when
// the model calls the tool.
agent.AddTool(
	"get_weather",
	"Get current weather for a location",
	map[string]jsonschema.Definition{
		"location": {
			Type:        jsonschema.String,
			Description: "The city and state, e.g. San Francisco, CA",
		},
		"unit": {
			Type: jsonschema.String,
			// Restrict the model to one of these two values.
			Enum: []string{"celsius", "fahrenheit"},
		},
	},
	// Only "location" is required; "unit" is optional.
	[]string{"location"},
	func(parameters map[string]string) string {
		location := parameters["location"]
		unit := parameters["unit"]
		// Your weather API logic here
		return fmt.Sprintf(`{"temperature":"25", "unit":"%s", "location":"%s"}`, unit, location)
	},
)
Connect to MCP servers to use external tools and services:
// Connect to MCP server (SSE endpoint); the second argument is an
// optional map of custom headers (nil when none are needed).
err := agent.AddMCP("http://localhost:8080/sse", nil)
if err != nil {
	log.Fatalf("Failed to connect to MCP server: %v", err)
}
}

Request structured responses using JSON schemas:
// Define response structure
type Result struct {
	Steps []struct {
		Explanation string `json:"explanation"`
		Output      string `json:"output"`
	} `json:"steps"`
	FinalAnswer string `json:"final_answer"`
}

var result Result

// Set structured response schema; the zero-value struct is used to
// derive the JSON schema for the model's output.
schema, err := agent.SetResponseSchema(
	"analysis_result",
	"Structured analysis with steps and final answer",
	true,
	result,
)
if err != nil {
	log.Fatalf("SetResponseSchema error: %v", err)
}
_ = schema

// Use the package constructor, consistent with the quick-start example.
message := sapiens.NewMessages()
resp, err := agent.Ask(message.MergeMessages(
	message.UserMessage("Analyze the benefits of renewable energy"),
))
if err != nil {
	log.Fatalf("Error: %v", err)
}

// Parse the structured response back into the typed struct.
err = agent.ParseStructuredResponse(resp, &result)
if err != nil {
	log.Fatalf("ParseStructuredResponse error: %v", err)
}
log.Printf("Structured result: %+v", result)
}

llm := NewOpenai(os.Getenv("OPENAI_API_KEY"))

Default model: gpt-4.1-2025-04-14
llm := NewGemini(os.Getenv("GEMINI_API_KEY"))

Default model: gemini-2.0-flash
llm := NewAnthropic(os.Getenv("ANTHROPIC_API_KEY"))

Default model: claude-sonnet-3.5
llm := NewOllama(
"http://localhost:11434/v1/", // Base URL
"", // Auth token (optional)
"llama2", // Model name
)

Creates a new agent instance.
Parameters:
ctx: Context for operations
client: OpenAI-compatible client
model: Model name to use
systemPrompt: System prompt that defines agent behavior
Adds a tool that the agent can use.
Parameters:
name: Tool name
description: Tool description
parameters: JSON schema definition for parameters
required: Required parameter names
callback: Function to execute when tool is called
Connects to an MCP server to use external tools.
Parameters:
url: MCP server URL (typically SSE endpoint)
headers: Optional custom headers for authentication
Creates a structured response schema for an agent.