forked from vamplabAI/sgr-agent-core
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathconfig.yaml.example
More file actions
60 lines (50 loc) · 2.22 KB
/
config.yaml.example
File metadata and controls
60 lines (50 loc) · 2.22 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
# SGR Agent Core - Configuration Template
# Copy this file to config.yaml and fill in your data

# LLM Configuration
llm:
  api_key: "your-openai-api-key-here"  # Your OpenAI API key
  base_url: "https://api.openai.com/v1"  # API base URL
  model: "gpt-4o-mini"  # Model name
  max_tokens: 8000  # Max output tokens
  temperature: 0.4  # Temperature (0.0-1.0)
  # proxy: "socks5://127.0.0.1:1081"  # Optional proxy (socks5:// or http://)

# Search Configuration (Tavily)
search:
  tavily_api_key: "your-tavily-api-key-here"  # Tavily API key (get at tavily.com)
  tavily_api_base_url: "https://api.tavily.com"  # Tavily API URL
  max_searches: 4  # Max search operations
  max_results: 10  # Max results in search query
  content_limit: 1500  # Content char limit per source

# Execution Settings
execution:
  max_clarifications: 3  # Max clarification requests
  max_iterations: 10  # Max agent iterations
  mcp_context_limit: 15000  # Max context length from MCP server response
  logs_dir: "logs"  # Directory for saving agent execution logs
  reports_dir: "reports"  # Directory for saving agent reports

# Prompts Configuration
# prompts:
#   # Option 1: Use file paths (absolute or relative to project root)
#   system_prompt_file: "path/to/your/system_prompt.txt"
#   initial_user_request_file: "path/to/your/initial_user_request.txt"
#   clarification_response_file: "path/to/your/clarification_response.txt"
#   # Option 2: Provide prompts directly as strings
#   system_prompt_str: "Your custom system prompt here..."
#   initial_user_request_str: "Your custom initial request template..."
#   clarification_response_str: "Your custom clarification template..."
#   # Note: If both file and string are provided, string takes precedence

# MCP (Model Context Protocol) Configuration
mcp:
  mcpServers:
    deepwiki:
      url: "https://mcp.deepwiki.com/mcp"
    # Add more MCP servers here:
    # your_server:
    #   url: "https://your-mcp-server.com/mcp"
    #   headers:
    #     Authorization: "Bearer your-token"

# Note: The 'agents' field is optional and can be loaded from either:
#   - This config.yaml file
#   - Any separate file by GlobalConfig.definitions_from_yaml method
# See examples in agents.yaml.example for agent configuration options
agents: {}