-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy path.env.example
More file actions
105 lines (82 loc) · 3.33 KB
/
.env.example
File metadata and controls
105 lines (82 loc) · 3.33 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
# LLM Provider Configuration
# Choose one: gemini, claude, openai, xai, ollama
LLM_PROVIDER=gemini
# LLM model (optional — each provider falls back to its own default if unset)
# Gemini: gemini-2.0-flash-exp
# Claude: claude-3-5-sonnet-20241022
# OpenAI: gpt-4
# xAI: grok-3
# Ollama: llama3.2
# LLM_MODEL=
# Provider-Specific API Keys
# For Gemini (default provider)
GOOGLE_API_KEY=your-google-api-key-here
# GEMINI_API_KEY=your-google-api-key-here
# For Claude
# ANTHROPIC_API_KEY=your-anthropic-api-key-here
# CLAUDE_API_KEY=your-anthropic-api-key-here
# For OpenAI
# OPENAI_API_KEY=your-openai-api-key-here
# For xAI (Grok)
# XAI_API_KEY=your-xai-api-key-here
# For Ollama (local)
# OLLAMA_URL=http://localhost:11434
# Generic LLM API key (takes precedence over the provider-specific keys above)
# LLM_API_KEY=
# Custom LLM base URL (for self-hosted or proxy API endpoints)
# LLM_BASE_URL=
# Search Provider Configuration
# Choose one: serper, serpapi
SEARCH_PROVIDER=serper
# Serper API Key (https://serper.dev)
# Get your key at: https://serper.dev/api-key
SERPER_API_KEY=your-serper-api-key-here
# SerpAPI Key (https://serpapi.com)
# Alternative search provider
# SERPAPI_API_KEY=your-serpapi-key-here
# Agent URLs (defaults shown below — uncomment and edit a line to override)
# RESEARCH_AGENT_URL=http://localhost:8001
# VERIFICATION_AGENT_URL=http://localhost:8002
# ORCHESTRATOR_URL=http://localhost:8000
# ORCHESTRATOR_EINO_URL=http://localhost:8003
# A2A Protocol Configuration
# A2A_ENABLED=true
# A2A_AUTH_TYPE=apikey
# A2A_AUTH_TOKEN=
# =============================================================================
# LLM Observability Configuration (via OmniObserve)
# =============================================================================
# Choose one provider: opik, langfuse, phoenix
# OBSERVABILITY_ENABLED=false
# OBSERVABILITY_PROVIDER=opik
# -----------------------------------------------------------------------------
# Opik Configuration (https://github.com/comet-ml/opik)
# Open-source LLM observability with tracing, evaluation, and prompt management
# -----------------------------------------------------------------------------
# Get your API key at: https://www.comet.com/opik
# OPIK_API_KEY=your-opik-api-key-here
# For self-hosted Opik (optional)
# OPIK_ENDPOINT=https://opik.example.com
# Workspace name (optional, for Comet cloud)
# OPIK_WORKSPACE=your-workspace
# Default project name for traces
# OPIK_PROJECT=stats-agent-team
# -----------------------------------------------------------------------------
# Langfuse Configuration (https://langfuse.com)
# Open-source LLM observability and analytics
# -----------------------------------------------------------------------------
# Get your keys at: https://cloud.langfuse.com (Settings > API Keys)
# LANGFUSE_PUBLIC_KEY=pk-lf-your-public-key-here
# LANGFUSE_SECRET_KEY=sk-lf-your-secret-key-here
# For self-hosted Langfuse (optional)
# LANGFUSE_ENDPOINT=https://langfuse.example.com
# -----------------------------------------------------------------------------
# Phoenix Configuration (https://phoenix.arize.com)
# Open-source LLM observability with OpenTelemetry support
# -----------------------------------------------------------------------------
# Local Phoenix server (default: http://localhost:6006)
# PHOENIX_ENDPOINT=http://localhost:6006
# For Arize cloud (optional)
# PHOENIX_API_KEY=your-phoenix-api-key-here
# Project/dataset name
# PHOENIX_PROJECT=stats-agent-team