# .env.example — environment variable template for OpenSRE.
# Copy this file to .env and fill in only the values you need;
# unused integrations can be left blank.
LLM_PROVIDER=anthropic
# OpenAI Codex CLI for `opensre investigate` (auth: `codex login`, no API key).
CODEX_MODEL= # if blank => Codex CLI uses its configured default/current model
CODEX_BIN= # set full path only if `codex` is not on PATH and defaults do not find it
# Required for any RCA run: set one provider key below (not needed when LLM_PROVIDER=codex).
ANTHROPIC_API_KEY=
ANTHROPIC_REASONING_MODEL=
ANTHROPIC_TOOLCALL_MODEL=
OPENAI_API_KEY=
OPENAI_REASONING_MODEL=
OPENAI_TOOLCALL_MODEL=
# OpenRouter (supports 100+ models via a single API)
OPENROUTER_API_KEY=
OPENROUTER_REASONING_MODEL=
OPENROUTER_TOOLCALL_MODEL=
# Google Gemini (using OpenAI-compatible endpoint)
GEMINI_API_KEY=
GEMINI_REASONING_MODEL=
GEMINI_TOOLCALL_MODEL=
# NVIDIA NIM (Inference Microservices)
NVIDIA_API_KEY=
NVIDIA_REASONING_MODEL=
NVIDIA_TOOLCALL_MODEL=
# Quick local LLM setup:
#
#   opensre onboard
#   # optional: Grafana, Datadog, Honeycomb, Coralogix, Slack, AWS, GitHub MCP, and Sentry
# ─── Integrations ────────────────────────────────────────────────────────────
# JWT_TOKEN is optional and only needed for the Tracer Web App path.
# If you are running locally against your own credentials, leave JWT_TOKEN empty.
#
# When running against the Tracer web app, integrations are fetched automatically.
# For local/standalone use, configure integrations via the CLI:
#
# python -m app.integrations setup grafana
# python -m app.integrations setup aws
# python -m app.integrations setup datadog
# python -m app.integrations setup honeycomb
# python -m app.integrations setup coralogix
# python -m app.integrations setup opensearch
# python -m app.integrations setup rds
# python -m app.integrations list
#
# Credentials are stored in ~/.tracer/integrations.json.
# The env vars below are also supported as a fallback integration source for
# local/standalone investigations.
#
# Configure only the systems you want to use for RCA:
# - Datadog OR Grafana OR Honeycomb OR Coralogix is enough for a first real-system run
# - AWS is optional unless you want AWS evidence
# - GitHub MCP is optional unless you want repository/code investigation
# - Sentry is optional unless you want issue/event investigation
# - Slack is optional unless you want outbound report delivery
# Local Grafana stack (make grafana-local-up): no real auth required, use placeholder token.
# Replace with your real Grafana Cloud values when connecting to a live instance.
GRAFANA_READ_TOKEN=local
GRAFANA_INSTANCE_URL=http://localhost:3000
GRAFANA_LOKI_DATASOURCE_UID=
GRAFANA_TEMPO_DATASOURCE_UID=
# Multi-instance Grafana (optional). When set, takes precedence over the
# single-instance vars above. See docs/multi-instance-integrations.mdx.
# Example:
# GRAFANA_INSTANCES='[
# {"name":"prod","tags":{"env":"prod"},"endpoint":"https://prod.grafana.net","api_key":"..."},
# {"name":"staging","tags":{"env":"staging"},"endpoint":"https://staging.grafana.net","api_key":"..."}
# ]'
GRAFANA_INSTANCES=
DD_API_KEY=
DD_APP_KEY=
DD_SITE=datadoghq.com
# Multi-instance Datadog (optional, see docs/multi-instance-integrations.mdx)
# DD_INSTANCES='[{"name":"prod","api_key":"...","app_key":"...","site":"datadoghq.com"}]'
DD_INSTANCES=
# Honeycomb
# Dataset can be a specific dataset slug or __all__ for environment-wide queries.
HONEYCOMB_API_KEY=
HONEYCOMB_DATASET=__all__
HONEYCOMB_API_URL=https://api.honeycomb.io
# Multi-instance Honeycomb (optional)
# HONEYCOMB_INSTANCES='[{"name":"prod","api_key":"...","dataset":"__all__"}]'
HONEYCOMB_INSTANCES=
# Coralogix
# API URL is region/domain-specific in some accounts; keep the default unless your
# Coralogix workspace uses a different API domain.
CORALOGIX_API_KEY=
CORALOGIX_API_URL=https://api.coralogix.com
CORALOGIX_APPLICATION_NAME=
CORALOGIX_SUBSYSTEM_NAME=
# Multi-instance Coralogix (optional)
# CORALOGIX_INSTANCES='[{"name":"prod","api_key":"...","base_url":"https://api.coralogix.com"}]'
CORALOGIX_INSTANCES=
AWS_REGION=us-east-1
AWS_ROLE_ARN=
AWS_EXTERNAL_ID=
AWS_ACCESS_KEY_ID=
AWS_SECRET_ACCESS_KEY=
AWS_SESSION_TOKEN=
# Multi-instance AWS (optional, e.g. two accounts or regions)
# AWS_INSTANCES='[
# {"name":"prod","tags":{"account":"prod"},"role_arn":"arn:aws:iam::111:role/opensre","external_id":"..."},
# {"name":"sandbox","tags":{"account":"sandbox"},"role_arn":"arn:aws:iam::222:role/opensre","external_id":"..."}
# ]'
AWS_INSTANCES=
# GitHub MCP
GITHUB_MCP_URL=
GITHUB_MCP_MODE=streamable-http
GITHUB_MCP_COMMAND=
GITHUB_MCP_ARGS=
GITHUB_MCP_AUTH_TOKEN=
GITHUB_MCP_TOOLSETS=repos,issues,pull_requests,actions,search
# Optional: max repos stored from one validation list/search call (5–500, default 50).
# OPENSRE_GITHUB_MCP_REPO_PROBE_LIMIT=
# Sentry
SENTRY_URL=https://sentry.io
SENTRY_ORG_SLUG=
SENTRY_PROJECT_SLUG=
SENTRY_AUTH_TOKEN=
# MongoDB
MONGODB_CONNECTION_STRING=
MONGODB_DATABASE=
MONGODB_AUTH_SOURCE=admin
MONGODB_TLS=true
# MongoDB Atlas (managed-service RCA via Atlas Admin API v2)
MONGODB_ATLAS_PUBLIC_KEY=
MONGODB_ATLAS_PRIVATE_KEY=
MONGODB_ATLAS_PROJECT_ID=
MONGODB_ATLAS_BASE_URL=https://cloud.mongodb.com/api/atlas/v2
# MariaDB (direct connection for database-level RCA)
MARIADB_HOST=
MARIADB_PORT=3306
MARIADB_DATABASE=
MARIADB_USERNAME=
MARIADB_PASSWORD=
MARIADB_SSL=true
# RabbitMQ (read-only Management HTTP API — the `rabbitmq_management` plugin
# must be enabled on the broker: `rabbitmq-plugins enable rabbitmq_management`)
RABBITMQ_HOST=
RABBITMQ_MANAGEMENT_PORT=15672
RABBITMQ_USERNAME=
RABBITMQ_PASSWORD=
RABBITMQ_VHOST=/
RABBITMQ_SSL=false
RABBITMQ_VERIFY_SSL=true
# Better Stack Telemetry (ClickHouse SQL query API over HTTP Basic auth).
# Credentials: Better Stack dashboard > Integrations > Connect ClickHouse HTTP client.
# Query endpoint is region-specific (e.g. https://eu-nbg-2-connect.betterstackdata.com).
# BETTERSTACK_SOURCES is an optional comma-separated list of base source IDs
# from the dashboard (e.g. t123456_myapp,t123456_gateway). The integration
# appends _logs / _s3 internally. If omitted, the planner must pass a source
# name extracted from alert metadata.
BETTERSTACK_QUERY_ENDPOINT=
BETTERSTACK_USERNAME=
BETTERSTACK_PASSWORD=
BETTERSTACK_SOURCES=
# Kafka
KAFKA_BOOTSTRAP_SERVERS=
KAFKA_SECURITY_PROTOCOL=PLAINTEXT
KAFKA_SASL_MECHANISM=
KAFKA_SASL_USERNAME=
KAFKA_SASL_PASSWORD=
# ClickHouse
CLICKHOUSE_HOST=
CLICKHOUSE_PORT=8123
CLICKHOUSE_DATABASE=default
CLICKHOUSE_USER=default
CLICKHOUSE_PASSWORD=
CLICKHOUSE_SECURE=false
# Bitbucket
BITBUCKET_WORKSPACE=
BITBUCKET_USERNAME=
BITBUCKET_APP_PASSWORD=
# Jira
JIRA_BASE_URL=
JIRA_EMAIL=
JIRA_API_TOKEN=
JIRA_PROJECT_KEY=
# Optional: only required for the Tracer Web App path.
JWT_TOKEN=
# LangGraph deployment database
# PostgreSQL and Redis are required for deployed OpenSRE / LangGraph services.
#
# Local hosting:
# docker compose -f docker-compose.database.yml up -d
# DATABASE_URI=postgresql://opensre:opensre@localhost:5432/opensre
# REDIS_URI=redis://localhost:6379/0
#
# Railway hosted deployment:
# 1. Create Postgres and Redis services in your Railway project.
# 2. Copy the Postgres and Redis connection strings from Railway.
# 3. Set DATABASE_URI and REDIS_URI on your OpenSRE service, for example:
# railway variables set DATABASE_URI=postgresql://user:pass@prod-db.railway.app:5432/opensre
# railway variables set REDIS_URI=redis://default:pass@prod-redis.railway.app:6379
#
# Then deploy with:
# opensre deploy railway
DATABASE_URI=
REDIS_URI=
# LangSmith integration (optional)
LANGSMITH_API_KEY=
LANGSMITH_DEPLOYMENT_NAME=open-sre-agent
# Optional: only required if you want to post RCA reports to Slack.
SLACK_WEBHOOK_URL=
# Optional: required for 'opensre investigate --service --slack-thread CHANNEL/TS'.
# Used to pull a Slack thread via conversations.replies as runtime investigation context.
SLACK_BOT_TOKEN=
ENV=development
# Reversible masking of sensitive infrastructure identifiers before external LLM calls.
# See docs/masking.mdx for details. Off by default; enable per operator.
OPENSRE_MASK_ENABLED=false
# Comma-separated kinds to mask. Empty = all defaults
# (pod,namespace,cluster,hostname,account_id,ip_address,email,service_name).
OPENSRE_MASK_KINDS=
# Optional JSON object of label→regex for custom identifier patterns.
# Example: '{"jira_key": "\\b[A-Z]+-\\d+\\b"}'
OPENSRE_MASK_EXTRA_REGEX=
# GitLab
GITLAB_BASE_URL=https://gitlab.com/api/v4
GITLAB_MR_WRITEBACK=false
# Optional: presumably only needed for merge request write-back (GITLAB_MR_WRITEBACK=true) — verify against the GitLab client code.
GITLAB_MR_IID=your-mr-iid
GITLAB_ACCESS_TOKEN=your-pat
GITLAB_PROJECT_ID=your-project-id
GITLAB_REPO_URL=your-gitlab-repo-url