Skip to content

Commit 44540db

Browse files
committed
feat: MCP integration for AI tools (#1091)
- Auto-create Grafana service account and token at startup
- Generate /etc/lgtm/mcp.json (mcp-grafana + Tempo native MCP)
- Generate /etc/lgtm/claude-mcp-setup.sh for one-command Claude Code setup
- Add opt-in collector debug exporter (OTEL_COLLECTOR_DEBUG_EXPORTER=true)
- Enable Tempo MCP server, expose port 3200
- Add docs/mcp-integration.md
1 parent f02cfd2 commit 44540db

File tree

8 files changed

+177
-1
lines changed

8 files changed

+177
-1
lines changed

README.md

Lines changed: 12 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -304,6 +304,17 @@ OIDC_ISSUER="https://token.actions.githubusercontent.com"
304304
cosign verify ${IMAGE} --certificate-identity ${IDENTITY} --certificate-oidc-issuer ${OIDC_ISSUER}
305305
```
306306

307+
## AI Tool Integration (MCP)
308+
309+
The container exposes [MCP servers][mcp] so AI coding tools can query traces, metrics, logs,
310+
and dashboards directly.
311+
312+
```sh
313+
docker exec lgtm cat /etc/lgtm/mcp.json # or: podman exec ...
314+
```
315+
316+
Paste the JSON into your AI tool's MCP configuration. See [docs/mcp-integration.md](docs/mcp-integration.md) for details.
317+
307318
## Related Work
308319

309320
- [Metrics, Logs, Traces and Profiles in Grafana][mltp]
@@ -330,4 +341,5 @@ cosign verify ${IMAGE} --certificate-identity ${IDENTITY} --certificate-oidc-iss
330341
[otlp-endpoint]: https://opentelemetry.io/docs/languages/sdk-configuration/otlp-exporter/#otel_exporter_otlp_endpoint
331342
[otlp-headers]: https://opentelemetry.io/docs/languages/sdk-configuration/otlp-exporter/#otel_exporter_otlp_headers
332343
[oats]: https://github.com/grafana/oats
344+
[mcp]: https://modelcontextprotocol.io/ "Model Context Protocol"
333345
[red-method]: https://grafana.com/blog/the-red-method-how-to-instrument-your-services/ "The RED Method"

docker/otelcol-config-debug.yaml

Lines changed: 8 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,8 @@
1+
# Override config, merged on top of otelcol-config.yaml when the collector
# is started with OTEL_COLLECTOR_DEBUG_EXPORTER=true. It replaces each
# pipeline's exporter list to additionally fan out to a debug exporter.
# NOTE(review): only the pipeline wiring lives here — presumably the
# debug/traces, debug/metrics and debug/logs exporters are declared in the
# base config; verify, otherwise the merged config fails validation.
service:
  pipelines:
    traces:
      exporters: [otlphttp/traces, debug/traces]
    metrics:
      exporters: [otlphttp/metrics, debug/metrics]
    logs:
      exporters: [otlphttp/logs, debug/logs]

docker/run-all.sh

Lines changed: 54 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -131,6 +131,59 @@ echo "Total: ${total_elapsed} seconds"
131131
touch /tmp/ready
132132
echo "The OpenTelemetry collector and the Grafana LGTM stack are up and running. (created /tmp/ready)"
133133

134+
# --- AI tool integration (MCP) ------------------------------------------------
# Create a Grafana service account + token and generate MCP client configs.
# This is best effort: a failure here must not abort stack startup. The
# "|| true" guards matter because curl -f exits non-zero on HTTP 4xx/5xx,
# which would kill the script if it runs under `set -e` (e.g. the expected
# 4xx when the service account already exists on persisted data).

# Extract the first numeric "id" field from a JSON document on stdin.
# (grep/cut instead of jq — jq is not guaranteed to be in the image.)
extract_first_id() {
  grep -o '"id":[0-9]*' | head -1 | cut -d: -f2
}

# Try to create the SA; if it already exists (persisted data), Grafana
# answers 4xx and curl -f yields an empty response — fall back to a lookup.
SA_RESPONSE=$(curl -sf http://127.0.0.1:3000/api/serviceaccounts -H "Content-Type: application/json" -u admin:admin -d '{"name":"ai-tools","role":"Viewer"}' || true)
if [ -z "$SA_RESPONSE" ]; then
  # SA already exists — find its ID
  SA_RESPONSE=$(curl -sf "http://127.0.0.1:3000/api/serviceaccounts/search?query=ai-tools" -u admin:admin || true)
fi
SA_ID=$(echo "$SA_RESPONSE" | extract_first_id)

if [ -n "$SA_ID" ]; then
  # Grafana never returns a token's secret after creation, so rotate:
  # delete any existing tokens and mint a fresh one for this container run.
  EXISTING_TOKENS=$(curl -sf "http://127.0.0.1:3000/api/serviceaccounts/${SA_ID}/tokens" -u admin:admin || true)
  for TOKEN_ID in $(echo "$EXISTING_TOKENS" | grep -o '"id":[0-9]*' | cut -d: -f2); do
    curl -sf -X DELETE "http://127.0.0.1:3000/api/serviceaccounts/${SA_ID}/tokens/${TOKEN_ID}" -u admin:admin > /dev/null || true
  done
  TOKEN_RESPONSE=$(curl -sf "http://127.0.0.1:3000/api/serviceaccounts/${SA_ID}/tokens" -H "Content-Type: application/json" -u admin:admin -d '{"name":"ai-tools-token"}' || true)
  SA_TOKEN=$(echo "$TOKEN_RESPONSE" | grep -o '"key":"[^"]*"' | cut -d'"' -f4)
  if [ -n "$SA_TOKEN" ]; then
    # The token grants Viewer access to Grafana — keep it owner-readable only.
    echo "${SA_TOKEN}" > /tmp/grafana-sa-token
    chmod 600 /tmp/grafana-sa-token
    mkdir -p /etc/lgtm
    # CONTAINER_RUNTIME is injected by run-lgtm.sh (docker or podman);
    # used only to print copy-pasteable commands for the host side.
    EXEC="${CONTAINER_RUNTIME:-docker} exec lgtm"
    # Unquoted heredoc delimiter on purpose: ${SA_TOKEN} must expand here.
    cat > /etc/lgtm/mcp.json <<MCPEOF
{
  "mcpServers": {
    "grafana": {
      "command": "uvx",
      "args": ["mcp-grafana"],
      "env": {
        "GRAFANA_URL": "http://localhost:3000",
        "GRAFANA_SERVICE_ACCOUNT_TOKEN": "${SA_TOKEN}"
      }
    },
    "tempo": {
      "url": "http://localhost:3200/api/mcp"
    }
  }
}
MCPEOF
    # Token is quoted in the generated script so unusual characters in a
    # future token format cannot break word-splitting on the host.
    cat > /etc/lgtm/claude-mcp-setup.sh <<SETUPEOF
#!/bin/bash
# Connect Claude Code to the LGTM stack
claude mcp add grafana -e GRAFANA_URL=http://localhost:3000 -e GRAFANA_SERVICE_ACCOUNT_TOKEN="${SA_TOKEN}" -- uvx mcp-grafana
claude mcp add --transport http tempo http://localhost:3200/api/mcp
SETUPEOF
    echo ""
    echo "AI Tool Integration (MCP):"
    echo " Claude Code: bash <($EXEC cat /etc/lgtm/claude-mcp-setup.sh)"
    echo " Other tools: $EXEC cat /etc/lgtm/mcp.json"
    echo " Docs: docs/mcp-integration.md"
  fi
fi
186+
134187
if [[ ${ENABLE_OBI:-false} == "true" ]]; then
135188
# Non-blocking check — don't delay readiness if OBI fails (e.g. missing capabilities)
136189
if curl -o /dev/null -sg "http://127.0.0.1:6060/metrics" -w "%{response_code}" 2>/dev/null | grep -q "200"; then
@@ -153,6 +206,7 @@ echo "Open ports:"
153206
echo " - 4317: OpenTelemetry GRPC endpoint"
154207
echo " - 4318: OpenTelemetry HTTP endpoint"
155208
echo " - 3000: Grafana (http://localhost:3000). User: admin, password: admin"
209+
echo " - 3200: Tempo endpoint (MCP at http://localhost:3200/api/mcp)"
156210
echo " - 4040: Pyroscope endpoint"
157211
echo " - 9090: Prometheus endpoint"
158212

docker/run-otelcol.sh

Lines changed: 7 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -78,5 +78,11 @@ if [[ -n ${OTEL_EXPORTER_OTLP_ENDPOINT:-} ||
7878
fi
7979
fi
8080

81+
# Opt-in debug exporter (OTEL_COLLECTOR_DEBUG_EXPORTER=true): pass an extra
# --config file that the collector merges on top of the base config, adding
# the debug exporters to every pipeline. An array (not a string relying on
# unquoted word-splitting, ShellCheck SC2086) keeps the flag intact even if
# the path ever contains whitespace; empty expansion adds no arguments.
extra_config_flags=()
if [[ ${OTEL_COLLECTOR_DEBUG_EXPORTER:-false} == "true" ]]; then
  echo "Enabling debug exporter for OpenTelemetry Collector"
  extra_config_flags+=("--config=file:./otelcol-config-debug.yaml")
fi

run_with_logging "OpenTelemetry Collector ${OPENTELEMETRY_COLLECTOR_VERSION}" "${ENABLE_LOGS_OTELCOL:-false}" \
  ./otelcol-contrib/otelcol-contrib --feature-gates service.profilesSupport --config=file:./otelcol-config.yaml ${secondary_config_file} "${extra_config_flags[@]}"

docker/tempo-config.yaml

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -27,6 +27,10 @@ memberlist:
2727
bind_addr: [127.0.0.1]
2828
bind_port: 7947
2929

# Enable Tempo's built-in MCP server so AI tools can run TraceQL queries
# directly (served by the query-frontend on port 3200 at /api/mcp; the
# port is published by run-lgtm.sh).
query_frontend:
  mcp_server:
    enabled: true
3034
querier:
3135
frontend_worker:
3236
frontend_address: 127.0.0.1:9096

docs/mcp-integration.md

Lines changed: 88 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,88 @@
1+
# AI Tool Integration (MCP)
2+
3+
The `grafana/otel-lgtm` image exposes [Model Context Protocol (MCP)][mcp] servers
4+
so AI coding tools (Claude, Cursor, etc.) can query your telemetry data directly.
5+
6+
## What you get
7+
8+
- **Traces**: query via TraceQL through Tempo's built-in MCP server
9+
- **Metrics**: query via PromQL through Grafana MCP
10+
- **Logs**: query via LogQL through Grafana MCP
11+
- **Dashboards**: list, read, and search dashboards through Grafana MCP
12+
13+
## Setup
14+
15+
1. Start the container:
16+
17+
```sh
18+
./run-lgtm.sh
19+
```
20+
21+
2. Get the MCP config:
22+
23+
```sh
24+
docker exec lgtm cat /etc/lgtm/mcp.json # or: podman exec ...
25+
```
26+
27+
3. Paste the JSON into your AI tool's MCP configuration.
28+
29+
For Claude Code, you can add the servers individually:
30+
31+
```sh
32+
# Get the service account token
33+
TOKEN=$(docker exec lgtm cat /tmp/grafana-sa-token)
34+
35+
# Add the Grafana MCP server (requires uvx)
36+
claude mcp add grafana \
37+
-e GRAFANA_URL=http://localhost:3000 \
38+
-e GRAFANA_SERVICE_ACCOUNT_TOKEN="$TOKEN" \
39+
-- uvx mcp-grafana
40+
41+
# Add the Tempo MCP server
42+
claude mcp add --transport http tempo http://localhost:3200/api/mcp
43+
```
44+
45+
## Backend mapping
46+
47+
| Component | MCP Server | Transport | What you can query |
48+
|------------|---------------|-----------|-------------------------------------|
49+
| Tempo | `tempo` | HTTP | Traces via TraceQL |
50+
| Grafana | `grafana` | stdio | Dashboards, PromQL, LogQL |
51+
52+
## Collector debug exporter
53+
54+
The OpenTelemetry Collector includes a debug exporter that logs all received
55+
telemetry to stdout. This is useful for verifying that data is flowing correctly.
56+
57+
Enable it by setting the environment variable before starting the container:
58+
59+
```sh
60+
OTEL_COLLECTOR_DEBUG_EXPORTER=true ./run-lgtm.sh
61+
```
62+
63+
This adds the `debug` exporter to the traces, metrics, and logs pipelines.
64+
The output appears in the collector's logs (enable with `ENABLE_LOGS_OTELCOL=true`
65+
or `ENABLE_LOGS_ALL=true`).
66+
67+
## OBI (eBPF auto-instrumentation)
68+
69+
When [OBI is enabled][obi-readme], it generates traces and RED metrics automatically.
70+
These are queryable via PromQL through the Grafana MCP server:
71+
72+
```promql
73+
# Number of instrumented processes
74+
obi_instrumented_processes
75+
76+
# HTTP request duration (RED metrics)
77+
http_server_request_duration_seconds_count{http_route="/rolldice"}
78+
```
79+
80+
See the [OBI section in README.md][obi-readme] for setup instructions.
81+
82+
## Pyroscope (continuous profiling)
83+
84+
Pyroscope collects continuous profiles on port 4040. Explore them in Grafana's
85+
**Explore > Profiles** view. There is no MCP integration for Pyroscope yet.
86+
87+
[mcp]: https://modelcontextprotocol.io/
88+
[obi-readme]: ../README.md#enable-obi-ebpf-auto-instrumentation

examples/python/oats.yaml

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -13,6 +13,8 @@ expected:
1313
# We also check that this exact log line is present in the logs, which some users rely on
1414
compose-logs:
1515
- "The OpenTelemetry collector and the Grafana LGTM stack are up and running. (created /tmp/ready)"
16+
- "AI Tool Integration (MCP):"
17+
- "docker exec lgtm cat /etc/lgtm/mcp.json"
1618
traces:
1719
- traceql: '{ span.http.route = "/rolldice" }'
1820
equals: "GET /rolldice"

run-lgtm.sh

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -64,6 +64,7 @@ $RUNTIME container run \
6464
"${OBI_FLAGS[@]}" \
6565
"${OBI_ENV_FLAGS[@]}" \
6666
-p 3000:3000 \
67+
-p 3200:3200 \
6768
-p 4040:4040 \
6869
-p 4317:4317 \
6970
-p 4318:4318 \
@@ -74,5 +75,6 @@ $RUNTIME container run \
7475
-v "${LOCAL_VOLUME}"/prometheus:/data/prometheus:"${MOUNT_OPTS}" \
7576
-v "${LOCAL_VOLUME}"/loki:/data/loki:"${MOUNT_OPTS}" \
7677
-e GF_PATHS_DATA=/data/grafana \
78+
-e CONTAINER_RUNTIME="$RUNTIME" \
7779
--env-file .env \
7880
"$IMAGE"

0 commit comments

Comments
 (0)