
Commit c571e71

Add sd-bridge script
1 parent b393ad4 commit c571e71

File tree

4 files changed: +132 -0 lines changed

framework/.changeset/v0.10.17.md

Lines changed: 1 addition & 0 deletions
@@ -0,0 +1 @@
+- Added SD-bridge script that queries each node’s /discovery endpoint and generates Prometheus targets, enabling automatic collection of LOOPP metrics
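The commit does not show what a node's /discovery endpoint returns; the sd-merge.sh script below only assumes it is a JSON array of Prometheus-style target groups, i.e. objects with targets and optional labels. A hypothetical response, with placeholder host, port, and label values, might look like:

    $ curl -s http://node0:6688/discovery
    [
      {
        "targets": ["node0:6688"],
        "labels": {
          "__metrics_path__": "/plugins/loopp-1/metrics",
          "loopp": "loopp-1"
        }
      }
    ]

Any __metrics_path__ label carried in the payload is what sd-merge.sh later copies into the scrape_path label, and Prometheus itself uses it as the scrape path for that target.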

framework/observability/compose/conf/prometheus.yml

Lines changed: 4 additions & 0 deletions
@@ -35,3 +35,7 @@ scrape_configs:
   - job_name: 'postgres_exporter_4'
     static_configs:
       - targets: ['postgres_exporter_4:9187']
+  - job_name: 'node-sd'
+    file_sd_configs:
+      - files: ["/etc/prometheus/targets/merged.json"]
+        refresh_interval: 15s
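The refresh_interval here matches the bridge's 15-second SLEEP, and file_sd also picks up on-disk changes as they happen, so newly discovered nodes appear within roughly one cycle and without a Prometheus reload. Assuming a reasonably recent Prometheus and the 9099:9090 port mapping from the compose file below, a quick way to check what was discovered is the targets API:

    # hypothetical check: list labels of targets in the node-sd scrape pool
    curl -s http://localhost:9099/api/v1/targets \
      | jq '.data.activeTargets[] | select(.scrapePool == "node-sd") | .labels'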

framework/observability/compose/docker-compose.yaml

Lines changed: 17 additions & 0 deletions
@@ -45,6 +45,7 @@ services:
     volumes:
       - /var/run/docker.sock:/var/run/docker.sock
       - ./conf/prometheus.yml:/etc/prometheus/prometheus.yml
+      - sd-targets:/etc/prometheus/targets
     ports:
       - '9099:9090'

@@ -135,13 +136,29 @@ services:
       - '9304:9187'
     restart: unless-stopped

+  sd-merge:
+    image: alpine:3.20
+    command: [ "/bin/sh","-c","apk add --no-cache bash curl jq docker-cli && exec bash scripts/sd-merge.sh" ]
+    volumes:
+      - /var/run/docker.sock:/var/run/docker.sock:ro
+      - sd-targets:/out
+      - ./scripts:/scripts:ro
+    environment:
+      LABEL_MATCH: "framework=ctf"
+      DISCOVERY_PATH: "/discovery"
+      DISCOVERY_PORT: "6688"
+      DISCOVERY_SCHEME: "http"
+      OUT: "/out/merged.json"
+      SLEEP: "15"
+
 volumes:
   loki_data:
   grafana_data:
   grafana_home:
   grafana_logs:
   grafana_plugins:
   tempo_data:
+  sd-targets:

 networks:
   default:
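For a node container to be discovered it only needs a label matching LABEL_MATCH (framework=ctf); the prom_sd_path, prom_sd_port, and prom_sd_scheme labels read by sd-merge.sh below are optional per-container overrides of the defaults above. A hypothetical node service (name and image are placeholders) could opt in like this:

    services:
      node0:
        image: my-node-image:latest   # placeholder
        labels:
          framework: "ctf"            # required: matches LABEL_MATCH
          prom_sd_port: "6688"        # optional, overrides DISCOVERY_PORT
          prom_sd_path: "/discovery"  # optional, overrides DISCOVERY_PATH
          prom_sd_scheme: "http"      # optional, overrides DISCOVERY_SCHEME
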
Lines changed: 110 additions & 0 deletions
@@ -0,0 +1,110 @@
#!/usr/bin/env bash
# sd-merge.sh
# Discover Docker containers by label, fetch each container's /discovery JSON,
# add labels (container_name, scrape_path), merge + dedupe, and write a single file_sd JSON.

set -Eeuo pipefail

# -------------------- Configuration (via env) --------------------
LABEL_MATCH="${LABEL_MATCH:-framework=ctf}"      # docker ps --filter "label=$LABEL_MATCH"
DEFAULT_PATH="${DISCOVERY_PATH:-/discovery}"     # default discovery path inside each container
DEFAULT_PORT="${DISCOVERY_PORT:-6688}"           # default discovery port
DEFAULT_SCHEME="${DISCOVERY_SCHEME:-http}"       # http or https
PREFER_NETWORK="${NETWORK_NAME:-}"               # prefer IP from this Docker network (optional)
OUT="${OUT:-/out/merged.json}"                   # file_sd output path
SLEEP="${SLEEP:-15}"                             # seconds between scans
REQUEST_TIMEOUT="${REQUEST_TIMEOUT:-5}"          # curl timeout (s)
REWRITE_TO_IP="${REWRITE_TO_IP:-0}"              # 1 = replace host with container IP in targets

# -------------------- Helpers --------------------
log(){ printf '[sd-merge] %s\n' "$*" >&2; }

# Atomic writer: reads stdin, writes to $1.tmp, then mv -> $1
atomic_write(){
  local path="$1" tmp="$1.tmp"
  cat > "$tmp" && mv "$tmp" "$path"
}

# -------------------- Init --------------------
mkdir -p "$(dirname "$OUT")"
echo '[]' | atomic_write "$OUT"

# -------------------- Main loop --------------------
while true; do
  # List container IDs matching the label
  mapfile -t cids < <(docker ps -q --filter "label=$LABEL_MATCH" || true)

  if ((${#cids[@]} == 0)); then
    echo '[]' | atomic_write "$OUT"
    log "no matching containers; wrote empty array"
    sleep "$SLEEP"
    continue
  fi

  # Emit each container's (possibly empty) discovery array, then merge once with jq -s
  {
    for cid in "${cids[@]}"; do
      # Inspect once, reuse for IP, name, and labels
      inspect="$(docker inspect "$cid" 2>/dev/null || true)"
      [[ -z "$inspect" ]] && { log "skip ${cid:0:12}: inspect failed"; echo '[]'; continue; }

      # Resolve container IP (optionally prefer a specific network)
      if [[ -n "$PREFER_NETWORK" ]]; then
        ip="$(jq -r --arg n "$PREFER_NETWORK" '.[0].NetworkSettings.Networks[$n].IPAddress // ""' <<<"$inspect")"
      else
        ip="$(jq -r '.[0].NetworkSettings.Networks | to_entries[0].value.IPAddress // ""' <<<"$inspect")"
      fi
      [[ -z "$ip" ]] && { log "skip ${cid:0:12}: no IP"; echo '[]'; continue; }

      # Container name and optional per-container overrides
      name="$(jq -r '.[0].Name | ltrimstr("/")' <<<"$inspect")"
      path="$(jq -r '.[0].Config.Labels.prom_sd_path // empty' <<<"$inspect")"; path="${path:-$DEFAULT_PATH}"
      port="$(jq -r '.[0].Config.Labels.prom_sd_port // empty' <<<"$inspect")"; port="${port:-$DEFAULT_PORT}"
      scheme="$(jq -r '.[0].Config.Labels.prom_sd_scheme // empty' <<<"$inspect")"; scheme="${scheme:-$DEFAULT_SCHEME}"

      url="${scheme}://${ip}:${port}${path}"

      # Fetch discovery JSON; treat errors as empty array
      payload="$(curl -fsSL --max-time "$REQUEST_TIMEOUT" "$url" 2>/dev/null || echo '[]')"

      # Normalize to array, add labels, optional host->IP rewrite while keeping port from targets
      if [[ "$REWRITE_TO_IP" == "1" ]]; then
        jq --arg ip "$ip" --arg name "$name" '
          (if type=="array" then . else [] end)
          | map(
              .targets |= map( $ip + ":" + (split(":")[1]) ) |
              .labels = ((.labels // {}) + {
                container_name: $name,
                scrape_path: (.labels.__metrics_path__ // "")
              })
            )
        ' <<<"$payload"
      else
        jq --arg name "$name" '
          (if type=="array" then . else [] end)
          | map(
              .labels = ((.labels // {}) + {
                container_name: $name,
                scrape_path: (.labels.__metrics_path__ // "")
              })
            )
        ' <<<"$payload"
      fi

      log "ok $url"
    done
  } \
  | jq -s '
      # Merge all arrays, coerce to {targets,labels}, then group by labels and dedupe targets
      add // []
      | map({targets: (.targets // []), labels: (.labels // {})})
      | group_by(.labels)
      | map({ labels: (.[0].labels)
            , targets: ([.[].targets[]] | unique | sort)
            })
    ' \
  | atomic_write "$OUT"

  log "wrote $(wc -c < "$OUT") bytes to $OUT"
  sleep "$SLEEP"
done
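Assuming the hypothetical /discovery payload shown earlier, a single container named node0, and REWRITE_TO_IP left at 0, the merged.json the script writes would be a standard file_sd target list with the bridge's labels folded in:

    [
      {
        "labels": {
          "__metrics_path__": "/plugins/loopp-1/metrics",
          "loopp": "loopp-1",
          "container_name": "node0",
          "scrape_path": "/plugins/loopp-1/metrics"
        },
        "targets": ["node0:6688"]
      }
    ]

Target groups whose labels end up identical are collapsed into a single entry with their targets deduplicated and sorted, which is what the final jq -s stage is for.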
