Trigger n8n Webhook with Complete PR Info #42

name: Trigger n8n Webhook with Complete PR Info

on:
  pull_request:
    types: [opened, reopened]
  workflow_dispatch:
    inputs:
      pr_number:
        description: "Pull request number to process (maintainer-triggered runs)"
        required: true
        type: string

permissions:
  contents: write
  pull-requests: write
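
# Required configuration, inferred from the steps below:
#   secrets: N8N_WEBHOOK_URL, N8N_SENDING_TOKEN, N8N_RECEIVING_TOKEN,
#            and optionally ERROR_WEBHOOK_URL for failure alerts
#   vars:    N8N_MIN_DIFF_CHAR_THRESHOLD (optional; the workflow defaults to 1000)
# The jobs bind to the `n8n-sending` and `n8n-receiving` environments, so these
# secrets may also be scoped to those environments.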
jobs:
  gather-and-send:
    # Manual dispatch is allowed, but we verify the actor has write access before continuing.
    # Pull request runs still skip forks to avoid leaking secrets.
    if: ${{ github.event_name == 'workflow_dispatch' || (github.event.pull_request != null && github.event.pull_request.head.repo.fork == false) }}
    runs-on: ubuntu-latest
    environment: n8n-sending
    outputs:
      filtered_file_count: ${{ steps.filter_files.outputs.file_count }}
      # Master gate for the downstream job: files survived filtering, the diff
      # met the size threshold, and the send neither cancelled nor errored.
      should_send: ${{ steps.filter_files.outputs.file_count != '0' && steps.diff_threshold.outputs.skip_send != 'true' && steps.send_payload.outputs.cancelled != 'true' && steps.send_payload.outputs.error != 'true' }}
      diff_char_count: ${{ steps.diff_threshold.outputs.diff_char_count }}
      pr_number: ${{ steps.run_context.outputs.pr_number }}
      repository: ${{ steps.run_context.outputs.repository }}
      run_url: ${{ steps.run_context.outputs.run_url }}
      response_payload: ${{ steps.capture_payload.outputs.payload }}
      cancelled: ${{ steps.send_payload.outputs.cancelled }}
      send_executed: ${{ steps.send_payload.outputs.executed }}
      send_error: ${{ steps.send_payload.outputs.error }}
    env:
      PR_NUMBER: ${{ github.event.pull_request.number || github.event.inputs.pr_number }}
      IS_MANUAL_RUN: ${{ github.event_name == 'workflow_dispatch' }}
      MIN_DIFF_CHAR_THRESHOLD: ${{ vars.N8N_MIN_DIFF_CHAR_THRESHOLD || '1000' }}
    steps:
      # Step 1: Checkout repository
      - name: Checkout repository
        uses: actions/checkout@v4
      # Step 2: Generate unique run token
      - name: Generate Run UUID
        id: uuid
        run: echo "run_token=$(uuidgen)" >> "$GITHUB_OUTPUT"
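      # The UUID acts as a per-run nonce: it travels to n8n inside the payload,
      # and the send step later requires n8n to echo it back before the response
      # is trusted.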
      - name: Capture run context
        id: run_context
        run: |
          echo "pr_number=${PR_NUMBER}" >> "$GITHUB_OUTPUT"
          echo "repository=${GITHUB_REPOSITORY}" >> "$GITHUB_OUTPUT"
          echo "run_url=${GITHUB_SERVER_URL:-https://github.com}/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}" >> "$GITHUB_OUTPUT"
      # Step 2b: Require dispatcher to have access
      - name: Validate dispatcher permissions
        if: ${{ github.event_name == 'workflow_dispatch' }}
        env:
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          REPO: ${{ github.repository }}
          ACTOR: ${{ github.actor }}
        run: |
          set -euo pipefail
          PERMISSION=$(gh api \
            -H "Accept: application/vnd.github+json" \
            "repos/${REPO}/collaborators/${ACTOR}/permission" \
            --jq '.permission // ""')
          case "$PERMISSION" in
            admin|maintain|write)
              echo "✅ ${ACTOR} has ${PERMISSION} permission."
              ;;
            *)
              echo "❌ ${ACTOR} lacks write or maintain access (permission='${PERMISSION}')."
              exit 1
              ;;
          esac
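      # For reference, the collaborators permission endpoint replies with a shape
      # like (abridged): { "permission": "write", "user": { "login": "octocat" } }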
      # Step 3: Pre-flight validation
      - name: Validate setup
        env:
          # Secrets are passed via env rather than interpolated into the script body.
          GH_TOKEN_CHECK: ${{ secrets.GITHUB_TOKEN }}
          N8N_TOKEN_CHECK: ${{ secrets.N8N_SENDING_TOKEN }}
        run: |
          if [[ -z "${GH_TOKEN_CHECK}" ]]; then
            echo "Missing GITHUB_TOKEN secret."
            exit 1
          fi
          if [[ -z "${PR_NUMBER}" ]]; then
            echo "No PR number provided. For manual runs, supply the pr_number input."
            exit 1
          fi
          if [[ -z "${N8N_TOKEN_CHECK}" ]]; then
            echo "Missing N8N_SENDING_TOKEN secret."
            exit 1
          fi
          if [[ "${IS_MANUAL_RUN}" == "true" ]]; then
            echo "ℹ️ Maintainer-triggered run for PR #${PR_NUMBER}"
          fi
      # Step 4: Fetch PR metadata, files, commits
      - name: Fetch PR metadata
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          gh api "repos/${{ github.repository }}/pulls/${PR_NUMBER}" > pr.json
      - name: Fetch PR files
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          gh api --paginate "repos/${{ github.repository }}/pulls/${PR_NUMBER}/files" --jq '.[]' | jq -s '.' > files.json
      - name: Fetch PR commits
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          # Paginate like the files fetch so PRs with more than one page of commits are complete.
          gh api --paginate "repos/${{ github.repository }}/pulls/${PR_NUMBER}/commits" --jq '.[]' | jq -s '.' > commits.json
      # Step 5: Download raw PR diff
      - name: Fetch PR diff
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          curl -sSL --fail \
            -H "Authorization: Bearer ${GITHUB_TOKEN}" \
            -H "Accept: application/vnd.github.v3.diff" \
            "https://api.github.com/repos/${{ github.repository }}/pulls/${PR_NUMBER}" \
            > pr.diff
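          # The Accept header above requests the raw unified diff instead of the
          # endpoint's default JSON representation.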
      # Step 6: Remove .ai and llms files from metadata
      - name: Filter excluded files
        id: filter_files
        run: |
          set -euo pipefail
          python3 - <<'PY'
          import json
          import pathlib
          import re
          import sys

          files_path = pathlib.Path("files.json")
          try:
              raw = files_path.read_text()
          except FileNotFoundError:
              print("files.json not found", file=sys.stderr)
              sys.exit(1)
          try:
              data = json.loads(raw)
          except json.JSONDecodeError as exc:
              print(f"Unable to decode files.json as JSON: {exc}", file=sys.stderr)
              print(raw)
              data = []
          if not isinstance(data, list):
              print("files.json payload is not an array; logging contents and treating as empty list.")
              print(raw)
              data = []
          pattern_ai_dir = re.compile(r'(^|/)\.ai(/|$)', re.IGNORECASE)
          pattern_llms = re.compile(r'llms', re.IGNORECASE)
          filtered = []
          for item in data:
              if not isinstance(item, dict):
                  continue
              filename = item.get("filename") or ""
              status = (item.get("status") or "").lower()
              if status == "removed":
                  continue
              if pattern_ai_dir.search(filename) or pattern_llms.search(filename):
                  continue
              filtered.append(item)
          files_path.write_text(json.dumps(filtered))
          pathlib.Path("file_count.txt").write_text(str(len(filtered)))
          PY
          FILE_COUNT=$(cat file_count.txt)
          echo "file_count=${FILE_COUNT}" >> "$GITHUB_OUTPUT"
          echo "Remaining files after filter: ${FILE_COUNT}"
      # Step 7: Strip excluded paths from diff payload
      - name: Strip excluded paths from diff
        run: |
          set -euo pipefail
          python3 - <<'PY'
          import pathlib

          diff_path = pathlib.Path("pr.diff")
          if not diff_path.exists():
              raise SystemExit("pr.diff missing")

          def is_excluded(path: str) -> bool:
              lower = path.lower()
              if "/.ai/" in lower or lower.startswith(".ai/") or lower.endswith("/.ai"):
                  return True
              if "llms" in lower:
                  return True
              return False

          out_lines = []
          current_chunk = []
          exclude_chunk = False
          with diff_path.open("r", encoding="utf-8", errors="replace") as diff_file:
              for line in diff_file:
                  # "diff --git a/<path> b/<path>" headers delimit per-file chunks.
                  if line.startswith("diff --git "):
                      if current_chunk and not exclude_chunk:
                          out_lines.extend(current_chunk)
                      current_chunk = [line]
                      exclude_chunk = False
                      parts = line.strip().split()
                      if len(parts) >= 4:
                          # [2:] strips the "a/" / "b/" prefixes.
                          a_path = parts[2][2:]
                          b_path = parts[3][2:]
                          if is_excluded(a_path) or is_excluded(b_path):
                              exclude_chunk = True
                  else:
                      current_chunk.append(line)
          if current_chunk and not exclude_chunk:
              out_lines.extend(current_chunk)
          diff_path.write_text("".join(out_lines), encoding="utf-8")
          PY
      # Step 8: Compress & mask diff for payload
      - name: Compress filtered diff
        run: |
          gzip -c pr.diff > pr.diff.gz
          base64 -w 0 pr.diff.gz > diff.b64
          python3 - <<'PY'
          from pathlib import Path

          chunk_size = 8000
          diff_contents = Path("diff.b64").read_text()
          for start in range(0, len(diff_contents), chunk_size):
              chunk = diff_contents[start:start + chunk_size]
              if chunk:
                  print(f"::add-mask::{chunk}")
          PY
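      # ::add-mask:: registers each printed value for log redaction. The base64
      # diff is masked in 8000-character chunks; the chunk size is a pragmatic
      # choice for very long values, not a documented Actions limit.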
      # Step 9: Inspect payload sizes for debugging
      - name: Debug payload size
        run: |
          echo "PR metadata size: $(stat -c%s pr.json) bytes"
          echo "Files metadata size: $(stat -c%s files.json) bytes"
          echo "Commits metadata size: $(stat -c%s commits.json) bytes"
          echo "Compressed diff size: $(stat -c%s pr.diff.gz) bytes"
      # Step 9b: Evaluate diff length against threshold
      - name: Evaluate diff char threshold
        id: diff_threshold
        run: |
          set -euo pipefail
          THRESHOLD="${MIN_DIFF_CHAR_THRESHOLD}"
          if [[ ! "$THRESHOLD" =~ ^[0-9]+$ ]]; then
            echo "Invalid MIN_DIFF_CHAR_THRESHOLD='$THRESHOLD'; defaulting to 0"
            THRESHOLD=0
          fi
          CHAR_COUNT=$(wc -c < pr.diff | tr -d '[:space:]')
          echo "diff_char_count=${CHAR_COUNT}" >> "$GITHUB_OUTPUT"
          echo "threshold=${THRESHOLD}" >> "$GITHUB_OUTPUT"
          if (( CHAR_COUNT < THRESHOLD )); then
            echo "skip_send=true" >> "$GITHUB_OUTPUT"
            echo "Filtered diff char count ${CHAR_COUNT} is below threshold ${THRESHOLD}; skipping downstream send."
          else
            echo "skip_send=false" >> "$GITHUB_OUTPUT"
            echo "Filtered diff char count ${CHAR_COUNT} meets threshold ${THRESHOLD}; continuing."
          fi
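          # Example with the default threshold of 1000: a 980-char filtered diff
          # sets skip_send=true, while exactly 1000 chars proceeds, since the
          # comparison is strict less-than.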
      # Step 9c: Short-circuit when below threshold
      - name: Diff below threshold
        if: ${{ steps.filter_files.outputs.file_count != '0' && steps.diff_threshold.outputs.skip_send == 'true' }}
        run: |
          echo "ℹ️ Diff char count (${{ steps.diff_threshold.outputs.diff_char_count }}) is below threshold (${{ steps.diff_threshold.outputs.threshold }}). Skipping n8n send."
      # Step 10: Send consolidated payload to n8n
      - name: Combine and send to n8n webhook
        id: send_payload
        if: ${{ steps.filter_files.outputs.file_count != '0' && steps.diff_threshold.outputs.skip_send != 'true' }}
        env:
          N8N_WEBHOOK_URL: ${{ secrets.N8N_WEBHOOK_URL }}
          N8N_SENDING_TOKEN: ${{ secrets.N8N_SENDING_TOKEN }}
          RUN_TOKEN: ${{ steps.uuid.outputs.run_token }}
        run: |
          set -euo pipefail
          echo "cancelled=false" >> "$GITHUB_OUTPUT"
          echo "executed=true" >> "$GITHUB_OUTPUT"
          echo "error=false" >> "$GITHUB_OUTPUT"
          python3 - <<'PY'
          import json
          import os
          from pathlib import Path

          root = Path(".")

          def read_json(filename):
              path = root / filename
              try:
                  return json.loads(path.read_text())
              except FileNotFoundError:
                  raise SystemExit(f"{filename} not found")
              except json.JSONDecodeError as exc:
                  raise SystemExit(f"Unable to parse {filename}: {exc}")

          pr = read_json("pr.json")
          files = read_json("files.json")
          commits = read_json("commits.json")
          diff_path = root / "diff.b64"
          try:
              diff_base64 = diff_path.read_text().replace("\n", "")
          except FileNotFoundError:
              raise SystemExit("diff.b64 not found")
          payload = {
              "pr": pr,
              "files": files,
              "commits": commits,
              "diff_base64": diff_base64,
              "token": os.environ.get("RUN_TOKEN", ""),
              "n8n_sending_token": os.environ.get("N8N_SENDING_TOKEN", "")
          }
          (root / "payload.json").write_text(json.dumps(payload))
          PY
          echo "::add-mask::$N8N_SENDING_TOKEN"
          PAYLOAD_SIZE=$(stat -c%s payload.json)
          MAX_BYTES=$((10*1024*1024))
          if (( PAYLOAD_SIZE > MAX_BYTES )); then
            echo "Payload too large ($PAYLOAD_SIZE bytes)."
            echo "error=true" >> "$GITHUB_OUTPUT"
            exit 0
          fi
          RESPONSE=$(curl -s -w "\n%{http_code}" -X POST \
            -H "Content-Type: application/json" \
            --data-binary @payload.json \
            "$N8N_WEBHOOK_URL")
          HTTP_BODY=$(echo "$RESPONSE" | sed '$d')
          HTTP_STATUS=$(echo "$RESPONSE" | tail -n1)
          echo "n8n responded with status: $HTTP_STATUS"
          echo "$HTTP_BODY" > response_body.json
          # Check the transport status before parsing the body: a failed request
          # may return non-JSON, which would otherwise abort the jq calls below
          # under `set -e` instead of recording error=true.
          if [ "$HTTP_STATUS" -lt 200 ] || [ "$HTTP_STATUS" -ge 300 ]; then
            echo "n8n workflow failed (HTTP $HTTP_STATUS)"
            echo "error=true" >> "$GITHUB_OUTPUT"
            exit 0
          fi
          STATUS=$(jq -r '.status // empty' response_body.json)
          MATCHED=$(jq -r '.token // empty' response_body.json)
          if [ "$STATUS" = "cancelled" ]; then
            echo "n8n workflow reported cancellation; skipping downstream processing."
            echo "cancelled=true" >> "$GITHUB_OUTPUT"
            exit 0
          fi
          if [ "$MATCHED" != "$RUN_TOKEN" ] || [ "$STATUS" != "completed" ]; then
            echo "n8n workflow failed or token mismatch"
            echo "error=true" >> "$GITHUB_OUTPUT"
            exit 0
          fi
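      # n8n response contract assumed by the checks above and by the receive job
      # below (n8n may wrap the object in a single-element array; consumers
      # handle both forms):
      #   {
      #     "status": "completed" | "cancelled",
      #     "token": "<run UUID echoed back>",
      #     "receiving_token": "<must match N8N_RECEIVING_TOKEN>",
      #     "prepared_comment_payloads": [ ... ],
      #     "repo_owner": "...", "repo_name": "...", "pr_number": ...
      #   }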
      - name: Capture response payload
        id: capture_payload
        if: ${{ steps.filter_files.outputs.file_count != '0' && steps.diff_threshold.outputs.skip_send != 'true' && steps.send_payload.outputs.cancelled != 'true' && steps.send_payload.outputs.error != 'true' }}
        run: |
          set -euo pipefail
          if [ ! -s response_body.json ]; then
            echo "payload=" >> "$GITHUB_OUTPUT"
            exit 0
          fi
          base64 -w0 response_body.json > response_body.b64
          echo "payload=$(cat response_body.b64)" >> "$GITHUB_OUTPUT"
          rm -f response_body.b64
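      # Job outputs must be single-line strings, so the JSON response travels to
      # the next job base64-encoded. GitHub also caps output sizes (on the order
      # of 1 MB per output), which bounds how large the n8n response can be.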
      - name: Enforce n8n send failure
        if: ${{ steps.send_payload.outputs.error == 'true' }}
        run: |
          echo "n8n send step reported failure. Marking job as failed."
          exit 1
      # Step 11: Short-circuit when nothing qualifies
      - name: No eligible files to send
        if: ${{ steps.filter_files.outputs.file_count == '0' }}
        run: echo "ℹ️ No eligible files after filtering llms/.ai paths. Skipping n8n send."
  receive-validate-and-comment:
    runs-on: ubuntu-latest
    needs: gather-and-send
    if: ${{ needs.gather-and-send.result == 'success' && needs.gather-and-send.outputs.filtered_file_count != '0' && needs.gather-and-send.outputs.should_send == 'true' }}
    environment: n8n-receiving
    steps:
      # Step 13: Re-checkout repo (fresh workspace)
      - name: Checkout repository
        uses: actions/checkout@v4
      # Step 14: Restore response payload from upstream output
      - name: Restore response payload
        env:
          # Passed via env rather than interpolated into the script so the job
          # output cannot break out of shell quoting.
          RESPONSE_PAYLOAD: ${{ needs.gather-and-send.outputs.response_payload }}
        run: |
          set -euo pipefail
          if [ -z "$RESPONSE_PAYLOAD" ]; then
            echo "No response payload found; exiting."
            exit 1
          fi
          printf '%s' "$RESPONSE_PAYLOAD" | base64 -d > response_body.json
      # Step 15: Confirm handshake token
      - name: Validate receiving token
        env:
          EXPECTED_TOKEN: ${{ secrets.N8N_RECEIVING_TOKEN }}
        run: |
          set -euo pipefail
          RECEIVED_TOKEN=$(jq -r 'if type=="array" then .[0].receiving_token else .receiving_token end // empty' response_body.json)
          if [ -z "$RECEIVED_TOKEN" ]; then
            echo "No receiving_token provided by n8n"
            exit 1
          fi
          if [ "$RECEIVED_TOKEN" != "$EXPECTED_TOKEN" ]; then
            echo "Receiving token mismatch"
            exit 1
          fi
          echo "✅ Receiving token validated successfully"
      # Step 16: Prepare grouped review batches
      - name: Prepare batched review payloads
        run: |
          python3 - <<'PY'
          import json
          import pathlib

          data = json.loads(pathlib.Path("response_body.json").read_text() or "null")
          if isinstance(data, list):
              data = data[0] if data else {}
          payloads = data.get("prepared_comment_payloads") or []

          def normalize(raw):
              path = raw.get("path") or raw.get("file_path")
              if not path:
                  return None
              body = (raw.get("body") or "").strip()
              if not body:
                  body = "```suggestion\n```\n"
              comment = {"path": path, "body": body}
              if "line" in raw and raw["line"] is not None:
                  comment["line"] = raw["line"]
              if raw.get("side"):
                  comment["side"] = raw["side"]
              if raw.get("start_line") is not None:
                  comment["start_line"] = raw["start_line"]
                  comment["start_side"] = raw.get("start_side", "RIGHT")
              elif "position" in raw and raw["position"] is not None:
                  comment["position"] = raw["position"]
              commit_id = raw.get("commit_id")
              return comment, commit_id

          normalized = [normalize(raw) for raw in payloads]
          normalized = [item for item in normalized if item is not None]
          seen = set()
          deduped = []
          for comment, cid in normalized:
              key = (cid or "", json.dumps(comment, sort_keys=True))
              if key in seen:
                  continue
              seen.add(key)
              deduped.append((comment, cid))
          comments = [item[0] for item in deduped]
          commit_ids = [item[1] for item in deduped]
          chunk_size = 30  # post suggestions in batches of 30 comments per review
          total = len(comments)
          batches = []
          for start in range(0, total, chunk_size):
              chunk = comments[start:start + chunk_size]
              summary = f"Automated style guide suggestions ({start + 1}-{start + len(chunk)} of {total})"
              chunk_commit_ids = {cid for cid in commit_ids[start:start + chunk_size] if cid}
              batch = {"body": summary, "comments": chunk}
              # Only pin a commit_id when the whole batch agrees on a single one.
              if len(chunk_commit_ids) == 1:
                  batch["commit_id"] = chunk_commit_ids.pop()
              batches.append(batch)
          output = {
              "status": data.get("status"),
              "owner": data.get("repo_owner"),
              "repo": data.get("repo_name"),
              "pr": data.get("pr_number"),
              "batches": batches
          }
          pathlib.Path("review_batches.json").write_text(json.dumps(output, indent=2))
          PY
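      # Assumed shape of each prepared_comment_payloads entry, inferred from
      # normalize() above; the field names mirror GitHub's review-comment
      # parameters, and the values here are illustrative:
      #   { "path": "docs/guide.md", "line": 42, "side": "RIGHT",
      #     "start_line": 40, "start_side": "RIGHT",
      #     "body": "...suggestion text...", "commit_id": "<head sha>" }
      # Legacy entries may instead carry "file_path" and/or "position".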
      # Step 17: Post grouped inline suggestions
      - name: Post batched inline suggestions
        env:
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          set -euo pipefail
          STATUS=$(jq -r '.status // empty' review_batches.json)
          if [ "$STATUS" != "completed" ]; then
            echo "n8n output not completed (status='$STATUS')"
            jq . response_body.json || true
            exit 1
          fi
          OWNER=$(jq -r '.owner // empty' review_batches.json)
          REPO=$(jq -r '.repo // empty' review_batches.json)
          PR=$(jq -r '.pr // empty' review_batches.json)
          if [ -z "$OWNER" ] || [ -z "$REPO" ] || [ -z "$PR" ]; then
            echo "Missing repo context."
            jq . response_body.json || true
            exit 1
          fi
          BATCH_COUNT=$(jq '.batches | length' review_batches.json)
          if [ "$BATCH_COUNT" -eq 0 ]; then
            echo "No suggestions returned; nothing to post."
            exit 0
          fi
          echo "Posting $BATCH_COUNT suggestion batch(es) to $OWNER/$REPO#${PR}"
          for index in $(jq -r '.batches | keys[]' review_batches.json); do
            BODY_SUMMARY=$(jq -r ".batches[$index].body" review_batches.json)
            COMMENTS=$(jq ".batches[$index].comments" review_batches.json)
            COMMIT_ID=$(jq -r ".batches[$index].commit_id // empty" review_batches.json)
            if [ -n "$COMMIT_ID" ]; then
              PAYLOAD=$(jq -n --arg body "$BODY_SUMMARY" --argjson comments "$COMMENTS" --arg commit "$COMMIT_ID" '{event:"COMMENT", body:$body, comments:$comments, commit_id:$commit}')
            else
              PAYLOAD=$(jq -n --arg body "$BODY_SUMMARY" --argjson comments "$COMMENTS" '{event:"COMMENT", body:$body, comments:$comments}')
            fi
            RESP=$(curl -sS -w "%{http_code}" -X POST \
              -H "Authorization: Bearer ${GH_TOKEN}" \
              -H "Accept: application/vnd.github.v3+json" \
              "https://api.github.com/repos/${OWNER}/${REPO}/pulls/${PR}/reviews" \
              -d "$PAYLOAD")
            CODE="${RESP:(-3)}"
            BODY="${RESP%"$CODE"}"
            if [[ "$CODE" -lt 200 || "$CODE" -ge 300 ]]; then
              echo "❌ Failed to post review batch (HTTP $CODE)"
              echo "$BODY"
              exit 1
            fi
            URL=$(echo "$BODY" | jq -r '.html_url // empty')
            echo "✅ Posted review batch ${index} ${URL:+→ $URL}"
          done
          echo "—— Skipped upstream (for visibility) ——"
          jq -r 'if type=="array" then .[0] else . end | .skipped_comments // [] | .[] | "\(.file_path // "-")#L\(.line_number // "-"): \(.reason // "-")"' response_body.json
          echo "✅ Done. Suggestions posted."
      # Step 18: Publish any verification reviews
      - name: Post verification comments
        env:
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          set -euo pipefail
          RESPONSE_BODY=$(cat response_body.json)
          # Gather verification-type reviews from the shapes n8n may return:
          # a top-level `reviews` array, a `review` object/array, or any nested
          # object tagged type=="verification". Results are de-duplicated.
          VERIFS=$(
            echo "$RESPONSE_BODY" | jq -c '
              (if type=="array" then .[0] else . end) as $o
              | def to_arr(x):
                  if (x|type) == "array" then x
                  elif (x|type) == "object" then [x]
                  elif (x|type) == "string" then (try (x|fromjson) catch [])
                  else [] end;
              [
                (to_arr($o.reviews) // []) as $r1
                  | ($r1 | map(select((.type? | tostring | ascii_downcase) == "verification"))
                         | map(.formattedReview // .review // .body // .comment // .text // .content // "")),
                (to_arr($o.review) // []) as $r2
                  | ($r2 | map(select((.type? | tostring | ascii_downcase) == "verification"))
                         | map(.formattedReview // .review // .body // .comment // .text // .content // "")),
                ($o | .. | objects
                   | select((.type? | tostring | ascii_downcase) == "verification")
                   | (.formattedReview // .review // .body // .comment // .text // .content // ""))
              ]
              | flatten
              | map(select(type == "string" and (.|length) > 0))
              | unique
            '
          )
          COUNT=$(echo "$VERIFS" | jq 'length')
          if [ "$COUNT" -eq 0 ]; then
            echo "No verification reviews found; skipping."
            exit 0
          fi
          OWNER=$(echo "$RESPONSE_BODY" | jq -r 'if type=="array" then .[0].repo_owner else .repo_owner end // empty')
          REPO=$(echo "$RESPONSE_BODY" | jq -r 'if type=="array" then .[0].repo_name else .repo_name end // empty')
          PR=$(echo "$RESPONSE_BODY" | jq -r 'if type=="array" then .[0].pr_number else .pr_number end // empty')
          if [ -z "$OWNER" ] || [ -z "$REPO" ] || [ -z "$PR" ]; then
            echo "Missing repo context; skipping verification comments."
            exit 0
          fi
          echo "Found $COUNT verification review(s). Preview:"
          echo "$VERIFS" | jq -r 'to_entries[] | "\(.key): " + (.value | .[0:160] + (if length>160 then "…" else "" end))'
          # NUL-delimit the bodies so multi-line comments survive the read loop.
          echo "$VERIFS" | jq -r '.[] + "\u0000"' | while IFS= read -r -d '' BODY; do
            if [ -z "${BODY//[$'\t\r\n ']}" ]; then
              echo "Skipping empty verification body"
              continue
            fi
            RESP=$(curl -sS -w "%{http_code}" -X POST \
              -H "Authorization: Bearer ${GH_TOKEN}" \
              -H "Accept: application/vnd.github.v3+json" \
              "https://api.github.com/repos/${OWNER}/${REPO}/issues/${PR}/comments" \
              -d "$(jq -nc --arg b "$BODY" '{body:$b}')")
            CODE="${RESP:(-3)}"
            RBODY="${RESP%"$CODE"}"
            if [[ "$CODE" -lt 200 || "$CODE" -ge 300 ]]; then
              echo "❌ Failed to post verification comment (HTTP $CODE)"
              echo "$RBODY"
              exit 1
            fi
            url=$(echo "$RBODY" | jq -r '.html_url // empty')
            echo "✅ Posted verification review ${url:+→ $url}"
          done
          echo "✅ Done posting verification reviews."
      # Step 18b: Clean up response artifacts
      - name: Remove response payload
        if: ${{ always() }}
        run: |
          rm -f response_body.json review_batches.json || true
  notify-on-failure:
    runs-on: ubuntu-latest
    needs:
      - gather-and-send
      - receive-validate-and-comment
    if: ${{ always() && ((needs.gather-and-send.result == 'failure' && (needs.gather-and-send.outputs.send_executed != 'true' || needs.gather-and-send.outputs.send_error == 'true')) || needs.receive-validate-and-comment.result == 'failure') }}
    env:
      GATHER_RESULT: ${{ needs.gather-and-send.result }}
      RECEIVE_RESULT: ${{ needs.receive-validate-and-comment.result }}
      PR_NUMBER: ${{ needs.gather-and-send.outputs.pr_number }}
      REPOSITORY: ${{ needs.gather-and-send.outputs.repository || github.repository }}
      RUN_URL: ${{ needs.gather-and-send.outputs.run_url || format('{0}/{1}/actions/runs/{2}', github.server_url, github.repository, github.run_id) }}
      WORKFLOW_NAME: ${{ github.workflow }}
      RUN_ID: ${{ github.run_id }}
      RUN_ATTEMPT: ${{ github.run_attempt }}
      ERROR_WEBHOOK_URL: ${{ secrets.ERROR_WEBHOOK_URL }}
      ACTOR: ${{ github.actor }}
      SEND_EXECUTED: ${{ needs.gather-and-send.outputs.send_executed }}
      SEND_ERROR: ${{ needs.gather-and-send.outputs.send_error }}
    steps:
      - name: Evaluate failure state
        id: alert
        run: |
          set -euo pipefail
          gather="${GATHER_RESULT:-unknown}"
          receive="${RECEIVE_RESULT:-skipped}"
          if [[ "$gather" == "failure" || "$receive" == "failure" ]]; then
            echo "alert=true" >> "$GITHUB_OUTPUT"
            echo "Failure detected (gather=$gather, receive=$receive)."
          else
            echo "alert=false" >> "$GITHUB_OUTPUT"
            echo "No downstream alert required (gather=$gather, receive=$receive)."
          fi
      - name: Send failure webhook
        if: ${{ steps.alert.outputs.alert == 'true' }}
        run: |
          set -euo pipefail
          if [[ -z "${ERROR_WEBHOOK_URL:-}" ]]; then
            echo "Webhook URL secret not configured; skipping alert dispatch."
            exit 0
          fi
          gather="${GATHER_RESULT:-unknown}"
          receive="${RECEIVE_RESULT:-skipped}"
          pr="${PR_NUMBER:-unknown}"
          # REPOSITORY already falls back to github.repository in the job env above.
          repo="${REPOSITORY:-unknown}"
          payload=$(jq -n \
            --arg repo "$repo" \
            --arg pr "$pr" \
            --arg gather_status "$gather" \
            --arg receive_status "$receive" \
            --arg run_url "$RUN_URL" \
            --arg workflow "$WORKFLOW_NAME" \
            --arg run_id "$RUN_ID" \
            --arg run_attempt "$RUN_ATTEMPT" \
            --arg actor "$ACTOR" \
            '{repository:$repo, pr_number:$pr, gather_status:$gather_status, receive_status:$receive_status, run_url:$run_url, workflow:$workflow, run_id:$run_id, run_attempt:$run_attempt, actor:$actor}')
          curl -sS -X POST \
            -H "Content-Type: application/json" \
            --data "$payload" \
            "$ERROR_WEBHOOK_URL"