# Workflow file captured from the run for PR #348:
# "fix(providers): 🐛 exempt 503 retries from usage counting"
# (GitHub web-page chrome removed; indentation restored below.)

name: Build and Release Executable

# ╔═══════════════════════════════════════════════════════════════════════════════════════╗
# ║ CONFIGURATION SECTION                                                                  ║
# ║ Edit the values below to customize build triggers, release contents, and behavior.     ║
# ║ All user-configurable options are centralized in this section for easy maintenance.    ║
# ╚═══════════════════════════════════════════════════════════════════════════════════════╝
env:
  # Branches that trigger automatic builds. NOTE: for reference only — the actual
  # triggers live in the 'on.push.branches' section below and must be kept in sync.
  BUILD_BRANCHES: "dev,main"
  # Branches marked as "stable" releases (non-prerelease, marked as "latest").
  # Branches NOT in this list are marked as experimental/prerelease.
  STABLE_BRANCHES: "main"
  # Parent branches to search for tags when the current branch has none
  # (space-separated, priority order). Used on the first build of a new branch
  # to find a changelog comparison point.
  FALLBACK_BRANCHES: "main master develop dev"

  # Release archive contents. Paths are relative to the repository root,
  # space-separated. RELEASE_FILES_ALL goes into every platform archive;
  # the platform-specific lists are added on top of it.
  RELEASE_FILES_ALL: ".env.example README.md DOCUMENTATION.md LICENSE"
  RELEASE_FILES_WINDOWS: ""
  RELEASE_FILES_LINUX: ""
  RELEASE_FILES_MACOS: ""

  # File descriptions shown in release notes under "Included Files".
  # Format: one file per line as "filename|description"; markdown (e.g. **bold**)
  # is supported in descriptions.
  FILE_DESCRIPTIONS: |
    proxy_app.exe|Main application executable with built-in TUI launcher for **Windows**.
    proxy_app|Main application executable with built-in TUI launcher for **Linux** and **macOS**.
    .env.example|Example configuration file. Copy to .env and add your API keys.
    README.md|Project overview and quick start guide.
    DOCUMENTATION.md|Detailed configuration and usage documentation.
    LICENSE|License.

  # Warning shown at the top of release notes for branches NOT in STABLE_BRANCHES.
  # Placeholders: {BRANCH} = branch name, {VERSION} = build version, {REPO} = owner/repo.
  EXPERIMENTAL_WARNING: |
    > [!WARNING]
    > | ⚠️ **EXPERIMENTAL BUILD** ⚠️ |
    > |:---------------------------:|
    > This release is from the [`{BRANCH}`](https://github.com/{REPO}/tree/{BRANCH}) branch and is **highly unstable**. It contains features that are under active development, may be feature-incomplete, contain bugs, or have features that will be removed in the future.
    >
    > **Do not use in production environments.**
    >
    > ---
    >
    > **Found an issue?** Please [report it here](https://github.com/{REPO}/issues/new/choose) and include the build version (`{VERSION}`) in your report.

  # Release pruning: automatically clean up old releases to save space.
  # All values are strings on purpose — step scripts compare them as text.
  PRUNE_ENABLED: "false"
  PRUNE_PROTECTED_BRANCHES: "main,master,production,prod,staging,develop"
  PRUNE_RETENTION_DAYS: "1"
  PRUNE_KEEP_DAILY_SNAPSHOT: "true"
  PRUNE_MAX_COUNT: "10"
# ════════════════════════════════════════════════════════════════════════════════════════════
# WORKFLOW TRIGGERS
# ════════════════════════════════════════════════════════════════════════════════════════════
# NOTE: generic YAML 1.1 parsers read the bare 'on' key as boolean true; GitHub's
# loader handles it, so this is the conventional spelling for Actions workflows.
on:
  workflow_dispatch:
    inputs:
      manual_previous_tag:
        description: 'Optional: Manually set the previous tag to generate the changelog from.'
        required: false
        default: ''
      dry_run:
        description: 'Dry run mode for pruning (preview without deleting)'
        required: false
        type: boolean
        default: false
  push:
    # Keep this branch list in sync with BUILD_BRANCHES in the env section above.
    branches:
      - dev
      - main
    # Only changes matching these patterns trigger a build.
    paths:
      - 'src/proxy_app/**'              # Main application source code
      - 'src/rotator_library/**'        # Key rotation library
      - '.github/workflows/build.yml'   # This workflow file
      - 'cliff.toml'                    # Changelog configuration
# ════════════════════════════════════════════════════════════════════════════════════════════
# JOBS
# ════════════════════════════════════════════════════════════════════════════════════════════
jobs:
  # Build the PyInstaller executable on all three platforms in parallel.
  build:
    runs-on: ${{ matrix.os }}
    strategy:
      matrix:
        os: [windows-latest, ubuntu-latest, macos-latest]
    steps:
      - name: Check out repository
        uses: actions/checkout@v5

      - name: Set up uv
        uses: astral-sh/setup-uv@v4
        with:
          enable-cache: true
          cache-dependency-glob: "requirements.txt"

      - name: Set up Python with uv
        shell: bash
        run: |
          uv python install 3.12
          uv venv

      - name: Install dependencies
        shell: bash
        run: |
          # Install the rotator library editable separately; strip it from the
          # requirements file first so the pinned deps install cleanly.
          grep -v -- '-e src/rotator_library' requirements.txt > temp_requirements.txt
          uv pip install --python .venv -r temp_requirements.txt
          uv pip install --python .venv pyinstaller
          uv pip install --python .venv -e src/rotator_library

      - name: Get PyInstaller cache directory
        id: pyinstaller-cache-dir
        shell: bash
        run: |
          if [ "${{ runner.os }}" == "Windows" ]; then
            echo "path=$USERPROFILE/AppData/Local/pyinstaller" >> $GITHUB_OUTPUT
          elif [ "${{ runner.os }}" == "Linux" ]; then
            echo "path=$HOME/.cache/pyinstaller" >> $GITHUB_OUTPUT
          elif [ "${{ runner.os }}" == "macOS" ]; then
            echo "path=$HOME/Library/Application Support/pyinstaller" >> $GITHUB_OUTPUT
          fi

      - name: Cache PyInstaller build data
        uses: actions/cache@v4
        with:
          path: ${{ steps.pyinstaller-cache-dir.outputs.path }}
          key: ${{ runner.os }}-pyinstaller-3.12-${{ hashFiles('requirements.txt') }}
          restore-keys: |
            ${{ runner.os }}-pyinstaller-3.12-

      - name: Build executable
        shell: bash
        run: |
          if [ "${{ runner.os }}" == "Windows" ]; then
            .venv/Scripts/python src/proxy_app/build.py
          else
            .venv/bin/python src/proxy_app/build.py
          fi

      # pwsh is available on all three runner images, so one step covers them all.
      - name: Ensure PyInstaller cache directory exists
        shell: pwsh
        run: New-Item -ItemType Directory -Force -Path "${{ steps.pyinstaller-cache-dir.outputs.path }}"

      - name: Get short SHA
        id: version
        shell: bash
        run: |
          sha=$(git rev-parse --short HEAD)
          echo "sha=$sha" >> $GITHUB_OUTPUT

      - name: Prepare files for artifact
        shell: bash
        run: |
          stagingDir="staging"
          mkdir -p $stagingDir
          if [ "${{ runner.os }}" == "Windows" ]; then
            cp src/proxy_app/dist/proxy_app.exe "$stagingDir/"
          else
            cp src/proxy_app/dist/proxy_app "$stagingDir/"
          fi
          echo "--- Staging directory contents ---"
          ls -R $stagingDir
          echo "------------------------------------"

      - name: Archive build artifact
        uses: actions/upload-artifact@v4
        with:
          name: proxy-app-build-${{ runner.os }}-${{ steps.version.outputs.sha }}
          path: staging/
release:
needs: build
runs-on: ubuntu-latest
permissions:
contents: write
# Note: STABLE_BRANCHES is defined in the top-level env section
steps:
- name: Check out repository
uses: actions/checkout@v5
with:
fetch-depth: 0
- name: Fetch all tags and history
shell: bash
run: git fetch --prune --tags
- name: Get short SHA
id: get_sha
shell: bash
run: echo "sha=$(git rev-parse --short HEAD)" >> $GITHUB_OUTPUT
- name: Generate Build Version
id: version
shell: bash
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
BRANCH_NAME=${{ github.ref_name }}
DATE_STAMP_NEW=$(date +'%Y%m%d')
DATE_STAMP_OLD=$(date +'%Y.%m.%d')
# Find the number of releases already created today for this branch, matching either old or new format.
# We use grep -E for an OR condition and wrap it to prevent failures when no matches are found.
BUILD_COUNT=$(gh release list --repo "${{ github.repository }}" --limit 100 | { grep -E "$BRANCH_NAME/build-($DATE_STAMP_NEW|$DATE_STAMP_OLD)" || true; } | wc -l)
# Increment the build number for the new release
BUILD_NUMBER=$((BUILD_COUNT + 1))
# Create the new, sortable version string using the new format
VERSION="$DATE_STAMP_NEW-$BUILD_NUMBER-${{ steps.get_sha.outputs.sha }}"
# Define all naming components
echo "release_title=Build ($BRANCH_NAME): $VERSION" >> $GITHUB_OUTPUT
echo "release_tag=$BRANCH_NAME/build-$VERSION" >> $GITHUB_OUTPUT
echo "archive_version_part=$BRANCH_NAME-$VERSION" >> $GITHUB_OUTPUT
echo "version=$VERSION" >> $GITHUB_OUTPUT
echo "timestamp=$(date -u +'%Y-%m-%d %H:%M:%S UTC')" >> $GITHUB_OUTPUT
- name: Download build artifacts
uses: actions/download-artifact@v4
with:
path: release-assets
pattern: proxy-app-build-*-${{ steps.get_sha.outputs.sha }}
- name: Archive release files
id: archive
shell: bash
run: |
ASSET_PATHS=""
for dir in release-assets/proxy-app-build-*; do
if [ -d "$dir" ]; then
os_name=$(basename "$dir" | cut -d'-' -f4)
echo "📦 Processing $os_name archive..."
# ═══════════════════════════════════════════════════════════
# Copy common files (from RELEASE_FILES_ALL config)
# ═══════════════════════════════════════════════════════════
for file in ${{ env.RELEASE_FILES_ALL }}; do
if [ -f "$file" ]; then
cp "$file" "$dir/"
echo " ✓ Added: $file"
else
echo " ⚠ Not found: $file"
fi
done
# ═══════════════════════════════════════════════════════════
# Copy platform-specific files
# ═══════════════════════════════════════════════════════════
case "$os_name" in
Windows) EXTRA_FILES="${{ env.RELEASE_FILES_WINDOWS }}" ;;
Linux) EXTRA_FILES="${{ env.RELEASE_FILES_LINUX }}" ;;
macOS) EXTRA_FILES="${{ env.RELEASE_FILES_MACOS }}" ;;
*) EXTRA_FILES="" ;;
esac
if [ -n "$EXTRA_FILES" ]; then
for file in $EXTRA_FILES; do
if [ -f "$file" ]; then
cp "$file" "$dir/"
echo " ✓ Added ($os_name only): $file"
else
echo " ⚠ Not found: $file"
fi
done
fi
# Create the archive
archive_name="LLM-API-Key-Proxy-${os_name}-${{ steps.version.outputs.archive_version_part }}.zip"
(
cd "$dir"
zip -r "../../$archive_name" .
)
echo " ✅ Created: $archive_name"
if [ -z "$ASSET_PATHS" ]; then
ASSET_PATHS="$archive_name"
else
ASSET_PATHS="$ASSET_PATHS $archive_name"
fi
fi
done
echo "ASSET_PATHS=$ASSET_PATHS" >> $GITHUB_OUTPUT
- name: Install git-cliff
shell: bash
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
API_RESPONSE=$(curl -s -H "Authorization: token $GITHUB_TOKEN" https://api.github.com/repos/orhun/git-cliff/releases/latest)
LATEST_CLIFF_URL=$(echo "$API_RESPONSE" | jq -r '.assets[] | select(.name | endswith("x86_64-unknown-linux-gnu.tar.gz")) | .browser_download_url')
if [ -z "$LATEST_CLIFF_URL" ]; then
echo "::error::Could not find git-cliff asset URL."
echo "API Response: $API_RESPONSE"
exit 1
fi
curl -L "$LATEST_CLIFF_URL" | tar xz
sudo mv git-cliff-*/git-cliff /usr/local/bin/
- name: Prepare git-cliff config
shell: bash
run: |
# Inject the GitHub repo URL into your template
sed -i "s|{{ repository_url }}|https://github.com/${GITHUB_REPOSITORY}|g" .github/cliff.toml
echo "✅ cliff.toml:"
head -20 .github/cliff.toml
- name: Generate Changelog
id: changelog
shell: bash
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
BRANCH_NAME=${{ github.ref_name }}
if [ -n "${{ github.event.inputs.manual_previous_tag }}" ]; then
echo "Manual tag provided: ${{ github.event.inputs.manual_previous_tag }}"
LAST_TAG="${{ github.event.inputs.manual_previous_tag }}"
else
echo "No manual tag, searching for latest tag on branch '$BRANCH_NAME'..."
# Prioritize finding the latest tag with the new format (e.g., build-20250707-1-...).
echo "Attempting to find latest tag with new format..."
LAST_TAG=$(git describe --tags --abbrev=0 --match="$BRANCH_NAME/build-[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]-*" 2>/dev/null || true)
# If no new format tag is found, fall back to the old, more generic pattern.
if [ -z "$LAST_TAG" ]; then
echo "No new format tag found. Falling back to search for any older build tag..."
LAST_TAG=$(git describe --tags --abbrev=0 --match="$BRANCH_NAME/build-*" 2>/dev/null || echo "")
fi
# ═══════════════════════════════════════════════════════════
# PARENT BRANCH FALLBACK: Find closest parent branch's tag
# ═══════════════════════════════════════════════════════════
if [ -z "$LAST_TAG" ]; then
echo ""
echo "⚠️ No tag found for '$BRANCH_NAME', searching parent branches..."
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
# Use FALLBACK_BRANCHES from config
FALLBACK_BRANCHES="${{ env.FALLBACK_BRANCHES }}"
BEST_TAG=""
BEST_DISTANCE=999999
BEST_PARENT=""
for PARENT in $FALLBACK_BRANCHES; do
# Skip if same as current branch
[ "$PARENT" == "$BRANCH_NAME" ] && continue
# Check if branch exists (remote first, then local)
if git rev-parse --verify "origin/$PARENT" >/dev/null 2>&1; then
BRANCH_REF="origin/$PARENT"
elif git rev-parse --verify "$PARENT" >/dev/null 2>&1; then
BRANCH_REF="$PARENT"
else
echo " $PARENT: doesn't exist, skipping"
continue
fi
# Find merge-base (common ancestor)
MERGE_BASE=$(git merge-base HEAD "$BRANCH_REF" 2>/dev/null || true)
if [ -z "$MERGE_BASE" ]; then
echo " $PARENT: no common ancestor, skipping"
continue
fi
# Count commits from merge-base to HEAD (distance = how far we've diverged)
DISTANCE=$(git rev-list --count "$MERGE_BASE"..HEAD 2>/dev/null || echo "999999")
# Find tag at or before merge-base
PARENT_TAG=$(git describe --tags --abbrev=0 --match="$PARENT/build-*" "$MERGE_BASE" 2>/dev/null || true)
if [ -n "$PARENT_TAG" ]; then
echo " $PARENT: found $PARENT_TAG (distance: $DISTANCE commits)"
if [ "$DISTANCE" -lt "$BEST_DISTANCE" ]; then
BEST_DISTANCE=$DISTANCE
BEST_TAG=$PARENT_TAG
BEST_PARENT=$PARENT
fi
else
echo " $PARENT: no build tag found at merge-base"
fi
done
if [ -n "$BEST_TAG" ]; then
LAST_TAG="$BEST_TAG"
echo ""
echo "✅ Using parent tag: $LAST_TAG (from '$BEST_PARENT', $BEST_DISTANCE commits ago)"
fi
fi
# ═══════════════════════════════════════════════════════════
# ULTIMATE FALLBACK: Any ancestor tag
# ═══════════════════════════════════════════════════════════
if [ -z "$LAST_TAG" ]; then
echo ""
echo "🔍 No parent branch tag found, trying any ancestor tag..."
LAST_TAG=$(git describe --tags --abbrev=0 --match="*/build-*" HEAD 2>/dev/null || true)
if [ -n "$LAST_TAG" ]; then
echo "✅ Found ancestor tag: $LAST_TAG"
fi
fi
fi
echo "✅ Using tag: $LAST_TAG"
if [ -n "$LAST_TAG" ]; then
# Standard run: A previous tag was found.
echo "🔍 Generating changelog for range: $LAST_TAG..HEAD"
git-cliff \
--config .github/cliff.toml \
--github-repo "${{ github.repository }}" \
--strip all \
--output changelog.md \
"$LAST_TAG..HEAD"
else
# First run: No previous tag found.
echo "⚠️ No previous build tag found. Generating initial release changelog."
echo "## Initial Release" > changelog.md
echo "" >> changelog.md
echo "This is the first automated build release using this format. Future releases will contain a detailed list of changes." >> changelog.md
fi
# This part of the script remains to handle the output
if [ -s changelog.md ]; then
echo "✅ Changelog generated successfully"
CHANGELOG_B64=$(base64 -w 0 changelog.md)
echo "changelog_b64=$CHANGELOG_B64" >> $GITHUB_OUTPUT
echo "has_changelog=true" >> $GITHUB_OUTPUT
echo "previous_tag=$LAST_TAG" >> $GITHUB_OUTPUT
else
# This is now a true error condition
echo "❌ Critical error: Changelog is empty after generation."
echo "has_changelog=false" >> $GITHUB_OUTPUT
fi
- name: Resolve GitHub Usernames in Changelog
id: resolve_usernames
shell: bash
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
echo "🔍 Resolving GitHub Usernames in Changelog"
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
# Exit early if no changelog
if [ ! -s changelog.md ]; then
echo "ℹ️ No changelog to process, skipping username resolution"
exit 0
fi
PREV_TAG="${{ steps.changelog.outputs.previous_tag }}"
CURRENT_SHA="${{ github.sha }}"
echo "📋 Configuration:"
echo " Previous tag: ${PREV_TAG:-'(none)'}"
echo " Current SHA: $CURRENT_SHA"
echo ""
# Count placeholders before processing
TOTAL_PLACEHOLDERS=$(grep -oE '\[\[[a-f0-9]{40}\|[^]]*\]\]' changelog.md | wc -l || echo "0")
echo "📊 Found $TOTAL_PLACEHOLDERS author placeholders to resolve"
echo ""
if [ "$TOTAL_PLACEHOLDERS" -eq 0 ]; then
echo "ℹ️ No placeholders found, skipping"
exit 0
fi
# ═══════════════════════════════════════════════════════════
# LAYER B-1: Compare API (single call, up to 250 commits)
# ═══════════════════════════════════════════════════════════
LAYER_B1_RESOLVED=0
if [ -n "$PREV_TAG" ]; then
echo "🔍 Layer B-1: Fetching authors via Compare API..."
echo " Endpoint: repos/${{ github.repository }}/compare/$PREV_TAG...$CURRENT_SHA"
AUTHOR_MAP=$(gh api "repos/${{ github.repository }}/compare/$PREV_TAG...$CURRENT_SHA" \
--jq '[.commits[] | {sha: .sha, username: .author.login}] | map(select(.username != null))' 2>/dev/null || echo "[]")
COMPARE_COUNT=$(echo "$AUTHOR_MAP" | jq 'length')
echo " Retrieved $COMPARE_COUNT commits with linked GitHub accounts"
# Apply replacements from Compare API
while read -r entry; do
if [ -n "$entry" ] && [ "$entry" != "null" ]; then
SHA=$(echo "$entry" | jq -r '.sha')
USERNAME=$(echo "$entry" | jq -r '.username')
if [ -n "$SHA" ] && [ -n "$USERNAME" ] && [ "$USERNAME" != "null" ]; then
# Replace [[SHA|...|...]] with @username
if grep -q "\[\[$SHA" changelog.md 2>/dev/null; then
sed -i "s|\[\[$SHA[^]]*\]\]|@$USERNAME|g" changelog.md
LAYER_B1_RESOLVED=$((LAYER_B1_RESOLVED + 1))
fi
fi
fi
done < <(echo "$AUTHOR_MAP" | jq -c '.[]' 2>/dev/null || true)
echo " ✅ Layer B-1 resolved: $LAYER_B1_RESOLVED authors"
else
echo "⚠️ Layer B-1: Skipped (no previous tag available)"
fi
echo ""
# ═══════════════════════════════════════════════════════════
# LAYER B-2: Per-commit API (for remaining unresolved)
# ═══════════════════════════════════════════════════════════
REMAINING_B2=$(grep -oE '\[\[[a-f0-9]{40}\|[^]]*\]\]' changelog.md 2>/dev/null || true)
REMAINING_B2_COUNT=$(echo "$REMAINING_B2" | grep -c '\[\[' 2>/dev/null || echo "0")
LAYER_B2_RESOLVED=0
if [ "$REMAINING_B2_COUNT" -gt 0 ] && [ -n "$REMAINING_B2" ]; then
echo "🔍 Layer B-2: Resolving $REMAINING_B2_COUNT remaining via per-commit API..."
# Get unique SHAs only
UNIQUE_SHAS=$(echo "$REMAINING_B2" | grep -oE '\[\[[a-f0-9]{40}' | sed 's/\[\[//' | sort -u)
while read -r SHA; do
if [ -n "$SHA" ]; then
USERNAME=$(gh api "repos/${{ github.repository }}/commits/$SHA" \
--jq '.author.login // empty' 2>/dev/null || true)
if [ -n "$USERNAME" ]; then
sed -i "s|\[\[$SHA[^]]*\]\]|@$USERNAME|g" changelog.md
LAYER_B2_RESOLVED=$((LAYER_B2_RESOLVED + 1))
echo " ✅ $SHA -> @$USERNAME"
else
echo " ⚠️ $SHA -> (no GitHub account linked)"
fi
fi
done <<< "$UNIQUE_SHAS"
echo " ✅ Layer B-2 resolved: $LAYER_B2_RESOLVED authors"
else
echo "ℹ️ Layer B-2: Skipped (no remaining placeholders)"
fi
echo ""
# ═══════════════════════════════════════════════════════════
# LAYER D: Noreply email extraction (no API needed)
# ═══════════════════════════════════════════════════════════
REMAINING_D=$(grep -oE '\[\[[a-f0-9]{40}\|[^]]*\]\]' changelog.md 2>/dev/null || true)
REMAINING_D_COUNT=$(echo "$REMAINING_D" | grep -c '\[\[' 2>/dev/null || echo "0")
LAYER_D_RESOLVED=0
if [ "$REMAINING_D_COUNT" -gt 0 ] && [ -n "$REMAINING_D" ]; then
echo "🔍 Layer D: Extracting usernames from noreply emails..."
echo "$REMAINING_D" | while read -r placeholder; do
if [ -n "$placeholder" ]; then
# Extract SHA, email, and name from [[sha|email|name]]
SHA=$(echo "$placeholder" | sed 's/\[\[\([^|]*\)|.*/\1/')
EMAIL=$(echo "$placeholder" | sed 's/\[\[[^|]*|\([^|]*\)|.*/\1/')
NAME=$(echo "$placeholder" | sed 's/\[\[[^|]*|[^|]*|\([^]]*\)\]\]/\1/')
USERNAME=""
# Pattern 1: [email protected]
if [[ "$EMAIL" =~ ^[0-9]+\+([^@]+)@users\.noreply\.github\.com$ ]]; then
USERNAME="${BASH_REMATCH[1]}"
# Pattern 2: [email protected] (no ID)
elif [[ "$EMAIL" =~ ^([^@+\[]+)@users\.noreply\.github\.com$ ]]; then
USERNAME="${BASH_REMATCH[1]}"
fi
if [ -n "$USERNAME" ]; then
sed -i "s|\[\[$SHA[^]]*\]\]|@$USERNAME|g" changelog.md
echo " ✅ Extracted @$USERNAME from $EMAIL"
fi
fi
done
# Recount after Layer D
AFTER_D=$(grep -oE '\[\[[a-f0-9]{40}\|[^]]*\]\]' changelog.md 2>/dev/null | wc -l || echo "0")
LAYER_D_RESOLVED=$((REMAINING_D_COUNT - AFTER_D))
echo " ✅ Layer D resolved: $LAYER_D_RESOLVED authors"
else
echo "ℹ️ Layer D: Skipped (no remaining placeholders)"
fi
echo ""
# ═══════════════════════════════════════════════════════════
# LAYER FINAL: Git author name fallback (no @)
# ═══════════════════════════════════════════════════════════
REMAINING_FINAL=$(grep -oE '\[\[[a-f0-9]{40}\|[^]]*\]\]' changelog.md 2>/dev/null || true)
REMAINING_FINAL_COUNT=$(echo "$REMAINING_FINAL" | grep -c '\[\[' 2>/dev/null || echo "0")
LAYER_FINAL_COUNT=0
if [ "$REMAINING_FINAL_COUNT" -gt 0 ] && [ -n "$REMAINING_FINAL" ]; then
echo "⚠️ Final fallback: Using git author names for $REMAINING_FINAL_COUNT unresolved..."
echo "$REMAINING_FINAL" | while read -r placeholder; do
if [ -n "$placeholder" ]; then
# Extract name from [[sha|email|name]]
SHA=$(echo "$placeholder" | sed 's/\[\[\([^|]*\)|.*/\1/')
NAME=$(echo "$placeholder" | sed 's/\[\[[^|]*|[^|]*|\([^]]*\)\]\]/\1/')
sed -i "s|\[\[$SHA[^]]*\]\]|$NAME|g" changelog.md
echo " ⚠️ Using fallback: $NAME"
fi
done
LAYER_FINAL_COUNT=$REMAINING_FINAL_COUNT
else
echo "ℹ️ Final fallback: Not needed (all resolved)"
fi
echo ""
# ═══════════════════════════════════════════════════════════
# LAYER PR: Generate Community Contributions section
# ═══════════════════════════════════════════════════════════
PR_COUNT=0
if [ -n "$PREV_TAG" ]; then
echo "🔍 Layer PR: Generating Community Contributions section..."
# Get all merge commits in the range
MERGE_COMMITS=$(git log "$PREV_TAG".."$CURRENT_SHA" --oneline --grep="Merge pull request" 2>/dev/null || true)
if [ -n "$MERGE_COMMITS" ]; then
PR_SECTION=""
while IFS= read -r commit_line; do
if [ -n "$commit_line" ]; then
# Extract PR number from "Merge pull request #XX from ..."
PR_NUM=$(echo "$commit_line" | grep -oE '#[0-9]+' | head -1 | tr -d '#')
if [ -n "$PR_NUM" ]; then
# Fetch PR info from GitHub API
PR_INFO=$(gh api "repos/${{ github.repository }}/pulls/$PR_NUM" \
--jq '{title: .title, author: .user.login}' 2>/dev/null || echo "{}")
PR_TITLE=$(echo "$PR_INFO" | jq -r '.title // empty')
PR_AUTHOR=$(echo "$PR_INFO" | jq -r '.author // empty')
if [ -n "$PR_TITLE" ] && [ -n "$PR_AUTHOR" ]; then
PR_URL="https://github.com/${{ github.repository }}/pull/$PR_NUM"
PR_SECTION="${PR_SECTION}- ${PR_TITLE} ([#${PR_NUM}](${PR_URL})) by @${PR_AUTHOR}"$'\n'
PR_COUNT=$((PR_COUNT + 1))
echo " ✅ PR #$PR_NUM: $PR_TITLE by @$PR_AUTHOR"
else
echo " ⚠️ PR #$PR_NUM: Could not fetch info"
fi
fi
fi
done <<< "$MERGE_COMMITS"
if [ "$PR_COUNT" -gt 0 ]; then
# Append PR section to changelog
{
echo ""
echo "### 💜 Community Contributions"
echo ""
echo "Thank you to our community contributors!"
echo ""
echo "$PR_SECTION"
} >> changelog.md
echo " ✅ Added $PR_COUNT PRs to Community Contributions section"
fi
else
echo " ℹ️ No merge commits found in range"
fi
else
echo "⚠️ Layer PR: Skipped (no previous tag available)"
fi
echo ""
# ═══════════════════════════════════════════════════════════
# Summary
# ═══════════════════════════════════════════════════════════
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
echo "📊 Resolution Summary"
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
echo " Total placeholders: $TOTAL_PLACEHOLDERS"
echo " Layer B-1 (Compare): $LAYER_B1_RESOLVED"
echo " Layer B-2 (Per-commit): $LAYER_B2_RESOLVED"
echo " Layer D (Noreply): $LAYER_D_RESOLVED"
echo " Fallback (name only): $LAYER_FINAL_COUNT"
echo " Community PRs: $PR_COUNT"
echo ""
# Verify no placeholders remain
REMAINING_CHECK=$(grep -c '\[\[[a-f0-9]\{40\}|' changelog.md 2>/dev/null || echo "0")
if [ "$REMAINING_CHECK" -eq 0 ]; then
echo "✅ All author placeholders resolved successfully!"
else
echo "⚠️ Warning: $REMAINING_CHECK placeholders may still remain"
fi
# Re-encode the changelog for the next step
if [ -s changelog.md ]; then
CHANGELOG_B64=$(base64 -w 0 changelog.md)
echo "changelog_b64=$CHANGELOG_B64" >> $GITHUB_OUTPUT
fi
- name: Debug artifact contents
shell: bash
run: |
echo "🔍 Debugging artifact contents..."
echo "Current directory:"
pwd
echo ""
echo "Release assets directory contents:"
ls -laR release-assets/ || echo "release-assets directory not found"
echo ""
echo "All files in current directory:"
find . -name "*.zip" | head -20
echo ""
echo "Directory structure:"
find release-assets -type f 2>/dev/null || echo "No files found in release-assets"
- name: Generate Build Metadata
id: metadata
shell: bash
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
# Find executable files and get their sizes
WINDOWS_EXE=$(find release-assets -name "proxy_app.exe" -type f | head -1)
if [ -n "$WINDOWS_EXE" ]; then
WIN_SIZE=$(du -sh "$WINDOWS_EXE" | cut -f1)
else
WIN_SIZE="Unknown"
fi
echo "win_build_size=$WIN_SIZE" >> $GITHUB_OUTPUT
LINUX_EXE=$(find release-assets -path "*/proxy-app-build-Linux-*/proxy_app" -type f | head -1)
if [ -n "$LINUX_EXE" ]; then
LINUX_SIZE=$(du -sh "$LINUX_EXE" | cut -f1)
else
LINUX_SIZE="Unknown"
fi
echo "linux_build_size=$LINUX_SIZE" >> $GITHUB_OUTPUT
MACOS_EXE=$(find release-assets -path "*/proxy-app-build-macOS-*/proxy_app" -type f | head -1)
if [ -n "$MACOS_EXE" ]; then
MACOS_SIZE=$(du -sh "$MACOS_EXE" | cut -f1)
else
MACOS_SIZE="Unknown"
fi
echo "macos_build_size=$MACOS_SIZE" >> $GITHUB_OUTPUT
COMMIT_COUNT=$(git rev-list --count HEAD)
# Generate rich contributor list
if [ -n "${{ steps.changelog.outputs.previous_tag }}" ]; then
echo "✅ Found previous tag, getting contributors since ${{ steps.changelog.outputs.previous_tag }}"
CONTRIBUTOR_LOG=$(git log ${{ steps.changelog.outputs.previous_tag }}..HEAD --format='%ae' | sort -u)
else
echo "⚠️ No previous tag found, getting author of the last commit."
CONTRIBUTOR_LOG=$(git log -1 --format='%ae')
fi
CONTRIBUTORS_LIST=""
while read -r email; do
# Find user by email
USER_INFO=$(gh api "search/users?q=$email+in:email" --jq '.items[0]')
if [ -n "$USER_INFO" ]; then
USERNAME=$(echo "$USER_INFO" | jq -r '.login')
AVATAR_URL=$(echo "$USER_INFO" | jq -r '.avatar_url')
CONTRIBUTORS_LIST="$CONTRIBUTORS_LIST [![$USERNAME](https://images.weserv.nl/?url=$AVATAR_URL&w=32&h=32&fit=cover&mask=circle)](https://github.com/$USERNAME) "
fi
done <<< "$CONTRIBUTOR_LOG"
echo "commit_count=$COMMIT_COUNT" >> $GITHUB_OUTPUT
echo "contributors_list=$CONTRIBUTORS_LIST" >> $GITHUB_OUTPUT
echo "📊 Build metadata:"
echo " - Size (Windows): $WIN_SIZE"
echo " - Size (Linux): $LINUX_SIZE"
echo " - Size (macOS): $MACOS_SIZE"
echo " - Commits: $COMMIT_COUNT"
echo " - Contributors: $CONTRIBUTORS_LIST"
- name: Create Release
shell: bash
run: |
# Prepare changelog content - prefer resolved version if available
if [ -n "${{ steps.resolve_usernames.outputs.changelog_b64 }}" ]; then
echo "${{ steps.resolve_usernames.outputs.changelog_b64 }}" | base64 -d > decoded_changelog.md
CHANGELOG_CONTENT=$(cat decoded_changelog.md)
elif [ "${{ steps.changelog.outputs.has_changelog }}" == "true" ]; then
echo "${{ steps.changelog.outputs.changelog_b64 }}" | base64 -d > decoded_changelog.md
CHANGELOG_CONTENT=$(cat decoded_changelog.md)
else
CHANGELOG_CONTENT="No significant changes detected in this release."
fi
# Prepare the full release notes in a temporary file
if [ -n "${{ steps.changelog.outputs.previous_tag }}" ]; then
CHANGELOG_URL="**Full Changelog**: https://github.com/${{ github.repository }}/compare/${{ steps.changelog.outputs.previous_tag }}...${{ steps.version.outputs.release_tag }}"
else
CHANGELOG_URL=""
fi
# Generate file descriptions table from FILE_DESCRIPTIONS config
FILE_TABLE="| File | Description |
|------|-------------|"
while IFS='|' read -r filename description; do
# Skip empty lines
if [ -n "$filename" ] && [ -n "$description" ]; then
FILE_TABLE="$FILE_TABLE
| \`$filename\` | $description |"
fi
done <<< "${{ env.FILE_DESCRIPTIONS }}"
# List archives
WINDOWS_ARCHIVE=$(echo "${{ steps.archive.outputs.ASSET_PATHS }}" | tr ' ' '\n' | grep 'Windows')
LINUX_ARCHIVE=$(echo "${{ steps.archive.outputs.ASSET_PATHS }}" | tr ' ' '\n' | grep 'Linux')
MACOS_ARCHIVE=$(echo "${{ steps.archive.outputs.ASSET_PATHS }}" | tr ' ' '\n' | grep 'macOS')
ARCHIVE_LIST="- **Windows**: \`$WINDOWS_ARCHIVE\`
- **Linux**: \`$LINUX_ARCHIVE\`
- **macOS**: \`$MACOS_ARCHIVE\`"
cat > releasenotes.md <<-EOF
## Build Information
| Field | Value |
|-------|-------|
| 📦 **Version** | \`${{ steps.version.outputs.version }}\` |
| 💾 **Binary Size** | Win: \`${{ steps.metadata.outputs.win_build_size }}\`, Linux: \`${{ steps.metadata.outputs.linux_build_size }}\`, macOS: \`${{ steps.metadata.outputs.macos_build_size }}\` |
| 🔗 **Commit** | [\`${{ steps.get_sha.outputs.sha }}\`](https://github.com/${{ github.repository }}/commit/${{ github.sha }}) |
| 📅 **Build Date** | \`${{ steps.version.outputs.timestamp }}\` |
| ⚡ **Trigger** | \`${{ github.event_name }}\` |
## 📋 What's Changed
$CHANGELOG_CONTENT
### 📁 Included Files
Each OS-specific archive contains the following files:
$FILE_TABLE
### 📦 Archives
$ARCHIVE_LIST
## 🔗 Useful Links
- 📖 [Documentation](https://github.com/${{ github.repository }}/wiki)
- 🐛 [Report Issues](https://github.com/${{ github.repository }}/issues)
- 💬 [Discussions](https://github.com/${{ github.repository }}/discussions)
- 🌟 [Star this repo](https://github.com/${{ github.repository }}) if you find it useful!
---
> **Note**: This is an automated build release.
$CHANGELOG_URL
EOF
# Set release flags and notes based on the branch
CURRENT_BRANCH="${{ github.ref_name }}"
PRERELEASE_FLAG=""
LATEST_FLAG="--latest"
EXPERIMENTAL_NOTE=""
# Check if the current branch is in the stable branches list
if ! [[ ",${{ env.STABLE_BRANCHES }}," == *",$CURRENT_BRANCH,"* ]]; then
PRERELEASE_FLAG="--prerelease"
LATEST_FLAG="" # Do not mark non-stable branches as 'latest'
# Generate experimental warning from template with placeholder substitution
EXPERIMENTAL_NOTE=$(echo '${{ env.EXPERIMENTAL_WARNING }}' | \
sed "s|{BRANCH}|$CURRENT_BRANCH|g" | \
sed "s|{VERSION}|${{ steps.version.outputs.version }}|g" | \
sed "s|{REPO}|${{ github.repository }}|g")
fi
# Prepend the experimental note if it exists
if [ -n "$EXPERIMENTAL_NOTE" ]; then
echo "$EXPERIMENTAL_NOTE" > releasenotes_temp.md
echo "" >> releasenotes_temp.md
cat releasenotes.md >> releasenotes_temp.md
mv releasenotes_temp.md releasenotes.md
fi
# Create the release using the notes file
gh release create ${{ steps.version.outputs.release_tag }} \
--target ${{ github.sha }} \
--title "${{ steps.version.outputs.release_title }}" \
--notes-file releasenotes.md \
$LATEST_FLAG \
$PRERELEASE_FLAG \
${{ steps.archive.outputs.ASSET_PATHS }}
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Prune Old Releases
  # Intentionally no `if:` expression — the default status check (success())
  # skips this step when release creation failed. Pruning after a failed
  # publish could leave the branch with no usable release at all.
  shell: bash
  env:
    GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
    # Pruning config from top-level env section
    PRUNE_ENABLED: ${{ env.PRUNE_ENABLED }}
    PROTECTED_BRANCHES: ${{ env.PRUNE_PROTECTED_BRANCHES }}
    RETENTION_DAYS_FULL: ${{ env.PRUNE_RETENTION_DAYS }}
    RETENTION_KEEP_ONE_DAILY_OLDER: ${{ env.PRUNE_KEEP_DAILY_SNAPSHOT }}
    RETENTION_MAX_COUNT: ${{ env.PRUNE_MAX_COUNT }}
    # inputs.dry_run is empty on push events; default to 'false' so the
    # string comparison in the script always sees a real value.
    DRY_RUN: ${{ github.event.inputs.dry_run || 'false' }}
    CURRENT_TAG: ${{ steps.version.outputs.release_tag }}
  run: |
    # 1. Feature toggle
    if [ "$PRUNE_ENABLED" != "true" ]; then
      echo "ℹ️ Pruning is disabled."
      exit 0
    fi
    # Use runner-provided GITHUB_* env vars instead of interpolating
    # ${{ github.ref_name }} / ${{ github.repository }} into the script:
    # expression interpolation inside `run:` is a script-injection vector
    # (ref names may contain shell metacharacters).
    CURRENT_BRANCH="$GITHUB_REF_NAME"
    # 2. Protected-branch guard: never prune releases of stable branches.
    IFS=',' read -ra PROTECTED <<< "$PROTECTED_BRANCHES"
    for branch in "${PROTECTED[@]}"; do
      branch=$(echo "$branch" | xargs)  # trim whitespace around each entry
      if [ "$CURRENT_BRANCH" == "$branch" ]; then
        echo "🛡️ Branch '$CURRENT_BRANCH' is protected. Skipping pruning."
        exit 0
      fi
    done
    echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
    echo "✂️ Smart Release Pruning"
    echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
    echo "Configuration:"
    echo " • Retention Window: $RETENTION_DAYS_FULL days (Full retention)"
    echo " • Keep Daily Snapshot: $RETENTION_KEEP_ONE_DAILY_OLDER"
    echo " • Max Total Releases: $RETENTION_MAX_COUNT"
    echo " • Dry Run: $DRY_RUN"
    echo ""
    # 3. Cutoff date (YYYY-MM-DD). Releases dated on/after CUTOFF_DATE are
    # kept unconditionally; anything strictly older is subject to daily
    # thinning below.
    # NOTE(review): `date -d` is GNU date — assumes a Linux runner; confirm.
    CUTOFF_DATE=$(date -d "$RETENTION_DAYS_FULL days ago" +%Y-%m-%d)
    echo "📅 Cutoff Date: $CUTOFF_DATE (Releases older than this are subject to daily thinning)"
    echo ""
    # 4. Fetch candidate releases. Only tags under "<branch>/" count, the
    # release created by this run ($CURRENT_TAG) is always excluded, and
    # the list is sorted newest -> oldest so the max-count cap in phase 2
    # keeps the most recent ones.
    echo "🔍 Fetching releases for branch '$CURRENT_BRANCH'..."
    RELEASES_JSON=$(gh release list --repo "$GITHUB_REPOSITORY" --limit 1000 --json tagName,createdAt,isDraft,isPrerelease)
    FILTERED_RELEASES=$(echo "$RELEASES_JSON" | jq -c --arg branch "$CURRENT_BRANCH/" --arg current_tag "$CURRENT_TAG" '
      map(select(.tagName | startswith($branch))) |
      map(select(.tagName != $current_tag)) |
      sort_by(.createdAt) | reverse
    ')
    COUNT=$(echo "$FILTERED_RELEASES" | jq 'length')
    echo "📦 Found $COUNT historical releases (excluding current build)."
    if [ "$COUNT" -eq 0 ]; then
      echo "✅ No old releases to prune."
      exit 0
    fi
    # 5. Phase 1 — retention window + daily snapshots (newest to oldest).
    declare -a TO_DELETE
    declare -a KEPT_RELEASES
    declare -A SEEN_DAYS  # dates for which a daily snapshot is already kept
    # One jq pass emits "tag<TAB>createdAt" lines — avoids spawning two jq
    # subprocesses per release. Process substitution (not a pipe) keeps the
    # loop in the current shell so the arrays persist.
    while IFS=$'\t' read -r TAG CREATED_AT; do
      RELEASE_DATE=$(date -d "$CREATED_AT" +%Y-%m-%d)
      KEEP=false
      REASON=""
      # ISO dates compare correctly as strings, so lexicographic [[ > ]] is safe.
      if [[ "$RELEASE_DATE" > "$CUTOFF_DATE" ]] || [[ "$RELEASE_DATE" == "$CUTOFF_DATE" ]]; then
        KEEP=true
        REASON="Within retention window ($RETENTION_DAYS_FULL days)"
      elif [ "$RETENTION_KEEP_ONE_DAILY_OLDER" == "true" ]; then
        # Older than the window: keep only the newest release of each day.
        if [ -z "${SEEN_DAYS[$RELEASE_DATE]}" ]; then
          KEEP=true
          REASON="Daily snapshot for $RELEASE_DATE"
          SEEN_DAYS[$RELEASE_DATE]="seen"
        else
          REASON="Redundant build for $RELEASE_DATE"
        fi
      else
        REASON="Older than window and snapshots disabled"
      fi
      if [ "$KEEP" == "true" ]; then
        KEPT_RELEASES+=("$TAG")
        echo " ✅ KEEP: $TAG ($RELEASE_DATE) - $REASON"
      else
        TO_DELETE+=("$TAG")
        echo " ❌ PRUNE: $TAG ($RELEASE_DATE) - $REASON"
      fi
    done < <(echo "$FILTERED_RELEASES" | jq -r '.[] | [.tagName, .createdAt] | @tsv')
    echo ""
    echo "📊 Phase 1 Result: ${#KEPT_RELEASES[@]} kept, ${#TO_DELETE[@]} marked for pruning."
    # 6. Phase 2 — hard cap on total kept releases.
    # KEPT_RELEASES is newest -> oldest, so slicing keeps the most recent N.
    if ! [[ "$RETENTION_MAX_COUNT" =~ ^[0-9]+$ ]]; then
      echo "⚠️ RETENTION_MAX_COUNT ('$RETENTION_MAX_COUNT') is not a number; skipping max-count cap."
    elif [ "${#KEPT_RELEASES[@]}" -gt "$RETENTION_MAX_COUNT" ]; then
      echo "⚠️ Total kept releases (${#KEPT_RELEASES[@]}) exceeds limit ($RETENTION_MAX_COUNT). Trimming oldest..."
      OVERFLOW=("${KEPT_RELEASES[@]:$RETENTION_MAX_COUNT}")
      KEPT_RELEASES=("${KEPT_RELEASES[@]:0:$RETENTION_MAX_COUNT}")
      for tag in "${OVERFLOW[@]}"; do
        TO_DELETE+=("$tag")
        echo " ❌ PRUNE (Overflow): $tag"
      done
    fi
    echo ""
    echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
    echo "🗑️ Executing Deletions (${#TO_DELETE[@]} items)"
    echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
    if [ "${#TO_DELETE[@]}" -eq 0 ]; then
      echo "✅ Nothing to delete."
      exit 0
    fi
    # 7. Delete (or just report, in dry-run mode). Best-effort per tag:
    # one failed deletion must not abort the whole sweep.
    for tag in "${TO_DELETE[@]}"; do
      if [ "$DRY_RUN" == "true" ]; then
        echo " [DRY RUN] Would delete: $tag"
      else
        echo " Deleting: $tag"
        gh release delete "$tag" --repo "$GITHUB_REPOSITORY" --cleanup-tag --yes || echo " ⚠️ Failed to delete $tag"
      fi
    done
    echo ""
    echo "✅ Pruning complete."