fix(ui): keep filter buttons visible when BlockerPanel filter returns… #122
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
# Deployment workflow: gates on the test suite, then deploys to staging
# (push to main) or production (published release), with manual dispatch.
name: Deploy

on:
  push:
    branches: [main]
  release:
    types: [published]
  workflow_dispatch:
    inputs:
      environment:
        description: 'Environment to deploy to'
        required: true
        default: 'staging'
        type: choice
        options:
          - staging
          - production

# NOTE(review): neither variable is referenced by the jobs visible in this
# file — presumably consumed by the reusable test workflow; confirm before
# removing.
env:
  PYTHON_VERSION: '3.11'
  NODE_VERSION: '20'
jobs:
  # ============================================
  # Run Tests First (Quality Gate)
  # ============================================
  # Delegates to the reusable test workflow; deploy jobs depend on this.
  test:
    name: Run Test Suite
    uses: ./.github/workflows/test.yml
| # ============================================ | |
| # Deploy to Staging | |
| # ============================================ | |
| deploy-staging: | |
| name: Deploy to Staging | |
| runs-on: ubuntu-latest | |
| needs: test # Temporarily disabled for debugging | |
| if: | | |
| (github.event_name == 'push' && github.ref == 'refs/heads/main') || | |
| (github.event_name == 'workflow_dispatch' && github.event.inputs.environment == 'staging') | |
| environment: | |
| name: staging | |
| url: https://dev.codeframeapp.com | |
| steps: | |
| - name: Checkout code | |
| uses: actions/checkout@v4 | |
| - name: Set up SSH | |
| uses: webfactory/[email protected] | |
| with: | |
| ssh-private-key: ${{ secrets.SSH_KEY }} | |
| - name: Add server to known hosts | |
| run: | | |
| mkdir -p ~/.ssh | |
| ssh-keyscan -H ${{ secrets.HOST }} >> ~/.ssh/known_hosts | |
| - name: Create environment file | |
| env: | |
| REMOTE_HOST: ${{ secrets.HOST }} | |
| REMOTE_USER: ${{ secrets.USER }} | |
| REMOTE_PATH: ${{ secrets.PROJECT_PATH }} | |
| ENV_ANTHROPIC_KEY: ${{ secrets.ANTHROPIC_API_KEY }} | |
| ENV_OPENAI_KEY: ${{ secrets.OPENAI_API_KEY }} | |
| ENV_DATABASE_PATH: ${{ secrets.DATABASE_PATH }} | |
| ENV_API_HOST: ${{ secrets.API_HOST }} | |
| ENV_API_PORT: ${{ secrets.API_PORT }} | |
| ENV_CORS: ${{ secrets.CORS_ORIGINS }} | |
| ENV_API_URL: ${{ secrets.API_URL }} | |
| ENV_WS_URL: ${{ secrets.WS_URL }} | |
| ENV_LOG_LEVEL: ${{ secrets.LOG_LEVEL }} | |
| ENV_LOG_FILE: ${{ secrets.LOG_FILE }} | |
| ENV_ENVIRONMENT: ${{ secrets.ENVIRONMENT }} | |
| ENV_DEBUG: ${{ secrets.DEBUG }} | |
| ENV_HOT_RELOAD: ${{ secrets.HOT_RELOAD }} | |
| ENV_PM2_FRONTEND_NAME: ${{ secrets.PM2_FRONTEND_NAME }} | |
| ENV_PM2_BACKEND_NAME: ${{ secrets.PM2_BACKEND_NAME }} | |
| run: | | |
| echo "📝 Creating .env.staging file..." | |
| # Build env file content safely using printf (no shell interpretation) | |
| ENV_CONTENT=$(printf '%s\n' \ | |
| "# CodeFRAME Environment Configuration" \ | |
| "# Auto-generated by GitHub Actions deployment" \ | |
| "" \ | |
| "# AI Provider API Keys" \ | |
| "ANTHROPIC_API_KEY=${ENV_ANTHROPIC_KEY}" \ | |
| "OPENAI_API_KEY=${ENV_OPENAI_KEY}" \ | |
| "" \ | |
| "# Database Configuration" \ | |
| "DATABASE_PATH=${ENV_DATABASE_PATH}" \ | |
| "" \ | |
| "# Status Server Configuration" \ | |
| "API_HOST=${ENV_API_HOST}" \ | |
| "API_PORT=${ENV_API_PORT}" \ | |
| "CORS_ALLOWED_ORIGINS=${ENV_CORS}" \ | |
| "" \ | |
| "# Web UI Configuration" \ | |
| "NEXT_PUBLIC_API_URL=${ENV_API_URL}" \ | |
| "NEXT_PUBLIC_WS_URL=${ENV_WS_URL}" \ | |
| "" \ | |
| "# Logging Configuration" \ | |
| "LOG_LEVEL=${ENV_LOG_LEVEL}" \ | |
| "LOG_FILE=${ENV_LOG_FILE}" \ | |
| "" \ | |
| "# Environment & Development Flags" \ | |
| "ENVIRONMENT=${ENV_ENVIRONMENT}" \ | |
| "DEBUG=${ENV_DEBUG}" \ | |
| "HOT_RELOAD=${ENV_HOT_RELOAD}" \ | |
| ) | |
| # Base64 encode to prevent any shell interpretation during transfer | |
| ENV_BASE64=$(echo "$ENV_CONTENT" | base64 -w 0) | |
| # Transfer and decode safely on remote, verify creation | |
| ssh "${REMOTE_USER}@${REMOTE_HOST}" " | |
| set -e | |
| echo '${ENV_BASE64}' | base64 -d > '${REMOTE_PATH}/.env.staging.tmp' | |
| if [ ! -s '${REMOTE_PATH}/.env.staging.tmp' ]; then | |
| echo '❌ Failed to create environment file (empty or missing)' | |
| rm -f '${REMOTE_PATH}/.env.staging.tmp' | |
| exit 1 | |
| fi | |
| mv '${REMOTE_PATH}/.env.staging.tmp' '${REMOTE_PATH}/.env.staging' | |
| chmod 600 '${REMOTE_PATH}/.env.staging' | |
| " | |
| echo "✅ .env.staging created and verified" | |
| - name: Deploy to staging server | |
| env: | |
| BACKEND_NAME: ${{ secrets.PM2_BACKEND_NAME }} | |
| FRONTEND_NAME: ${{ secrets.PM2_FRONTEND_NAME }} | |
| NEXT_PUBLIC_API_URL: ${{ secrets.API_URL }} | |
| NEXT_PUBLIC_WS_URL: ${{ secrets.WS_URL }} | |
| run: | | |
| ssh ${{ secrets.USER }}@${{ secrets.HOST }} "bash -s" << ENDSSH | |
| set -e | |
| echo "🚀 Starting deployment to staging..." | |
| # Navigate to project directory | |
| cd ${{ secrets.PROJECT_PATH }} | |
| # Pull latest code | |
| echo "📥 Pulling latest code..." | |
| git fetch origin main | |
| git reset --hard origin/main | |
| # Backend setup | |
| echo "🐍 Setting up Python backend..." | |
| if ! command -v uv &> /dev/null; then | |
| curl -LsSf https://astral.sh/uv/install.sh | sh | |
| export PATH="\$HOME/.local/bin:\$PATH" | |
| fi | |
| uv venv | |
| source .venv/bin/activate | |
| uv sync | |
| # Frontend setup - export NEXT_PUBLIC vars at build time | |
| echo "📦 Building frontend..." | |
| cd web-ui | |
| npm ci | |
| echo "🔒 Running security audit..." | |
| npm audit --audit-level=critical | |
| # NEXT_PUBLIC vars must be set at build time for Next.js | |
| export NEXT_PUBLIC_API_URL="${NEXT_PUBLIC_API_URL}" | |
| export NEXT_PUBLIC_WS_URL="${NEXT_PUBLIC_WS_URL}" | |
| echo "Building with NEXT_PUBLIC_API_URL=\${NEXT_PUBLIC_API_URL}" | |
| npm run build | |
| cd .. | |
| # Install root dependencies (dotenv for ecosystem config) | |
| echo "📦 Installing PM2 config dependencies..." | |
| npm install | |
| # Ensure logs directory exists | |
| mkdir -p logs | |
| # Restart PM2 services | |
| echo "🔄 Restarting PM2 services..." | |
| if ! command -v pm2 &> /dev/null; then | |
| echo "❌ PM2 not found - please install PM2 globally" | |
| exit 1 | |
| fi | |
| # PM2 process names from GitHub secrets (expanded by runner) | |
| BACKEND_NAME="${BACKEND_NAME}" | |
| FRONTEND_NAME="${FRONTEND_NAME}" | |
| echo "Debug: BACKEND_NAME='\${BACKEND_NAME}'" | |
| echo "Debug: FRONTEND_NAME='\${FRONTEND_NAME}'" | |
| # Function to check if a PM2 process is online | |
| is_process_online() { | |
| pm2 describe "\$1" 2>/dev/null | grep -q "online" | |
| } | |
| # Restart both frontend and backend via config file | |
| if is_process_online "\${BACKEND_NAME}" && is_process_online "\${FRONTEND_NAME}"; then | |
| echo "♻️ Restarting PM2 services..." | |
| pm2 restart "\${BACKEND_NAME}" --update-env | |
| pm2 restart "\${FRONTEND_NAME}" --update-env | |
| else | |
| echo "🚀 Starting PM2 services from config..." | |
| pm2 start ecosystem.staging.config.js | |
| fi | |
| pm2 save | |
| echo "✅ PM2 services updated" | |
| echo "✅ Deployment to staging complete!" | |
| ENDSSH | |
| - name: Verify deployment | |
| run: | | |
| echo "🔍 Verifying staging deployment..." | |
| MAX_ATTEMPTS=12 | |
| SLEEP_SECONDS=5 | |
| for i in $(seq 1 $MAX_ATTEMPTS); do | |
| echo "Health check attempt $i/$MAX_ATTEMPTS..." | |
| if ssh ${{ secrets.USER }}@${{ secrets.HOST }} "curl -sf http://localhost:${{ secrets.API_PORT }}/health"; then | |
| echo "✅ Health check passed on attempt $i" | |
| exit 0 | |
| fi | |
| if [ $i -lt $MAX_ATTEMPTS ]; then | |
| echo "⏳ Waiting ${SLEEP_SECONDS}s before retry..." | |
| sleep $SLEEP_SECONDS | |
| fi | |
| done | |
| echo "❌ Health check failed after $MAX_ATTEMPTS attempts" | |
| exit 1 | |
| - name: Deployment summary | |
| run: | | |
| echo "## Staging Deployment Summary" >> $GITHUB_STEP_SUMMARY | |
| echo "" >> $GITHUB_STEP_SUMMARY | |
| echo "- **Branch**: ${{ github.ref_name }}" >> $GITHUB_STEP_SUMMARY | |
| echo "- **Commit**: \`${{ github.sha }}\`" >> $GITHUB_STEP_SUMMARY | |
| echo "- **Deployed by**: ${{ github.actor }}" >> $GITHUB_STEP_SUMMARY | |
| echo "- **Time**: $(date -u '+%Y-%m-%d %H:%M:%S UTC')" >> $GITHUB_STEP_SUMMARY | |
| # ============================================ | |
| # Deploy to Production | |
| # ============================================ | |
| deploy-production: | |
| name: Deploy to Production | |
| runs-on: ubuntu-latest | |
| # needs: test # Temporarily disabled for debugging | |
| if: | | |
| (github.event_name == 'release') || | |
| (github.event_name == 'workflow_dispatch' && github.event.inputs.environment == 'production') | |
| environment: | |
| name: production | |
| url: https://codeframe.example.com | |
| steps: | |
| - name: Checkout code | |
| uses: actions/checkout@v4 | |
| - name: Set up SSH | |
| uses: webfactory/[email protected] | |
| with: | |
| ssh-private-key: ${{ secrets.SSH_KEY }} | |
| - name: Add server to known hosts | |
| run: | | |
| mkdir -p ~/.ssh | |
| ssh-keyscan -H ${{ secrets.HOST }} >> ~/.ssh/known_hosts | |
| - name: Create environment file | |
| env: | |
| REMOTE_HOST: ${{ secrets.HOST }} | |
| REMOTE_USER: ${{ secrets.USER }} | |
| REMOTE_PATH: ${{ secrets.PROJECT_PATH }} | |
| ENV_ANTHROPIC_KEY: ${{ secrets.ANTHROPIC_API_KEY }} | |
| ENV_OPENAI_KEY: ${{ secrets.OPENAI_API_KEY }} | |
| ENV_DATABASE_PATH: ${{ secrets.DATABASE_PATH }} | |
| ENV_API_HOST: ${{ secrets.API_HOST }} | |
| ENV_API_PORT: ${{ secrets.API_PORT }} | |
| ENV_CORS: ${{ secrets.CORS_ORIGINS }} | |
| ENV_API_URL: ${{ secrets.API_URL }} | |
| ENV_WS_URL: ${{ secrets.WS_URL }} | |
| ENV_LOG_LEVEL: ${{ secrets.LOG_LEVEL }} | |
| ENV_LOG_FILE: ${{ secrets.LOG_FILE }} | |
| ENV_ENVIRONMENT: ${{ secrets.ENVIRONMENT }} | |
| ENV_DEBUG: ${{ secrets.DEBUG }} | |
| ENV_HOT_RELOAD: ${{ secrets.HOT_RELOAD }} | |
| run: | | |
| echo "📝 Creating .env.production file..." | |
| # Build env file content safely using printf (no shell interpretation) | |
| ENV_CONTENT=$(printf '%s\n' \ | |
| "# CodeFRAME Environment Configuration" \ | |
| "# Auto-generated by GitHub Actions deployment" \ | |
| "" \ | |
| "# AI Provider API Keys" \ | |
| "ANTHROPIC_API_KEY=${ENV_ANTHROPIC_KEY}" \ | |
| "OPENAI_API_KEY=${ENV_OPENAI_KEY}" \ | |
| "" \ | |
| "# Database Configuration" \ | |
| "DATABASE_PATH=${ENV_DATABASE_PATH}" \ | |
| "" \ | |
| "# Status Server Configuration" \ | |
| "API_HOST=${ENV_API_HOST}" \ | |
| "API_PORT=${ENV_API_PORT}" \ | |
| "CORS_ALLOWED_ORIGINS=${ENV_CORS}" \ | |
| "" \ | |
| "# Web UI Configuration" \ | |
| "NEXT_PUBLIC_API_URL=${ENV_API_URL}" \ | |
| "NEXT_PUBLIC_WS_URL=${ENV_WS_URL}" \ | |
| "" \ | |
| "# Logging Configuration" \ | |
| "LOG_LEVEL=${ENV_LOG_LEVEL}" \ | |
| "LOG_FILE=${ENV_LOG_FILE}" \ | |
| "" \ | |
| "# Environment & Development Flags" \ | |
| "ENVIRONMENT=${ENV_ENVIRONMENT}" \ | |
| "DEBUG=${ENV_DEBUG}" \ | |
| "HOT_RELOAD=${ENV_HOT_RELOAD}" \ | |
| ) | |
| # Base64 encode to prevent any shell interpretation during transfer | |
| ENV_BASE64=$(echo "$ENV_CONTENT" | base64 -w 0) | |
| # Transfer and decode safely on remote, verify creation | |
| ssh "${REMOTE_USER}@${REMOTE_HOST}" " | |
| set -e | |
| echo '${ENV_BASE64}' | base64 -d > '${REMOTE_PATH}/.env.production.tmp' | |
| if [ ! -s '${REMOTE_PATH}/.env.production.tmp' ]; then | |
| echo '❌ Failed to create environment file (empty or missing)' | |
| rm -f '${REMOTE_PATH}/.env.production.tmp' | |
| exit 1 | |
| fi | |
| mv '${REMOTE_PATH}/.env.production.tmp' '${REMOTE_PATH}/.env.production' | |
| chmod 600 '${REMOTE_PATH}/.env.production' | |
| " | |
| echo "✅ .env.production created and verified" | |
| - name: Create pre-deployment backup | |
| run: | | |
| ssh ${{ secrets.USER }}@${{ secrets.HOST }} "bash -s" << ENDSSH | |
| set -e | |
| echo "💾 Creating pre-deployment backup..." | |
| PROJECT_PATH="${{ secrets.PROJECT_PATH }}" | |
| BACKUP_BASE="\${PROJECT_PATH}/backups" | |
| TIMESTAMP="\$(date +%Y%m%d-%H%M%S)" | |
| BACKUP_NAME="backup-\${TIMESTAMP}" | |
| TMP_BACKUP="/tmp/\${BACKUP_NAME}" | |
| FINAL_ARCHIVE="\${BACKUP_BASE}/\${BACKUP_NAME}.tar.gz" | |
| RETENTION_COUNT=10 | |
| cd \${PROJECT_PATH} | |
| # Create persistent backup directory with proper permissions | |
| mkdir -p \${BACKUP_BASE} | |
| chmod 700 \${BACKUP_BASE} | |
| # Create temporary staging directory | |
| mkdir -p \${TMP_BACKUP} | |
| # Record current commit | |
| git rev-parse HEAD > \${TMP_BACKUP}/previous_commit.txt | |
| git log -1 --format="%H %s" >> \${TMP_BACKUP}/previous_commit.txt | |
| echo "📝 Current commit: \$(head -1 \${TMP_BACKUP}/previous_commit.txt)" | |
| # Backup database | |
| if [ -f .codeframe/state.db ]; then | |
| cp .codeframe/state.db \${TMP_BACKUP}/ | |
| echo "✅ Database backed up" | |
| fi | |
| # Backup config files | |
| if [ -d .codeframe ]; then | |
| cp -r .codeframe/config.* \${TMP_BACKUP}/ 2>/dev/null || true | |
| cp -r .codeframe/*.json \${TMP_BACKUP}/ 2>/dev/null || true | |
| fi | |
| # Backup environment files | |
| cp .env* \${TMP_BACKUP}/ 2>/dev/null || true | |
| cp ecosystem*.config.js \${TMP_BACKUP}/ 2>/dev/null || true | |
| # Backup recent logs (last 1000 lines each to keep size manageable) | |
| if [ -d logs ]; then | |
| mkdir -p \${TMP_BACKUP}/logs | |
| for logfile in logs/*.log; do | |
| if [ -f "\$logfile" ]; then | |
| tail -1000 "\$logfile" > "\${TMP_BACKUP}/logs/\$(basename \$logfile)" 2>/dev/null || true | |
| fi | |
| done | |
| echo "✅ Logs backed up" | |
| fi | |
| # Create compressed archive atomically | |
| echo "📦 Creating compressed archive..." | |
| tar -czf "\${TMP_BACKUP}.tar.gz" -C /tmp "\${BACKUP_NAME}" || { | |
| echo "❌ Failed to create backup archive" | |
| rm -rf \${TMP_BACKUP} | |
| exit 1 | |
| } | |
| # Move to final location atomically | |
| mv "\${TMP_BACKUP}.tar.gz" "\${FINAL_ARCHIVE}" || { | |
| echo "❌ Failed to move backup to final location" | |
| rm -rf \${TMP_BACKUP} "\${TMP_BACKUP}.tar.gz" | |
| exit 1 | |
| } | |
| # Set proper permissions on archive | |
| chmod 600 "\${FINAL_ARCHIVE}" | |
| # Cleanup temp directory | |
| rm -rf \${TMP_BACKUP} | |
| # Retention policy: keep last N backups | |
| echo "🗑️ Applying retention policy (keeping last \${RETENTION_COUNT} backups)..." | |
| cd \${BACKUP_BASE} | |
| ls -t backup-*.tar.gz 2>/dev/null | tail -n +\$((RETENTION_COUNT + 1)) | xargs -r rm -f | |
| # Report backup status | |
| BACKUP_SIZE=\$(du -h "\${FINAL_ARCHIVE}" | cut -f1) | |
| BACKUP_COUNT=\$(ls -1 backup-*.tar.gz 2>/dev/null | wc -l) | |
| echo "✅ Backup created: \${FINAL_ARCHIVE} (\${BACKUP_SIZE})" | |
| echo "📊 Total backups retained: \${BACKUP_COUNT}" | |
| ENDSSH | |
| - name: Deploy to production server | |
| env: | |
| BACKEND_NAME_PROD: ${{ secrets.PM2_BACKEND_NAME }} | |
| FRONTEND_NAME_PROD: ${{ secrets.PM2_FRONTEND_NAME }} | |
| NEXT_PUBLIC_API_URL: ${{ secrets.API_URL }} | |
| NEXT_PUBLIC_WS_URL: ${{ secrets.WS_URL }} | |
| run: | | |
| ssh ${{ secrets.USER }}@${{ secrets.HOST }} "bash -s" << ENDSSH | |
| set -e | |
| echo "🚀 Starting deployment to production..." | |
| # Navigate to project directory | |
| cd ${{ secrets.PROJECT_PATH }} | |
| # Pull the release tag or main | |
| echo "📥 Pulling latest code..." | |
| git fetch origin --tags | |
| TAG_NAME="${{ github.event.release.tag_name }}" | |
| if [ -n "\$TAG_NAME" ]; then | |
| git checkout \$TAG_NAME | |
| echo "✅ Checked out tag: \$TAG_NAME" | |
| else | |
| git fetch origin main | |
| git reset --hard origin/main | |
| echo "✅ Reset to origin/main" | |
| fi | |
| # Backend setup | |
| echo "🐍 Setting up Python backend..." | |
| if ! command -v uv &> /dev/null; then | |
| curl -LsSf https://astral.sh/uv/install.sh | sh | |
| export PATH="$HOME/.cargo/bin:$PATH" | |
| fi | |
| uv venv | |
| source .venv/bin/activate | |
| uv sync --no-dev | |
| # Frontend setup (production build) - export NEXT_PUBLIC vars at build time | |
| echo "📦 Building frontend for production..." | |
| cd web-ui | |
| npm ci | |
| echo "🔒 Running security audit..." | |
| npm audit --audit-level=critical | |
| # NEXT_PUBLIC vars must be set at build time for Next.js | |
| export NEXT_PUBLIC_API_URL="${NEXT_PUBLIC_API_URL}" | |
| export NEXT_PUBLIC_WS_URL="${NEXT_PUBLIC_WS_URL}" | |
| echo "Building with NEXT_PUBLIC_API_URL=\${NEXT_PUBLIC_API_URL}" | |
| npm run build | |
| cd .. | |
| # Install root dependencies (dotenv for ecosystem config) | |
| echo "📦 Installing PM2 config dependencies..." | |
| npm install | |
| # Run database migrations if any | |
| echo "🗃️ Running database migrations..." | |
| # Add migration command here if needed | |
| # Ensure logs directory exists | |
| mkdir -p logs | |
| # Restart PM2 services | |
| echo "🔄 Restarting PM2 services..." | |
| if ! command -v pm2 &> /dev/null; then | |
| echo "❌ PM2 not found - please install PM2 globally" | |
| exit 1 | |
| fi | |
| # PM2 process names from GitHub secrets (expanded by runner) | |
| BACKEND_NAME="${BACKEND_NAME_PROD}" | |
| FRONTEND_NAME="${FRONTEND_NAME_PROD}" | |
| # Function to check if a PM2 process is online | |
| is_process_online() { | |
| pm2 describe "\$1" 2>/dev/null | grep -q "online" | |
| } | |
| # Handle backend process | |
| if is_process_online "\${BACKEND_NAME}"; then | |
| echo "♻️ Restarting \${BACKEND_NAME}..." | |
| pm2 restart "\${BACKEND_NAME}" | |
| else | |
| echo "🚀 Starting \${BACKEND_NAME}..." | |
| pm2 start ecosystem.production.config.js --only "\${BACKEND_NAME}" | |
| fi | |
| # Handle frontend process | |
| if is_process_online "\${FRONTEND_NAME}"; then | |
| echo "♻️ Restarting \${FRONTEND_NAME}..." | |
| pm2 restart "\${FRONTEND_NAME}" | |
| else | |
| echo "🚀 Starting \${FRONTEND_NAME}..." | |
| pm2 start ecosystem.production.config.js --only "\${FRONTEND_NAME}" | |
| fi | |
| pm2 save | |
| echo "✅ PM2 services updated" | |
| echo "✅ Deployment to production complete!" | |
| ENDSSH | |
| - name: Verify deployment | |
| run: | | |
| echo "🔍 Verifying production deployment..." | |
| MAX_ATTEMPTS=12 | |
| SLEEP_SECONDS=5 | |
| for i in $(seq 1 $MAX_ATTEMPTS); do | |
| echo "Health check attempt $i/$MAX_ATTEMPTS..." | |
| if ssh ${{ secrets.USER }}@${{ secrets.HOST }} "curl -sf http://localhost:${{ secrets.API_PORT }}/health"; then | |
| echo "✅ Health check passed on attempt $i" | |
| exit 0 | |
| fi | |
| if [ $i -lt $MAX_ATTEMPTS ]; then | |
| echo "⏳ Waiting ${SLEEP_SECONDS}s before retry..." | |
| sleep $SLEEP_SECONDS | |
| fi | |
| done | |
| echo "❌ Health check failed after $MAX_ATTEMPTS attempts" | |
| exit 1 | |
| - name: Deployment summary | |
| run: | | |
| echo "## Production Deployment Summary" >> $GITHUB_STEP_SUMMARY | |
| echo "" >> $GITHUB_STEP_SUMMARY | |
| echo "- **Version**: ${{ github.event.release.tag_name || github.ref_name }}" >> $GITHUB_STEP_SUMMARY | |
| echo "- **Commit**: \`${{ github.sha }}\`" >> $GITHUB_STEP_SUMMARY | |
| echo "- **Deployed by**: ${{ github.actor }}" >> $GITHUB_STEP_SUMMARY | |
| echo "- **Time**: $(date -u '+%Y-%m-%d %H:%M:%S UTC')" >> $GITHUB_STEP_SUMMARY |