Skip to content

fix(deploy): Fix MCP port mismatch, unpause DAGs, and set MCP URL env… #525

fix(deploy): Fix MCP port mismatch, unpause DAGs, and set MCP URL env…

fix(deploy): Fix MCP port mismatch, unpause DAGs, and set MCP URL env… #525

Workflow file for this run

# =============================================================================
# Pre-commit CI Workflow
# =============================================================================
# Runs pre-commit hooks on all files for comprehensive code quality checks.
#
# Includes:
# - Gitleaks (secret detection)
# - Ruff (Python linting & formatting)
# - pylint-airflow (Airflow DAG checks)
# - airflint (Airflow best practices)
# - ShellCheck (Shell script linting)
# - YAML/JSON validation
# - Custom Qubinode DAG linting (ADR-0045/ADR-0046)
#
# =============================================================================
name: Pre-commit

# Triggers: pushes to main/develop, PRs targeting main, and manual dispatch.
on:
  push:
    branches: [main, develop]
  pull_request:
    branches: [main]
  workflow_dispatch:

jobs:
  # Runs the repository's pre-commit hooks over the whole tree.
  pre-commit:
    name: Pre-commit Checks
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v6

      - name: Set up Python
        uses: actions/setup-python@v6
        with:
          python-version: '3.12'

      - name: Set up uv
        uses: astral-sh/setup-uv@v5
        with:
          enable-cache: true

      - name: Install pre-commit
        run: |
          uv pip install --system pre-commit
          uv pip install --system pylint pylint-airflow apache-airflow==2.10.4

      - name: Run pre-commit
        uses: pre-commit/action@v3.0.1
        with:
          extra_args: --all-files --show-diff-on-failure

  # Separate job for Gitleaks with better reporting
  # Uses free gitleaks CLI instead of paid gitleaks-action (which requires license for orgs)
  gitleaks:
    name: Secret Detection (Gitleaks)
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v6
        with:
          fetch-depth: 0  # Full history for better detection

      - name: Install Gitleaks
        run: |
          # Install latest gitleaks from GitHub releases
          GITLEAKS_VERSION=$(curl -s https://api.github.com/repos/gitleaks/gitleaks/releases/latest | grep '"tag_name"' | sed -E 's/.*"v([^"]+)".*/\1/')
          curl -sSfL "https://github.com/gitleaks/gitleaks/releases/download/v${GITLEAKS_VERSION}/gitleaks_${GITLEAKS_VERSION}_linux_x64.tar.gz" | tar -xz
          sudo mv gitleaks /usr/local/bin/
          gitleaks version

      - name: Run Gitleaks
        run: |
          echo "========================================"
          echo "Running Gitleaks Secret Detection"
          echo "========================================"
          # Run gitleaks with config to reduce false positives
          gitleaks detect --source . --config .gitleaks.toml --redact --no-git || {
            echo ""
            echo "[WARN] Potential secrets detected - review output above"
            echo "If these are false positives, update .gitleaks.toml allowlist"
            exit 1
          }
          echo ""
          echo "[OK] No secrets detected"

  # Separate job for comprehensive Airflow linting
  airflow-lint:
    name: Airflow DAG Linting
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v6

      - name: Set up Python
        uses: actions/setup-python@v6
        with:
          python-version: '3.12'

      - name: Set up uv
        uses: astral-sh/setup-uv@v5
        with:
          enable-cache: true

      - name: Install dependencies
        run: |
          uv pip install --system apache-airflow==2.10.4
          uv pip install --system apache-airflow-providers-postgres
          uv pip install --system pylint pylint-airflow airflint
          uv pip install --system pyyaml

      - name: Run Qubinode DAG linter
        run: |
          chmod +x airflow/scripts/lint-dags.sh
          ./airflow/scripts/lint-dags.sh airflow/dags/

      - name: Run pylint-airflow
        continue-on-error: true  # Don't fail on warnings
        run: |
          echo "========================================"
          echo "pylint-airflow Analysis"
          echo "========================================"
          # Run pylint with airflow plugin on DAG files
          for dag in airflow/dags/*.py; do
            filename=$(basename "$dag")
            # Skip non-DAG files
            [[ "$filename" == dag_helpers.py ]] && continue
            [[ "$filename" == dag_factory.py ]] && continue
            [[ "$filename" == dag_loader.py ]] && continue
            [[ "$filename" == dag_logging_mixin.py ]] && continue
            echo ""
            echo "Checking: $filename"
            pylint --load-plugins=pylint_airflow \
              --disable=all \
              --enable=C83,R83,W83,E83 \
              "$dag" 2>/dev/null || true
          done
          echo ""
          echo "[OK] pylint-airflow analysis complete"

      - name: Run airflint
        continue-on-error: true  # Don't fail on suggestions
        run: |
          echo "========================================"
          echo "airflint Best Practices Analysis"
          echo "========================================"
          airflint airflow/dags/ 2>/dev/null || true
          echo ""
          echo "[OK] airflint analysis complete"

      - name: Validate DAG imports
        run: |
          echo "========================================"
          echo "Validating DAG Imports"
          echo "========================================"
          export AIRFLOW_HOME=$(pwd)/airflow
          export AIRFLOW__CORE__DAGS_FOLDER=$(pwd)/airflow/dags
          export AIRFLOW__CORE__LOAD_EXAMPLES=false
          export AIRFLOW__DATABASE__SQL_ALCHEMY_CONN=sqlite:///$(pwd)/airflow/airflow.db
          cd airflow
          # Initialize Airflow DB (use python -m to ensure correct path)
          # Allow migrate to fail initially (may need to init first on fresh install)
          echo "Initializing Airflow database..."
          python -m airflow db migrate 2>&1 || {
            echo "[WARN] db migrate failed, trying db init..."
            python -m airflow db init 2>&1 || true
          }
          # List DAGs and check for import errors (auto-confirm with yes)
          echo "Listing DAGs..."
          yes | python -m airflow dags list 2>&1 | tee dag_list.txt || true
          # Check for actual import errors by running list-import-errors
          echo "Checking for DAG import errors..."
          import_errors=$(yes | python -m airflow dags list-import-errors 2>&1 || true)
          # Check if there are real errors (not just "No data found" or graphviz warnings)
          if echo "$import_errors" | grep -v "No data found" | grep -v "Could not import graphviz" | grep -qE "Traceback|ImportError|ModuleNotFoundError|SyntaxError"; then
            echo "[ERROR] DAG import errors detected:"
            echo "$import_errors"
            exit 1
          fi
          echo ""
          echo "[OK] All DAGs imported successfully"