diff --git a/.github/actions/action.yml b/.github/actions/action.yml index 0700b662..b09a3c99 100644 --- a/.github/actions/action.yml +++ b/.github/actions/action.yml @@ -1,8 +1,4 @@ -name: Build and Release OnePlus Kernels - -permissions: - contents: write - actions: write +name: 'Build OnePlus Kernel with KernelSU Next' inputs: op_config_json: @@ -10,32 +6,57 @@ inputs: required: true type: string ksun_branch: + description: 'KernelSU Next branch to use' required: true type: string default: stable susfs_commit_hash_or_branch: + description: 'SUSFS branch or commit hash (empty = auto-detect by kernel version)' required: false type: string - default: "" + default: "" optimize_level: + description: 'Compiler optimization level (O2 or O3)' + required: false + type: string + default: O2 + clean: + description: 'Force clean build without ccache acceleration' required: false type: string - default: O2 # Choices: O2 or O3 + default: 'false' outputs: kernel_version: + description: 'Full kernel version string' value: ${{ steps.save_metadata.outputs.kernel_version }} ksu_version: + description: 'KernelSU Next version number' value: ${{ steps.save_metadata.outputs.ksu_version }} susfs_version: + description: 'SUSFS version string' value: ${{ steps.save_metadata.outputs.susfs_version }} image_sha256: + description: 'SHA256 hash of kernel Image' value: ${{ steps.collect_stats.outputs.image_sha256 }} warnings: - value: ${{ steps.collect_stats.outputs.warnings }} + description: 'Number of compiler warnings' + value: ${{ steps.collect_stats.outputs.warnings_count }} + build_time: + description: 'Total build time in seconds' + value: ${{ steps.collect_stats.outputs.build_time }} + ccache_hit_rate: + description: 'ccache hit rate percentage' + value: ${{ steps.ccache_stats.outputs.hit_rate }} + ccache_direct_rate: + description: 'ccache direct hit rate percentage' + value: ${{ steps.ccache_stats.outputs.direct_rate }} + zip_name: + description: 'Name of the generated flashable ZIP' 
+ value: ${{ steps.create_zip.outputs.zip_name }} runs: - using: composite + using: 'composite' steps: - name: Parse op_config_json shell: bash @@ -65,32 +86,26 @@ runs: ip_set="$OP_IP_SET" optimize='${{ inputs.optimize_level }}' - # Initialize an array to collect errors errors=() - # Non-empty checks & Basic format checks - # model: allow start with 'OP' if [[ -z "$model" ]]; then errors+=("Input 'model' cannot be empty") elif [[ ! "$model" =~ ^OP ]]; then errors+=("Input 'model' does not start with 'OP'. Got: '$model'") fi - # soc: allow letters, digits, underscores, dashes (e.g., sm8650) if [[ -z "$soc" ]]; then errors+=("Input 'soc' cannot be empty") elif [[ ! "$soc" =~ ^[A-Za-z0-9_-]+$ ]]; then errors+=("Input 'soc' contains invalid characters. Allowed: letters, digits, underscore, dash. Got: '$soc'") fi - # branch: allow common ref patterns; spaces not allowed if [[ -z "$branch" ]]; then errors+=("Input 'branch' cannot be empty") elif [[ ! "$branch" =~ ^[A-Za-z0-9._/-]+$ ]]; then errors+=("Input 'branch' contains invalid characters. Allowed: letters, digits, ., _, -, /. Got: '$branch'") fi - # manifest: either HTTPS URL ending with .xml, or a filename ending with .xml (no spaces) if [[ -z "$manifest" ]]; then errors+=("Input 'manifest' cannot be empty") elif [[ "$manifest" == http*://* ]]; then @@ -109,70 +124,59 @@ runs: fi fi - # android_version: allow android followed by a number if [[ -z "$android_version" ]]; then errors+=("Input 'android_version' cannot be empty") elif [[ ! "$android_version" =~ ^android[0-9]+$ ]]; then - # Se non è vuoto, controlla il formato specifico errors+=("Input 'android_version' contains invalid characters. Allowed: android followed by a number. Got: '$android_version'") fi - # kernel_version: allow number in X.Y format if [[ -z "$kernel_version" ]]; then errors+=("Input 'kernel_version' cannot be empty") elif [[ ! "$kernel_version" =~ ^[0-9]+\.[0-9]+$ ]]; then errors+=("Input 'kernel_version' contains invalid characters. 
Allowed: number in X.Y format. Got: '$kernel_version'") fi - # os_version: allow start with 'OOS' if [[ -z "$os_version" ]]; then errors+=("Input 'os_version' cannot be empty") elif [[ ! "$os_version" =~ ^OOS ]]; then errors+=("Input 'os_version' does not start with 'OOS'. Got: '$os_version'") fi - # hmbird: allow 'true' or 'false' if [[ -z "$hmbird" ]]; then errors+=("Input 'hmbird' cannot be empty") elif [[ "$hmbird" != "true" && "$hmbird" != "false" ]]; then errors+=("Input 'hmbird' contains invalid characters. Allowed: 'true' or 'false'. Got: '$hmbird'") fi - # bbg: allow 'true' or 'false' if [[ -z "$bbg" ]]; then errors+=("Input 'bbg' cannot be empty") elif [[ "$bbg" != "true" && "$bbg" != "false" ]]; then errors+=("Input 'bbg' contains invalid characters. Allowed: 'true' or 'false'. Got: '$bbg'") fi - # bbr: allow 'true' or 'false' if [[ -z "$bbr" ]]; then errors+=("Input 'bbr' cannot be empty") elif [[ "$bbr" != "true" && "$bbr" != "false" ]]; then errors+=("Input 'bbr' contains invalid characters. Allowed: 'true' or 'false'. Got: '$bbr'") fi - # ttl: allow 'true' or 'false' if [[ -z "$ttl" ]]; then errors+=("Input 'ttl' cannot be empty") elif [[ "$ttl" != "true" && "$ttl" != "false" ]]; then errors+=("Input 'ttl' contains invalid characters. Allowed: 'true' or 'false'. Got: '$ttl'") fi - # ip_set: allow 'true' or 'false' if [[ -z "$ip_set" ]]; then errors+=("Input 'ip_set' cannot be empty") elif [[ "$ip_set" != "true" && "$ip_set" != "false" ]]; then errors+=("Input 'ip_set' contains invalid characters. Allowed: 'true' or 'false'. Got: '$ip_set'") fi - # Optimize level validation case "$optimize" in O2|O3) ;; *) errors+=("optimize_level must be O2 or O3. Got: '$optimize'"); ;; esac - # Check for errors and act accordingly if [ ${#errors[@]} -ne 0 ]; then echo "Found ${#errors[@]} validation error(s):" >&2 for error in "${errors[@]}"; do @@ -181,55 +185,30 @@ runs: echo "::error::Input validation failed. See logs for details." 
exit 1 else - echo "Input validation OK." + echo "✅ Input validation passed" fi - echo "::endgroup::" - - - name: Remove Unwanted Softwares - shell: bash - run: | - export DEBIAN_FRONTEND=noninteractive - echo "DEBIAN_FRONTEND=noninteractive" >> "$GITHUB_ENV" - df -h - sudo rm -rf /usr/share/dotnet /usr/local/lib/android /opt/ghc \ - /usr/local/.ghcup /opt/hostedtoolcache/CodeQL /usr/local/share/powershell \ - /usr/share/swift || true - sudo docker image prune --all --force - echo "Unused directories cleared" - sudo apt-get purge -y aria2 ansible azure-cli shellcheck rpm xorriso zsync \ - esl-erlang firefox gfortran-8 gfortran-9 google-chrome-stable google-cloud-sdk \ - imagemagick libmagickcore-dev libmagickwand-dev libmagic-dev ant ant-optional \ - kubectl mercurial apt-transport-https mono-complete libmysqlclient unixodbc-dev \ - yarn chrpath libssl-dev libxft-dev libfreetype6 libfreetype6-dev libfontconfig1 \ - libfontconfig1-dev snmp pollinate libpq-dev postgresql-client powershell ruby-full \ - sphinxsearch subversion mongodb-org microsoft-edge-stable || true - sudo apt-get purge -y "$(dpkg-query -W -f='${binary:Package}\n' | grep -E '^mysql|^php|^dotnet' || true)" || true - sudo apt-get autoremove -y - sudo apt-get autoclean -y - echo "Package cleanup complete" - df -h + echo "::endgroup::" - name: Install Minimal Dependencies shell: bash run: | set -euo pipefail - echo "::group::Install deps" + echo "::group::Install dependencies" sudo apt-get -o Acquire::Retries=3 update -qq sudo DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends \ git curl ca-certificates build-essential clang lld flex bison \ libelf-dev libssl-dev libncurses-dev zlib1g-dev liblz4-tool \ - libxml2-utils rsync unzip dwarves file python3 + libxml2-utils rsync unzip dwarves file python3 ccache jq bc dos2unix sudo apt-get clean + echo "✅ Dependencies installed" echo "::endgroup::" - name: Setup Base Environment shell: bash run: | set -euo pipefail - # Derive a unique 
build directory name CONFIG="$OP_MODEL" echo "CONFIG=$CONFIG" >> "$GITHUB_ENV" - # Install repo tool if missing REPO="/usr/local/bin/repo" if [ ! -x "$REPO" ]; then curl -s https://storage.googleapis.com/git-repo-downloads/repo -o "$REPO" @@ -241,17 +220,20 @@ runs: shell: bash run: | set -euo pipefail + echo "::group::Initialize kernel source" echo "Creating folder for configuration: $CONFIG" mkdir -p "$CONFIG" cd "$CONFIG" echo "Initializing and syncing kernel source..." + if [[ "$OP_MANIFEST" == https://* ]]; then mkdir -p .repo/manifests curl --fail --show-error --location --proto '=https' "$OP_MANIFEST" -o .repo/manifests/temp_manifest.xml - "$REPO" init -u https://github.com/OnePlusOSS/kernel_manifest.git -b oneplus/sm8650 -m temp_manifest.xml --repo-rev=v2.16 --depth=1 --no-clone-bundle --no-tags + "$REPO" init -u https://github.com/OnePlusOSS/kernel_manifest.git -b "$OP_BRANCH" -m temp_manifest.xml --repo-rev=v2.16 --depth=1 --no-clone-bundle --no-tags else "$REPO" init -u https://github.com/OnePlusOSS/kernel_manifest.git -b "$OP_BRANCH" -m "$OP_MANIFEST" --repo-rev=v2.16 --depth=1 --no-clone-bundle --no-tags fi + "$REPO" --version success=false for i in 1 2 3; do @@ -260,19 +242,23 @@ runs: success=true break fi - echo "repo sync attempt $i failed; retrying..." + echo "⚠️ repo sync attempt $i failed; retrying..." 
sleep 30 done - $success || { echo "repo sync failed after 3 attempts"; exit 1; } + $success || { echo "::error::repo sync failed after 3 attempts"; exit 1; } + echo "✅ Kernel source synced" + echo "::endgroup::" - name: Get Kernel Version Info shell: bash run: | set -euo pipefail + echo "::group::Get kernel version" CONFIG_DIR="$GITHUB_WORKSPACE/$CONFIG" ARTIFACTS_DIR="$CONFIG_DIR/artifacts" mkdir -p "$ARTIFACTS_DIR" cd "$CONFIG_DIR/kernel_platform/common" + CONFIG_FILES=("build.config.common" "build.config.constants") BRANCH_LINE="" for f in "${CONFIG_FILES[@]}"; do @@ -281,52 +267,291 @@ runs: if [ -n "$l" ]; then BRANCH_LINE="$l"; break; fi fi done + if [ -z "$BRANCH_LINE" ]; then - echo "Error: No BRANCH= found"; exit 1 + echo "::error::No BRANCH= found in config files"; exit 1 fi + BRANCH_VALUE="${BRANCH_LINE#*=}" ANDROID_VERSION="${BRANCH_VALUE%-*}" + if [ -z "$ANDROID_VERSION" ]; then - echo "Could not parse android version from BRANCH=$BRANCH_VALUE" + echo "::error::Could not parse android version from BRANCH=$BRANCH_VALUE" exit 1 fi + VERSION=$(grep '^VERSION *=' Makefile | awk '{print $3}') PATCHLEVEL=$(grep '^PATCHLEVEL *=' Makefile | awk '{print $3}') SUBLEVEL=$(grep '^SUBLEVEL *=' Makefile | awk '{print $3}') FULL_VERSION="$VERSION.$PATCHLEVEL.$SUBLEVEL" + cd "$ARTIFACTS_DIR" echo "$ANDROID_VERSION-$FULL_VERSION" > "${OP_MODEL}_${OP_OS_VERSION}.txt" echo "$OP_OS_VERSION" >> "${OP_MODEL}_${OP_OS_VERSION}.txt" - echo "ANDROID_VER=$ANDROID_VERSION" >> "$GITHUB_ENV" - echo "KERNEL_VER=$VERSION.$PATCHLEVEL" >> "$GITHUB_ENV" - echo "KERNEL_FULL_VER=$ANDROID_VERSION-$FULL_VERSION" >> "$GITHUB_ENV" - echo "SUSFS_KERNEL_BRANCH=gki-$ANDROID_VERSION-$VERSION.$PATCHLEVEL" >> "$GITHUB_ENV" + + { + echo "ANDROID_VER=$ANDROID_VERSION" + echo "KERNEL_VER=$VERSION.$PATCHLEVEL" + echo "TKERNEL_VER=$FULL_VERSION" + echo "KERNEL_FULL_VER=$ANDROID_VERSION-$FULL_VERSION" + echo "SUSFS_KERNEL_BRANCH=gki-$ANDROID_VERSION-$VERSION.$PATCHLEVEL" + } >> "$GITHUB_ENV" + + echo "✅ 
Detected: $ANDROID_VERSION-$FULL_VERSION" + echo " SUSFS Branch: gki-$ANDROID_VERSION-$VERSION.$PATCHLEVEL" + echo "::endgroup::" + + - name: 🏷️ Set Build Identity (Hardcoded) + shell: bash + run: | + set -euo pipefail + echo "::group::Set build user/host" + + BUILD_USER="OnePlus" + BUILD_HOST="ubuntu-build" + + echo "BUILD_USER=$BUILD_USER" >> "$GITHUB_ENV" + echo "BUILD_HOST=$BUILD_HOST" >> "$GITHUB_ENV" + + echo "" + echo "✅ Build identity configured:" + echo " User: $BUILD_USER" + echo " Host: $BUILD_HOST" + echo "::endgroup::" + + - name: Detect Clang + shell: bash + run: | + set -euo pipefail + echo "::group::Detect Clang" + KP="$GITHUB_WORKSPACE/$CONFIG/kernel_platform" + CLANG_FOUND=false + for base in "$KP/prebuilts" "$KP/prebuilts-master"; do + [ -d "$base/clang/host/linux-x86" ] || continue + latest=$(ls -d "$base"/clang/host/linux-x86/clang-r*/ 2>/dev/null | sort -V | tail -n1 || true) + if [ -n "$latest" ] && [ -x "$latest/bin/clang" ]; then + CLANG_BIN="$latest/bin" + CLANG_FOUND=true + break + fi + done + if ! $CLANG_FOUND && command -v clang >/dev/null 2>&1; then + CLANG_BIN="$(dirname "$(command -v clang)")" + CLANG_FOUND=true + echo "Using system clang." 
+ fi + $CLANG_FOUND || { echo "::error::No clang toolchain found"; exit 1; } + echo "CLANG_BIN_PATH=$CLANG_BIN" >> "$GITHUB_ENV" + CLANG_VERSION="$("$CLANG_BIN/clang" --version | head -n1)" + echo "CLANG_VERSION=$CLANG_VERSION" >> "$GITHUB_ENV" + echo "✅ Detected Clang: $CLANG_VERSION" + echo "::endgroup::" + + - name: Derive Clang Short Version + shell: bash + run: | + set -euo pipefail + echo "::group::Derive Clang fingerprint" + + short="$("${CLANG_BIN_PATH}/clang" --version | sed -n '1s/.*clang-r\([0-9.]\+\).*/\1/p')" + + if [ -z "$short" ]; then + short="$("${CLANG_BIN_PATH}/clang" --version | sha256sum | cut -c1-8)" + echo "⚠️ Could not extract clang-r version, using hash: $short" + else + echo "✅ Clang version fingerprint: $short" + fi + + echo "CLANG_VERSION_SHORT=$short" >> "$GITHUB_ENV" + + CLANG_FULL_VERSION="$("${CLANG_BIN_PATH}/clang" --version | head -n1)" + echo "CLANG_FULL_VERSION=$CLANG_FULL_VERSION" >> "$GITHUB_ENV" + + echo "Full Clang version: $CLANG_FULL_VERSION" + echo "::endgroup::" + + - name: Set Cache Environment + shell: bash + run: | + set -euo pipefail + echo "::group::Configure ccache environment" + + CACHE_DIR="$HOME/.ccache" + echo "CCACHE_DIR=$CACHE_DIR" >> "$GITHUB_ENV" + echo "CCACHE_MAXSIZE=8G" >> "$GITHUB_ENV" + + mkdir -p "$CACHE_DIR" + + echo "Cache configuration:" + echo " Directory: $CACHE_DIR" + echo " Max size: 8G" + echo " Android: ${ANDROID_VER:-unknown}" + echo " Kernel: ${KERNEL_VER:-unknown}" + echo " OOS Version: ${OP_OS_VERSION:-unknown}" + echo " Clang: ${CLANG_VERSION_SHORT:-unknown}" + echo " Device: ${OP_MODEL:-unknown}" + echo "::endgroup::" + + - name: Cache ccache + uses: actions/cache@v4 + with: + path: ${{ env.CCACHE_DIR }} + key: ccache-ksun-v3-${{ env.ANDROID_VER }}-${{ env.KERNEL_VER }}-${{ env.OP_OS_VERSION }}-${{ env.CLANG_VERSION_SHORT }}-${{ env.OP_MODEL }} + restore-keys: | + ccache-ksun-v3-${{ env.ANDROID_VER }}-${{ env.KERNEL_VER }}-${{ env.OP_OS_VERSION }}-${{ env.CLANG_VERSION_SHORT }}-${{ 
env.OP_MODEL }}- + ccache-ksun-v3-${{ env.ANDROID_VER }}-${{ env.KERNEL_VER }}-${{ env.OP_OS_VERSION }}-${{ env.CLANG_VERSION_SHORT }}- + ccache-ksun-v3-${{ env.ANDROID_VER }}-${{ env.KERNEL_VER }}-${{ env.OP_OS_VERSION }}- + ccache-ksun-v3-${{ env.ANDROID_VER }}-${{ env.KERNEL_VER }}- + ccache-ksun-v3-${{ env.ANDROID_VER }}- + ccache-ksun-v3- + + - name: Configure ccache + shell: bash + run: | + set -euo pipefail + echo "::group::Configure ccache for maximum performance" + + export CCACHE_DIR="$HOME/.ccache" + export CCACHE_MAXSIZE="8G" + + ccache -M "$CCACHE_MAXSIZE" + + export CCACHE_COMPILERCHECK="content" + export CCACHE_BASEDIR="${GITHUB_WORKSPACE}" + export CCACHE_NOHASHDIR="true" + + ccache -o compression=true + ccache -o compression_level=3 + ccache -o direct_mode=true + ccache -o hash_dir=false + ccache -o file_clone=true + ccache -o inode_cache=true + ccache -o umask=002 + + if ccache --help 2>&1 | grep -q 'depend_mode'; then + ccache -o depend_mode=true + fi + + ccache -o sloppiness=file_macro,time_macros,include_file_mtime,include_file_ctime,pch_defines,system_headers,locale + + export CCACHE_IGNOREOPTIONS="--sysroot*" + + { + echo "CCACHE_COMPILERCHECK=content" + echo "CCACHE_BASEDIR=${GITHUB_WORKSPACE}" + echo "CCACHE_NOHASHDIR=true" + echo "CCACHE_IGNOREOPTIONS=$CCACHE_IGNOREOPTIONS" + } >> "$GITHUB_ENV" + + echo "✅ ccache configured for production" + echo "" + echo "Configuration applied:" + echo " Max size: 8G (persisted)" + echo " Base dir: ${GITHUB_WORKSPACE}" + echo " Compression: zstd level 3" + echo " Direct mode: enabled" + echo " File cloning: enabled" + echo " Inode cache: enabled" + echo "" + + if [ -d "$CCACHE_DIR" ] && find "$CCACHE_DIR" -type f -print -quit 2>/dev/null | grep -q .; then + echo "✅ Restored cache found" + echo "" + echo "Pre-build cache statistics:" + ccache -s | head -n 20 + else + echo "⚠️ No existing cache - building from scratch" + echo "Cache will be populated for future builds" + fi + + echo "::endgroup::" + + - 
name: Handle Clean Build Option + if: ${{ inputs.clean == 'true' }} + shell: bash + run: | + set -euo pipefail + echo "::group::Clean build requested" + echo "⚠️ Clean build mode enabled - ccache will be bypassed" + + echo "CCACHE_DISABLE=1" >> "$GITHUB_ENV" + + echo "✅ ccache disabled for clean build (no cache/stat wipe)" + echo " Shared cache and statistics preserved for concurrent jobs" + echo "::endgroup::" - name: Clone AnyKernel3 and Other Dependencies shell: bash run: | set -euo pipefail + echo "::group::Clone dependencies" echo "Cloning AnyKernel3 and other dependencies..." + ANYKERNEL_BRANCH="gki-2.0" - if [[ -z "${{ inputs.susfs_commit_hash_or_branch }}" ]]; then - SUSFS_BRANCH="${{ env.SUSFS_KERNEL_BRANCH }}" + + # Determine SUSFS branch/commit to use + SUSFS_INPUT="${{ inputs.susfs_commit_hash_or_branch }}" + + # Default SUSFS branches for each kernel version + declare -A DEFAULT_SUSFS=( + ["android12-5.10"]="gki-android12-5.10" + ["android13-5.15"]="gki-android13-5.15" + ["android14-6.1"]="gki-android14-6.1" + ["android15-6.6"]="gki-android15-6.6" + ) + + # Determine which SUSFS branch to use + KERNEL_KEY="${{ env.ANDROID_VER }}-${{ env.KERNEL_VER }}" + if [ -z "$SUSFS_INPUT" ] || [ "$SUSFS_INPUT" = "next" ]; then + # Empty or "next": use kernel-specific default + SUSFS_BRANCH="${DEFAULT_SUSFS[$KERNEL_KEY]:-gki-android14-6.1}" + echo "ℹ️ Using default SUSFS branch for $KERNEL_KEY: $SUSFS_BRANCH" else - SUSFS_BRANCH="${{ inputs.susfs_commit_hash_or_branch }}" + # User provided custom branch/commit + SUSFS_BRANCH="$SUSFS_INPUT" + echo "🔧 Using custom SUSFS ref: $SUSFS_BRANCH" fi + echo "Using AnyKernel3 branch: $ANYKERNEL_BRANCH" - echo "Using SUSFS branch: $SUSFS_BRANCH" + echo "Using SUSFS branch/commit: $SUSFS_BRANCH" + git clone --depth=1 https://github.com/TheWildJames/AnyKernel3.git -b "$ANYKERNEL_BRANCH" git clone --depth=1 https://github.com/TheWildJames/kernel_patches.git git clone https://gitlab.com/simonpunk/susfs4ksu.git + cd susfs4ksu - if 
git rev-parse --verify "origin/$SUSFS_BRANCH" >/dev/null 2>&1 || git rev-parse --verify "$SUSFS_BRANCH" >/dev/null 2>&1; then + + # Try to checkout the specified branch/commit + if git rev-parse --verify "origin/$SUSFS_BRANCH" >/dev/null 2>&1; then + # It's a remote branch + echo "✅ Checking out remote branch: $SUSFS_BRANCH" + git checkout "$SUSFS_BRANCH" + elif git rev-parse --verify "$SUSFS_BRANCH" >/dev/null 2>&1; then + # It's a local ref or commit hash + echo "✅ Checking out ref/commit: $SUSFS_BRANCH" git checkout "$SUSFS_BRANCH" - SUSFS_COMMIT_SHA=$(git rev-parse HEAD) - echo "SUSFS_COMMIT_SHA=$SUSFS_COMMIT_SHA" >> $GITHUB_ENV else - echo "Error: SUSFS branch or ref '$SUSFS_BRANCH' not found." + echo "::error::SUSFS branch or commit '$SUSFS_BRANCH' not found in repository" + echo "" + echo "Available remote branches:" + git branch -r | head -n 20 exit 1 fi + + SUSFS_COMMIT_SHA=$(git rev-parse HEAD) + SUSFS_BRANCH_NAME=$(git rev-parse --abbrev-ref HEAD 2>/dev/null || echo "detached") + + echo "SUSFS_COMMIT_SHA=$SUSFS_COMMIT_SHA" >> $GITHUB_ENV + echo "SUSFS_BRANCH_NAME=$SUSFS_BRANCH_NAME" >> $GITHUB_ENV + + echo "" + echo "✅ SUSFS checked out successfully" + echo " Branch/Ref: $SUSFS_BRANCH_NAME" + echo " Commit: $SUSFS_COMMIT_SHA" + + cd .. + echo "✅ Dependencies cloned" + echo "::endgroup::" - name: Clean Up ABI Protected Exports shell: bash @@ -342,17 +567,22 @@ runs: shell: bash run: | set -euo pipefail + echo "::group::Add BBG LSM" cd "$CONFIG/kernel_platform" echo "Adding BBG..." - wget -O- https://github.com/vc-teahouse/Baseband-guard/raw/main/setup.sh | bash + if ! wget -O- https://github.com/vc-teahouse/Baseband-guard/raw/main/setup.sh | bash; then + echo "::warning::BBG setup script failed, continuing anyway" + fi echo "CONFIG_BBG=y" >> common/arch/arm64/configs/gki_defconfig sed -i '/^config LSM$/,/^help$/{ /^[[:space:]]*default/ { /baseband_guard/! 
s/lockdown/lockdown,baseband_guard/ } }' common/security/Kconfig - + echo "✅ BBG LSM added" + echo "::endgroup::" - name: Add KernelSU Next shell: bash run: | set -euo pipefail + echo "::group::Add KernelSU Next" cd "$CONFIG/kernel_platform" echo "Adding KernelSU Next..." if [ "${{ inputs.ksun_branch }}" = "stable" ]; then @@ -364,12 +594,14 @@ runs: cd KernelSU-Next KSUN_COMMIT_SHA=$(git rev-parse HEAD) echo "KSUN_COMMIT_SHA=$KSUN_COMMIT_SHA" >> $GITHUB_ENV - cd .. + echo "✅ KernelSU Next added (commit: ${KSUN_COMMIT_SHA:0:8})" + echo "::endgroup::" - name: Apply SUSFS Patches shell: bash run: | set -euo pipefail + echo "::group::Apply SUSFS patches" cd "$CONFIG/kernel_platform" echo "Applying SUSFS patches..." cp ../../susfs4ksu/kernel_patches/50_add_susfs_in_${{ env.SUSFS_KERNEL_BRANCH }}.patch ./common/ @@ -412,7 +644,7 @@ runs: patch -p1 --forward < "../../../kernel_patches/next/susfs_fix_patches/$susfs_version/fix_kernel_compat.c.patch" ;; *) - echo "Invalid version: $susfs_version" + echo "::error::Unsupported SUSFS version: $susfs_version" exit 1 ;; esac @@ -500,22 +732,26 @@ runs: rm -rf ext.c ext.h build_policy.c slim.h slim_sysctl.c patch -p1 -F 3 < "../../../../../kernel_patches/oneplus/hmbird/hmbird_files_patch.patch" else - echo "Not OP-ACE-5-PRO / OP13-PJZ / OP13-CPH , skipping fengchi patch" - fi + echo "Hmbird not enabled, skipping fengchi patch" + fi + echo "✅ SUSFS patches applied" + echo "::endgroup::" - name: Apply KSUN Hooks shell: bash run: | set -euo pipefail + echo "::group::Apply KSUN hooks" cd "$CONFIG/kernel_platform/common" patch -p1 < ../../../kernel_patches/next/scope_min_manual_hooks_v1.4.patch + echo "✅ KSUN hooks applied" + echo "::endgroup::" - name: Add KernelSU-Next and SUSFS Configuration Settings shell: bash run: | set -euo pipefail cd "$CONFIG/kernel_platform" - # Remove indentation to avoid leading spaces cat >> common/arch/arm64/configs/gki_defconfig <> "$GITHUB_ENV" - - name: Detect Clang (toolchain for build) - shell: 
bash - run: | - set -euo pipefail - KP="$GITHUB_WORKSPACE/$CONFIG/kernel_platform" - CLANG_FOUND=false - for base in "$KP/prebuilts" "$KP/prebuilts-master"; do - [ -d "$base/clang/host/linux-x86" ] || continue - latest=$(ls -d "$base"/clang/host/linux-x86/clang-r*/ 2>/dev/null | sort -V | tail -n1 || true) - if [ -n "$latest" ] && [ -x "$latest/bin/clang" ]; then - CLANG_BIN="$latest/bin" - CLANG_FOUND=true - fi - done - if ! $CLANG_FOUND && command -v clang >/dev/null 2>&1; then - CLANG_BIN="$(dirname "$(command -v clang)")" - CLANG_FOUND=true - echo "Using system clang." - fi - $CLANG_FOUND || { echo "No clang toolchain found"; exit 1; } - echo "CLANG_BIN_PATH=$CLANG_BIN" >> "$GITHUB_ENV" - CLANG_VERSION="$("$CLANG_BIN/clang" --version | head -n1)" - echo "CLANG_VERSION=$CLANG_VERSION" >> "$GITHUB_ENV" - echo "Detected Clang: $CLANG_VERSION" - - name: Build Kernel shell: bash env: PYTHONWARNINGS: "ignore:invalid escape sequence" run: | set -euo pipefail - echo "::group::Build kernel" + echo "::group::Build kernel with ccache optimization" + KERNEL_PATH="$GITHUB_WORKSPACE/$CONFIG/kernel_platform" COMMON="$KERNEL_PATH/common" cd "$COMMON" : > "$COMMON/.scmversion" - - # Ensure Python warnings are suppressed for scripts invoked by make + export PYTHONWARNINGS="${PYTHONWARNINGS}" - - if [ -n "${CLANG_BIN_PATH:-}" ] && [ -x "${CLANG_BIN_PATH}/clang" ]; then - export PATH="${CLANG_BIN_PATH}:$PATH" + + echo "Getting last commit timestamp from kernel source..." 
+ COMMIT_TIMESTAMP=$(git log -1 --format=%ct 2>/dev/null || echo "$(date +%s)") + COMMIT_DATE=$(git log -1 --format=%cd --date=format:'%Y-%m-%d %H:%M:%S' 2>/dev/null || date -u '+%Y-%m-%d %H:%M:%S') + COMMIT_HASH=$(git rev-parse --short HEAD 2>/dev/null || echo "unknown") + + export SOURCE_DATE_EPOCH=$COMMIT_TIMESTAMP + export KBUILD_BUILD_TIMESTAMP="$COMMIT_DATE UTC" + export KBUILD_BUILD_USER="${BUILD_USER:-builder}" + export KBUILD_BUILD_HOST="${BUILD_HOST:-github-actions}" + export KBUILD_BUILD_VERSION=1 + + echo "📅 Build timestamp: $KBUILD_BUILD_TIMESTAMP (from commit $COMMIT_HASH)" + echo "🔢 SOURCE_DATE_EPOCH: $SOURCE_DATE_EPOCH" + echo "👤 Build user: $KBUILD_BUILD_USER" + echo "🖥️ Build host: $KBUILD_BUILD_HOST" + + export PATH="/usr/lib/ccache:${CLANG_BIN_PATH}:${PATH}" + + if [ "${{ inputs.clean }}" = "true" ]; then + echo "🧹 Clean build mode - ccache disabled" + export CCACHE_DISABLE=1 + else + echo "🚀 ccache-accelerated build" fi + + WORKSPACE="${GITHUB_WORKSPACE}" + MAP="-fdebug-prefix-map=${WORKSPACE}=." + MPMAP="-fmacro-prefix-map=${WORKSPACE}=." + FPMAP="-ffile-prefix-map=${WORKSPACE}=." 
+ + export KCFLAGS="${KCFLAGS:-} $MAP $MPMAP $FPMAP -no-canonical-prefixes -fdiagnostics-color=never -Qunused-arguments -Wno-unused-command-line-argument" + export KCPPFLAGS="${KCPPFLAGS:-} $MAP $MPMAP $FPMAP" + + export CC="clang" + export CXX="clang++" + export HOSTCC="clang" + export HOSTCXX="clang++" + export LLVM=1 LLVM_IAS=1 export ARCH=arm64 SUBARCH=arm64 export CROSS_COMPILE=aarch64-linux-android- export CROSS_COMPILE_COMPAT=arm-linux-androideabi- - export LD=ld.lld HOSTLD=ld.lld AR=llvm-ar NM=llvm-nm OBJCOPY=llvm-objcopy OBJDUMP=llvm-objdump STRIP=llvm-strip HOSTCC=clang HOSTCXX=clang++ - export CC=clang + export LD=ld.lld HOSTLD=ld.lld AR=llvm-ar NM=llvm-nm + export OBJCOPY=llvm-objcopy OBJDUMP=llvm-objdump STRIP=llvm-strip + OUT=out mkdir -p "$OUT" + + echo "============================================" + echo "🔍 Compiler & ccache Verification" + echo "============================================" + echo "PATH: ${PATH:0:200}..." + echo "which clang: $(which clang)" + echo "CC: $CC" + echo "CXX: $CXX" + echo "CCACHE_BASEDIR: ${CCACHE_BASEDIR:-not set}" + echo "" + + if [ "${{ inputs.clean }}" != "true" ]; then + echo "ccache wrapper test:" + clang --version 2>&1 | head -n1 || true + echo "" + echo "ccache configuration:" + ccache -p | grep -E "(compression|direct_mode|readonly_direct|depend|sloppiness)" || true + echo "" + echo "ccache pre-build status:" + ccache -s | head -n 20 + else + echo "⚠️ Clean build - ccache bypassed" + fi + echo "============================================" + make O="$OUT" gki_defconfig - - # LOCALVERSION branding + if [ -n "${CUSTOM_LOCALVERSION:-}" ]; then scripts/config --file "$OUT/.config" --set-str LOCALVERSION "${CUSTOM_LOCALVERSION}" scripts/config --file "$OUT/.config" -d LOCALVERSION_AUTO || true sed -i 's/scm_version="$(scm_version --short)"/scm_version=""/' scripts/setlocalversion fi - - # Optimize level config and flags + if [ "${{ inputs.optimize_level }}" = "O3" ]; then scripts/config --file "$OUT/.config" -d 
CC_OPTIMIZE_FOR_PERFORMANCE scripts/config --file "$OUT/.config" -e CC_OPTIMIZE_FOR_PERFORMANCE_O3 @@ -697,25 +967,140 @@ runs: scripts/config --file "$OUT/.config" -d CC_OPTIMIZE_FOR_PERFORMANCE_O3 KCFLAGS_EXTRA="-O2" fi - - # Consistent flags; include -pipe and disable stack protector - KCFLAGS="-Wno-error -pipe -fno-stack-protector ${KCFLAGS_EXTRA}" - KCPPFLAGS="-DCONFIG_OPTIMIZE_INLINING" - - # Regenerate defaults after config edits + + scripts/config --file "$OUT/.config" -e LTO_CLANG_THIN + scripts/config --file "$OUT/.config" -e LTO_CLANG make O="$OUT" olddefconfig - - echo "Starting build with $(nproc --all) threads..." + + KCFLAGS="$KCFLAGS -Wno-error -pipe -fno-stack-protector ${KCFLAGS_EXTRA}" + KCPPFLAGS="$KCPPFLAGS -DCONFIG_OPTIMIZE_INLINING" + + echo "============================================" + echo "🔧 Build Configuration" + echo "============================================" + echo "Device: ${OP_MODEL}" + echo "Kernel: ${KERNEL_FULL_VER}" + echo "Threads: $(nproc --all)" + echo "Optimization: ${{ inputs.optimize_level }}" + echo "KCFLAGS: ${KCFLAGS:0:150}..." + echo "KCPPFLAGS: ${KCPPFLAGS:0:150}..." + echo "Timestamp: $KBUILD_BUILD_TIMESTAMP" + echo "Workspace: $WORKSPACE" + + if [ "${{ inputs.clean }}" != "true" ]; then + echo "ccache: ENABLED" + echo " Dir: $CCACHE_DIR" + echo " Base: $CCACHE_BASEDIR" + echo " Max size: $CCACHE_MAXSIZE" + echo " Compression: zstd level 3" + echo " Features: readonly_direct, file_clone, inode_cache, depend" + else + echo "ccache: DISABLED (clean build)" + fi + echo "============================================" + + if [ "${{ inputs.clean }}" != "true" ]; then + if [ ! -d "$CCACHE_DIR" ] || [ -z "$(find "$CCACHE_DIR" -type f 2>/dev/null | head -n1)" ]; then + echo "🔥 Priming cache with header preprocessing..." 
+ make -C "$COMMON" O="$OUT" headers_install prepare 2>&1 | head -n 20 || true + echo "" + fi + fi + + BUILD_START=$(date +%s) set -o pipefail + make -j"$(nproc --all)" O="$OUT" KCFLAGS="$KCFLAGS" KCPPFLAGS="$KCPPFLAGS" 2>&1 | tee build.log - + + BUILD_END=$(date +%s) + BUILD_TIME=$((BUILD_END - BUILD_START)) + + echo "BUILD_START=$BUILD_START" >> "$GITHUB_ENV" + echo "BUILD_END=$BUILD_END" >> "$GITHUB_ENV" + echo "BUILD_TIME=$BUILD_TIME" >> "$GITHUB_ENV" + IMG="$OUT/arch/arm64/boot/Image" if [ ! -f "$IMG" ]; then - echo "Kernel Image missing" + echo "::error::Kernel Image missing" + echo "::group::Last 100 lines of build log" + tail -n 100 build.log || echo "Build log not available" + echo "::endgroup::" exit 1 fi - sha256sum "$IMG" | tee "$OUT/Image.sha256" - echo "::endgroup::" + + IMG_SIZE=$(stat -c%s "$IMG") + + echo "============================================" + echo "✅ Build Completed Successfully" + echo "============================================" + echo "Duration: $((BUILD_TIME / 60))m $((BUILD_TIME % 60))s" + echo "Image: $(numfmt --to=iec-i --suffix=B "$IMG_SIZE")" + + if [ "${{ inputs.clean }}" != "true" ]; then + echo "" + echo "📊 ccache Post-Build Statistics" + echo "============================================" + ccache -s + fi + echo "============================================" + echo "::endgroup::" + + - name: 📊 ccache Post-Build Statistics + id: ccache_stats + if: ${{ always() && inputs.clean != 'true' }} + shell: bash + run: | + set -euo pipefail + echo "::group::ccache final statistics" + + export CCACHE_DIR="${{ env.CCACHE_DIR }}" + + echo "ccache Statistics" + ccache -s + + STATS="$(ccache -s)" + + CACHEABLE=$(echo "$STATS" | awk '/Cacheable calls:/ { + match($0, /[0-9]+/); + print substr($0, RSTART, RLENGTH) + }' | head -n1) + + HITS=$(echo "$STATS" | awk '/^[[:space:]]*Hits:/ { + match($0, /[0-9]+/); + print substr($0, RSTART, RLENGTH) + }' | head -n1) + + DIRECT=$(echo "$STATS" | awk '/Direct:/ { + match($0, /[0-9]+/); + print 
substr($0, RSTART, RLENGTH) + }' | head -n1) + + if [ "${CACHEABLE:-0}" -gt 0 ]; then + HIT_RATE=$(awk -v h="${HITS:-0 + }" -v c="$CACHEABLE" 'BEGIN{printf "%.1f", (h/c)*100}') + DIRECT_RATE=$(awk -v d="${DIRECT:-0}" -v c="$CACHEABLE" 'BEGIN{printf "%.1f", (d/c)*100}') + else + HIT_RATE="0.0" + DIRECT_RATE="0.0" + fi + + echo "hit_rate=${HIT_RATE}%" >> "$GITHUB_OUTPUT" + echo "direct_rate=${DIRECT_RATE}%" >> "$GITHUB_OUTPUT" + + echo "::endgroup::" + + echo "## 📊 ccache Statistics" >> $GITHUB_STEP_SUMMARY + echo "" >> $GITHUB_STEP_SUMMARY + echo '```' >> $GITHUB_STEP_SUMMARY + ccache -s >> $GITHUB_STEP_SUMMARY + echo '```' >> $GITHUB_STEP_SUMMARY + + - name: Save ccache + if: ${{ success() && inputs.clean != 'true' }} + uses: actions/cache/save@v4 + with: + path: ${{ env.CCACHE_DIR }} + key: ccache-ksun-v3-${{ env.ANDROID_VER }}-${{ env.KERNEL_VER }}-${{ env.OP_OS_VERSION }}-${{ env.CLANG_VERSION_SHORT }}-${{ env.OP_MODEL }}-${{ github.run_id }} - name: Collect Build Stats / Validate Image id: collect_stats @@ -726,66 +1111,112 @@ runs: COMMON="$KERNEL_PATH/common" OUT="$COMMON/out" IMG="$OUT/arch/arm64/boot/Image" - WARNINGS_COUNT=$(grep -i -E 'warning:' "$COMMON/build.log" | wc -l || true) - echo "$WARNINGS_COUNT" > "$OUT/warnings.txt" - file "$IMG" > "$OUT/Image.file" - KERNEL_UNAME=$(strings "$IMG" | grep -E 'Linux version.*#' | tail -n1) - echo "Kernel Uname: $KERNEL_UNAME" - echo "KERNEL_UNAME=$KERNEL_UNAME" >> "$GITHUB_ENV" + + if [ -f "$COMMON/build.log" ]; then + WARNINGS_COUNT="$(grep -ciE '\bwarning:' "$COMMON/build.log" || true)" + [ -n "$WARNINGS_COUNT" ] || WARNINGS_COUNT="0" + else + WARNINGS_COUNT="0" + fi + + if [ ! -f "$IMG" ]; then + echo "::error::Kernel Image not found at: $IMG" + exit 1 + fi + if ! 
file "$IMG" | grep -qi 'ARM64'; then - echo "Image does not appear to be ARM64:" + echo "::error::Image is not ARM64 format" file "$IMG" exit 1 fi - MIN_SIZE=$((6*1024*1024)) - ACTUAL_SIZE=$(stat -c %s "$IMG") - if [ "$ACTUAL_SIZE" -lt "$MIN_SIZE" ]; then - echo "Image size $ACTUAL_SIZE < $MIN_SIZE (suspicious)"; exit 1 + + IMG_SIZE=$(stat -c %s "$IMG") + MIN_SIZE=6102400 + if [ "$IMG_SIZE" -lt "$MIN_SIZE" ]; then + echo "::error::Image size too small: $(numfmt --to=iec-i --suffix=B "$IMG_SIZE")" + exit 1 fi - IMAGE_SHA256=$(cut -d' ' -f1 "$OUT/Image.sha256") - echo "Image sha256: $IMAGE_SHA256" - echo "warnings_count=$WARNINGS_COUNT" >> "$GITHUB_OUTPUT" - echo "image_sha256=$IMAGE_SHA256" >> "$GITHUB_OUTPUT" + + IMG_SHA256=$(sha256sum "$IMG" | awk '{print $1}') + BUILD_TIME="${BUILD_TIME:-0}" + + KERNEL_UNAME=$(strings "$IMG" | grep -E 'Linux version.*#' | tail -n1) + echo "Kernel Uname: $KERNEL_UNAME" + echo "KERNEL_UNAME=$KERNEL_UNAME" >> "$GITHUB_ENV" + + printf 'warnings_count=%s\n' "$WARNINGS_COUNT" >> "$GITHUB_OUTPUT" + printf 'image_sha256=%s\n' "$IMG_SHA256" >> "$GITHUB_OUTPUT" + printf 'build_time=%s\n' "$BUILD_TIME" >> "$GITHUB_OUTPUT" + + echo "✅ Validation passed:" + echo " - Warnings: $WARNINGS_COUNT" + echo " - Image size: $(numfmt --to=iec-i --suffix=B "$IMG_SIZE")" + echo " - SHA256: $IMG_SHA256" + echo " - Build time: ${BUILD_TIME}s" - name: Create Kernel ZIP id: create_zip shell: bash run: | set -euo pipefail + echo "::group::Create kernel ZIP package" + CONFIG_DIR="$GITHUB_WORKSPACE/$CONFIG" IMAGE_PATH="$CONFIG_DIR/kernel_platform/common/out/arch/arm64/boot/Image" + if [ ! 
-f "$IMAGE_PATH" ]; then - echo "ERROR: Built Image not found" + echo "::error::Built Image not found at: $IMAGE_PATH" exit 1 fi - - # Put Image into AnyKernel3 + cp "$IMAGE_PATH" "$GITHUB_WORKSPACE/AnyKernel3/Image" cd "$GITHUB_WORKSPACE/AnyKernel3" - - # Optional hmbird patch logic + if [ "$OP_HMBIRD" = true ]; then cp "$GITHUB_WORKSPACE/kernel_patches/oneplus/hmbird/bins/"* ./tools/ 2>/dev/null || true patch -F 3 < "$GITHUB_WORKSPACE/kernel_patches/oneplus/hmbird/ak3_hmbird_patch.patch" fi - + ZIP_NAME="AK3_${OP_MODEL}_${OP_OS_VERSION}_${KERNEL_FULL_VER}_Next_${KSUVER}_${SUSVER}.zip" ARTIFACTS_DIR="$CONFIG_DIR/artifacts" mkdir -p "$ARTIFACTS_DIR" - - echo "Creating flashable zip: $ZIP_NAME" - ( cd "$GITHUB_WORKSPACE/AnyKernel3" && zip -r "$ARTIFACTS_DIR/$ZIP_NAME" ./* >/dev/null ) - - # Keep only the flashable zip and the model metadata file (assumed already created earlier) + + echo "Creating flashable ZIP: $ZIP_NAME" + + if ! zip -r9 "$ZIP_NAME" ./* \ + -x "*.git*" \ + -x "$ZIP_NAME" \ + -x "README.md" 2>&1 | tee /tmp/zip.log; then + echo "::error::Failed to create ZIP package" + exit 1 + fi + + mv "$ZIP_NAME" "$ARTIFACTS_DIR/" + find "$ARTIFACTS_DIR" -maxdepth 1 -type f ! -name "$ZIP_NAME" ! 
-name "${OP_MODEL}_${OP_OS_VERSION}.txt" -delete - - # Output for later steps (optional) + + ZIP_SIZE=$(stat -c%s "$ARTIFACTS_DIR/$ZIP_NAME") + ZIP_SHA256=$(sha256sum "$ARTIFACTS_DIR/$ZIP_NAME" | awk '{print $1}') + echo "zip_name=$ZIP_NAME" >> "$GITHUB_OUTPUT" + echo "ZIP_NAME=$ZIP_NAME" >> "$GITHUB_ENV" + + echo "" + echo "✅ Kernel ZIP created successfully" + echo " Name: $ZIP_NAME" + echo " Size: $(numfmt --to=iec-i --suffix=B "$ZIP_SIZE")" + echo " SHA256: $ZIP_SHA256" + echo "::endgroup::" - name: Final Build Summary shell: bash run: | set -euo pipefail + + # Get commit SHAs with fallback + KSUN_SHA="${KSUN_COMMIT_SHA:-unknown}" + SUSFS_SHA="${SUSFS_COMMIT_SHA:-unknown}" + { echo "Model: ${{ env.OP_MODEL }}" echo "OS Version: ${{ env.OP_OS_VERSION }}" @@ -793,29 +1224,91 @@ runs: echo "Kernel base: ${{ env.KERNEL_VER }}" echo "Kernel full: ${{ env.KERNEL_FULL_VER }}" echo "Kernel Uname: ${{ env.KERNEL_UNAME }}" + echo "Build User: ${{ env.BUILD_USER }}" + echo "Build Host: ${{ env.BUILD_HOST }}" echo "KSUN Version: ${KSUVER:-unknown}" - echo "KSUN commit SHA: ${{ env.KSUN_COMMIT_SHA }}" + echo "KSUN commit SHA: $KSUN_SHA" echo "SUSFS Version: ${SUSVER:-unknown}" - echo "SUSFS commit SHA: ${{ env.SUSFS_COMMIT_SHA }}" + echo "SUSFS Branch: ${{ env.SUSFS_BRANCH_NAME }}" + echo "SUSFS commit SHA: $SUSFS_SHA" echo "Optimization: ${{ inputs.optimize_level }}" + echo "Clean Build: ${{ inputs.clean }}" + echo "" + echo "=== OnePlus Features ===" + echo "BBR: ${{ env.OP_BBR }}" + echo "BBG: ${{ env.OP_BBG }}" + echo "Hmbird: ${{ env.OP_HMBIRD }}" + echo "TTL: ${{ env.OP_TTL }}" + echo "IP_SET: ${{ env.OP_IP_SET }}" + echo "" echo "Image SHA256: ${{ steps.collect_stats.outputs.image_sha256 }}" echo "Compiler: ${CLANG_VERSION:-unknown}" echo "Warnings Count: ${{ steps.collect_stats.outputs.warnings_count }}" + echo "Build Time: ${{ steps.collect_stats.outputs.build_time }}s" + if [ "${{ inputs.clean }}" != "true" ]; then + echo "ccache Hit Rate: ${{ 
steps.ccache_stats.outputs.hit_rate }}" + echo "ccache Direct Rate: ${{ steps.ccache_stats.outputs.direct_rate }}" + else + echo "ccache: Disabled (clean build)" + fi } | tee summary.txt + { - echo "### Kernel Build Summary" + echo "### 🎯 Kernel Build Summary" + echo "" + echo "| Metric | Value |" + echo "|--------|-------|" + echo "| **Model** | ${{ env.OP_MODEL }} |" + echo "| **OS Version** | ${{ env.OP_OS_VERSION }} |" + echo "| **Kernel Version** | ${{ steps.save_metadata.outputs.kernel_version }} |" + echo "| **Kernel Uname** | \`${{ env.KERNEL_UNAME }}\` |" + echo "| **Build User** | ${{ env.BUILD_USER }} |" + echo "| **Build Host** | ${{ env.BUILD_HOST }} |" + echo "| **KSUN Version** | ${KSUVER:-unknown} |" + + if [ "$KSUN_SHA" != "unknown" ]; then + echo "| **KSUN Commit** | [\`${KSUN_SHA:0:8}\`](https://github.com/KernelSU-Next/KernelSU-Next/commit/$KSUN_SHA) |" + else + echo "| **KSUN Commit** | unknown |" + fi + + echo "| **SUSFS Version** | ${SUSVER:-unknown} |" + echo "| **SUSFS Branch** | \`${{ env.SUSFS_BRANCH_NAME }}\` |" + + if [ "$SUSFS_SHA" != "unknown" ]; then + echo "| **SUSFS Commit** | [\`${SUSFS_SHA:0:8}\`](https://gitlab.com/simonpunk/susfs4ksu/-/commit/$SUSFS_SHA) |" + else + echo "| **SUSFS Commit** | unknown |" + fi + echo "" - echo "- Model: ${{ env.OP_MODEL }}" - echo "- OS Version: ${{ env.OP_OS_VERSION }}" - echo "- Kernel Version: ${{ steps.save_metadata.outputs.kernel_version }}" - echo "- Kernel Uname: ${{ env.KERNEL_UNAME }}" - echo "- KSUN Version: ${KSUVER:-unknown}" - echo "- KSUN commit SHA: [${{ env.KSUN_COMMIT_SHA }}](https://github.com/KernelSU-Next/KernelSU-Next/commit/${{ env.KSUN_COMMIT_SHA }})" - echo "- SUSFS Version: ${SUSVER:-unknown}" - echo "- SUSFS commit SHA: [${{ env.SUSFS_COMMIT_SHA }}](https://gitlab.com/simonpunk/susfs4ksu/-/commit/${{ env.SUSFS_COMMIT_SHA }})" - echo "- Optimization: ${{ inputs.optimize_level }}" - echo "- Image SHA256: ${{ steps.collect_stats.outputs.image_sha256 }}" - echo "- Warnings 
Count: ${{ steps.collect_stats.outputs.warnings_count }}" + echo "### 🔧 OnePlus Features" + echo "" + echo "| Feature | Status |" + echo "|---------|--------|" + echo "| **BBR** | ${{ env.OP_BBR }} |" + echo "| **BBG** | ${{ env.OP_BBG }} |" + echo "| **Hmbird** | ${{ env.OP_HMBIRD }} |" + echo "| **TTL** | ${{ env.OP_TTL }} |" + echo "| **IP_SET** | ${{ env.OP_IP_SET }} |" + + echo "" + echo "### 📊 Build Metrics" + echo "" + echo "| Metric | Value |" + echo "|--------|-------|" + echo "| **Optimization** | ${{ inputs.optimize_level }} |" + echo "| **Clean Build** | ${{ inputs.clean }} |" + echo "| **Build Time** | ${{ steps.collect_stats.outputs.build_time }}s |" + + if [ "${{ inputs.clean }}" != "true" ]; then + echo "| **ccache Hit Rate** | ${{ steps.ccache_stats.outputs.hit_rate }} |" + echo "| **ccache Direct Rate** | ${{ steps.ccache_stats.outputs.direct_rate }} |" + fi + + echo "| **Image SHA256** | \`${{ steps.collect_stats.outputs.image_sha256 }}\` |" + echo "| **Warnings** | ${{ steps.collect_stats.outputs.warnings_count }} |" + echo "| **Compiler** | \`${CLANG_VERSION:-unknown}\` |" } >> "$GITHUB_STEP_SUMMARY" - name: Upload Artifacts @@ -824,3 +1317,5 @@ runs: with: name: kernel-${{ env.CONFIG }}_${{ env.OP_OS_VERSION }} path: ${{ env.CONFIG }}/artifacts/ + compression-level: 0 + retention-days: 20 diff --git a/.github/workflows/build-kernel-release.yml b/.github/workflows/build-kernel-release.yml index 65d5f3e3..2a1e9d6b 100644 --- a/.github/workflows/build-kernel-release.yml +++ b/.github/workflows/build-kernel-release.yml @@ -39,22 +39,18 @@ on: type: choice options: [O2, O3] default: O2 + clean_build: + description: 'Clean build (no ccache)' + type: boolean + default: false android12-5_10_susfs_branch_or_commit: description: 'Enter SusFS Branch or commit hash for android12-5.10' type: string - default: '' - android13-5_10_susfs_branch_or_commit: - description: 'Enter SusFS Branch or commit hash for android13-5.10' - type: string - default: '' + 
default: '' android13-5_15_susfs_branch_or_commit: description: 'Enter SusFS Branch or commit hash for android13-5.15' type: string - default: '' - android14-5_15_susfs_branch_or_commit: - description: 'Enter SusFS Branch or commit hash for android14-5.15' - type: string - default: '' + default: '' android14-6_1_susfs_branch_or_commit: description: 'Enter SusFS Branch or commit hash for android14-6.1' type: string @@ -69,20 +65,22 @@ jobs: runs-on: ubuntu-latest outputs: matrix: ${{ steps.set-matrix.outputs.matrix }} + device_count: ${{ steps.set-matrix.outputs.count }} steps: - - name: Checkout Code (to access configs/) + - name: 📥 Checkout Code (to access configs/) uses: actions/checkout@v4 with: sparse-checkout: | configs/ sparse-checkout-cone-mode: false - - name: Setup OnePlus Model + - name: 🔍 Generate build matrix id: set-matrix shell: bash run: | set -euo pipefail - FILTERED_FOR_ANDROID=false + echo "::group::Matrix generation" + input="${{ github.event.inputs.op_model }}" echo "[" > matrix.json @@ -119,28 +117,89 @@ jobs: jq_filter="map(select(.os_version == \"OOS14\"))" ;; android*-*.*) + # Extract android version and kernel version IFS='-' read -r av kv <<< "$input" + + # Build android*-* only for OOS15 and OOS16 jq_filter="map(select(.os_version == \"OOS15\" or .os_version == \"OOS16\")) | map(select(.android_version == \"$av\" and .kernel_version == \"$kv\"))" - FILTERED_FOR_ANDROID=true + + echo "ℹ️ Android-Kernel filter applied: $av-$kv" + echo " Restricted to: OOS15 and OOS16 only" ;; *) - echo "Warning: Unknown input '$input'. Using empty filter." >&2 + echo "::warning::Unknown input '$input'. Using empty filter." jq_filter="map(select(false))" ;; esac filtered=$(jq -c "$jq_filter" matrix.json) + count=$(jq 'length' <<<"$filtered") - if ! jq -e '. | length > 0' <<< "$filtered" >/dev/null 2>&1; then - echo "Error: No config files found for input '$input' after applying filters!" 
>&2 - exit 1 + if [ "$count" -eq 0 ]; then + echo "::error::No config files found for input '$input' after applying filters!" + echo "" + echo "Available configurations:" + jq -r '.[] | " - \(.model) (\(.os_version), \(.android_version)-\(.kernel_version))"' matrix.json + exit 1 fi wrapped=$(jq -n --argjson items "$filtered" '{ include: $items }') + echo "✅ Found $count device(s) to build" + echo "" + echo "Selected devices:" + jq -r '.[] | " - \(.model) (\(.os_version), \(.android_version)-\(.kernel_version))"' <<<"$filtered" + echo "matrix<<MATRIX_EOF" >> "$GITHUB_OUTPUT" echo "$wrapped" >> "$GITHUB_OUTPUT" echo "MATRIX_EOF" >> "$GITHUB_OUTPUT" + echo "count=$count" >> "$GITHUB_OUTPUT" + + echo "::endgroup::" + + - name: 📊 Build plan summary + run: | + cat >> $GITHUB_STEP_SUMMARY << 'EOF' + ## 🎯 Build Plan + + **Target:** `${{ inputs.op_model }}` + **Devices:** ${{ steps.set-matrix.outputs.count }} + + **Configuration:** + - KSU Branch: `${{ inputs.ksun_branch }}` + - Optimization: `${{ inputs.optimize_level }}` + - Clean Build/No Ccache: ${{ inputs.clean_build && '✅ Yes' || '❌ No' }} + - Create Release: ${{ inputs.make_release && '✅ Yes' || '❌ No' }} + + **SUSFS Configuration:** + EOF + + # Display SUSFS config for each kernel version + declare -A susfs_inputs=( + ["android12-5.10"]="${{ inputs.android12-5_10_susfs_branch_or_commit }}" + ["android13-5.15"]="${{ inputs.android13-5_15_susfs_branch_or_commit }}" + ["android14-6.1"]="${{ inputs.android14-6_1_susfs_branch_or_commit }}" + ["android15-6.6"]="${{ inputs.android15-6_6_susfs_branch_or_commit }}" + ) + + for key in "android12-5.10" "android13-5.15" "android14-6.1" "android15-6.6"; do + value="${susfs_inputs[$key]}" + + if [ -z "$value" ]; then + echo "- $key: 🔄 auto (\`gki-$key\`)" >> $GITHUB_STEP_SUMMARY + else + echo "- $key: 📌 \`$value\`" >> $GITHUB_STEP_SUMMARY + fi + done + + echo "" >> $GITHUB_STEP_SUMMARY + echo "> **💡 Note:** Empty values auto-map to matching \`gki-androidX-Y.Z\` branches."
>> $GITHUB_STEP_SUMMARY + + # Add OOS restriction note for android-kernel filters + if [[ "${{ inputs.op_model }}" == android*-*.* ]]; then + echo "" >> $GITHUB_STEP_SUMMARY + echo "> **⚠️ Android-Kernel Filter:** Only OOS15 and OOS16 devices will be built for \`${{ inputs.op_model }}\`" >> $GITHUB_STEP_SUMMARY + fi build: name: build (${{ matrix.model }}, ${{ matrix.soc }}, ${{ matrix.branch }}, ${{ matrix.manifest }}, ${{ matrix.android_version }}, ${{ matrix.kernel_version }}, ${{ matrix.os_version }}, ${{ inputs.ksun_branch }}) @@ -150,39 +209,206 @@ jobs: fail-fast: false matrix: ${{ fromJSON(needs.set-op-model.outputs.matrix) }} steps: - - name: Resolve SUSFS branch from inputs + - name: 🧹 Emergency Disk Cleanup + run: | + echo "::group::Disk Usage Before Cleanup" + df -h + echo "::endgroup::" + + echo "::group::Removing Unnecessary Software" + sudo rm -rf /usr/share/dotnet + sudo rm -rf /usr/local/lib/android + sudo rm -rf /opt/ghc + sudo rm -rf /opt/hostedtoolcache/CodeQL + sudo apt-get clean + if command -v docker >/dev/null 2>&1; then + docker rmi $(docker images -q) 2>/dev/null || true + fi + echo "::endgroup::" + + echo "::group::Disk Usage After Cleanup" + df -h + AVAIL=$(df -h / | awk 'NR==2 {print $4}') + echo "✅ Available space: $AVAIL" + echo "::endgroup::" + + - name: 🔍 Resolve SUSFS branch from inputs id: susfs shell: bash run: | set -euo pipefail + key="${{ matrix.android_version }}-${{ matrix.kernel_version }}" - declare -A map=( - ["android12-5.10"]="${{ inputs.android12-5_10_susfs_branch_or_commit }}" - ["android13-5.10"]="${{ inputs.android13-5_10_susfs_branch_or_commit }}" - ["android13-5.15"]="${{ inputs.android13-5_15_susfs_branch_or_commit }}" - ["android14-5.15"]="${{ inputs.android14-5_15_susfs_branch_or_commit }}" - ["android14-6.1"]="${{ inputs.android14-6_1_susfs_branch_or_commit }}" - ["android15-6.6"]="${{ inputs.android15-6_6_susfs_branch_or_commit }}" - ) - # Only validate mapping presence; allow empty string value to be passed 
through. - if [[ -z "${map[$key]+_exists}" ]]; then - echo "Unsupported combo (no mapping): $key" >&2 - exit 1 + + echo "Looking up SUSFS branch for: $key" + + # Map kernel version to input value + case "$key" in + "android12-5.10") + susfs_branch="${{ inputs.android12-5_10_susfs_branch_or_commit }}" + ;; + "android13-5.15") + susfs_branch="${{ inputs.android13-5_15_susfs_branch_or_commit }}" + ;; + "android14-6.1") + susfs_branch="${{ inputs.android14-6_1_susfs_branch_or_commit }}" + ;; + "android15-6.6") + susfs_branch="${{ inputs.android15-6_6_susfs_branch_or_commit }}" + ;; + *) + echo "::error::Unsupported kernel version: $key" + exit 1 + ;; + esac + + # If empty, it will be auto-resolved to gki-* branch in the action + if [ -z "$susfs_branch" ]; then + echo "ℹ️ No custom SUSFS branch specified for $key" + echo " Will auto-select: gki-$key" + else + echo "✅ Using custom SUSFS branch for $key: '$susfs_branch'" fi - echo "susfs_branch=${map[$key]}" >> "$GITHUB_OUTPUT" + + echo "susfs_branch=$susfs_branch" >> "$GITHUB_OUTPUT" - - name: Checkout Code + - name: 📥 Checkout Code uses: actions/checkout@v4 with: fetch-depth: 1 - - name: Build Kernel + - name: 📦 Disk usage (pre-build) + run: | + echo "::group::Disk usage pre-build" + df -h / + du -sh "$GITHUB_WORKSPACE" 2>/dev/null || true + sudo rm -rf /tmp/* || true + echo "::endgroup::" + + - name: ♻️ Configure ccache (bounded) + if: ${{ inputs.clean_build != true }} + run: | + if command -v ccache >/dev/null 2>&1; then + echo "::group::ccache configuration" + ccache -o max_size=8.0G + ccache -o compression=true + ccache -o compression_level=3 + ccache -s + echo "::endgroup::" + fi + + - name: 🔨 Build Kernel + id: build uses: ./.github/actions with: op_config_json: ${{ toJSON(matrix) }} ksun_branch: ${{ inputs.ksun_branch }} susfs_commit_hash_or_branch: ${{ steps.susfs.outputs.susfs_branch }} optimize_level: ${{ inputs.optimize_level }} + clean: ${{ inputs.clean_build }} + + - name: 📊 Build statistics + if: always() 
+ run: | + echo "::group::Build Statistics" + + echo "Device: ${{ matrix.model }}" + echo "OS Version: ${{ matrix.os_version }}" + echo "Kernel: ${{ matrix.android_version }}-${{ matrix.kernel_version }}" + echo "SUSFS Branch: ${{ steps.susfs.outputs.susfs_branch }}" + echo "Status: ${{ job.status }}" + + if [ "${{ steps.build.outcome }}" = "success" ]; then + echo "" + echo "✅ Build completed successfully" + echo "" + echo "Outputs:" + echo " - Kernel: ${{ steps.build.outputs.kernel_version }}" + echo " - KSU Next: v${{ steps.build.outputs.ksu_version }}" + echo " - SUSFS: ${{ steps.build.outputs.susfs_version }}" + echo " - Build time: ${{ steps.build.outputs.build_time }}s" + + if [ "${{ inputs.clean_build }}" != "true" ]; then + echo " - ccache hit rate: ${{ steps.build.outputs.ccache_hit_rate }}" + echo " - ccache direct rate: ${{ steps.build.outputs.ccache_direct_rate }}" + else + echo " - ccache: disabled (clean build)" + fi + + if [ -n "${{ steps.build.outputs.warnings }}" ]; then + echo " - Warnings: ${{ steps.build.outputs.warnings }}" + fi + else + echo "❌ Build failed" + fi + + echo "::endgroup::" + + - name: 📝 Job summary + if: always() + run: | + cat >> $GITHUB_STEP_SUMMARY << EOF + + ### ${{ matrix.model }} (${{ matrix.os_version }}) - ${{ job.status == 'success' && '✅ Success' || '❌ Failed' }} + + **Kernel:** ${{ matrix.android_version }}-${{ matrix.kernel_version }} + **SUSFS Branch:** \`${{ steps.susfs.outputs.susfs_branch || format('(auto: gki-{0}-{1})', matrix.android_version, matrix.kernel_version) }}\` + + EOF + + if [ "${{ steps.build.outcome }}" = "success" ]; then + cat >> $GITHUB_STEP_SUMMARY << EOF + | Metric | Value | + |--------|-------| + | **Kernel** | ${{ steps.build.outputs.kernel_version }} | + | **KSU Next** | v${{ steps.build.outputs.ksu_version }} | + | **SUSFS** | ${{ steps.build.outputs.susfs_version }} | + | **Build Time** | ${{ steps.build.outputs.build_time }}s | + EOF + + if [ "${{ inputs.clean_build }}" != "true" ]; then 
+ cat >> $GITHUB_STEP_SUMMARY << EOF + | **ccache Hit Rate** | ${{ steps.build.outputs.ccache_hit_rate }} | + | **ccache Direct Rate** | ${{ steps.build.outputs.ccache_direct_rate }} | + EOF + fi + + if [ -n "${{ steps.build.outputs.warnings }}" ]; then + echo "| **Warnings** | ${{ steps.build.outputs.warnings }} |" >> $GITHUB_STEP_SUMMARY + fi + + cat >> $GITHUB_STEP_SUMMARY << EOF + + **SHA256:** \`${{ steps.build.outputs.image_sha256 }}\` + EOF + fi + + - name: 🧹 Final cleanup and space report + if: always() + run: | + echo "::group::Cleanup" + + # Remove build artifacts but PRESERVE ccache + sudo rm -rf "$GITHUB_WORKSPACE/out" || true + sudo rm -rf "$GITHUB_WORKSPACE/build" || true + sudo rm -rf "$GITHUB_WORKSPACE/kernel/out" || true + sudo rm -rf "$GITHUB_WORKSPACE/.repo" || true + sudo rm -rf /tmp/* || true + + # Show ccache stats (don't clear it!) + if command -v ccache >/dev/null 2>&1; then + echo "" + echo "📊 ccache statistics after build:" + ccache -s + echo "" + echo "💾 ccache preserved for next build" + fi + + echo "" + echo "💽 Final disk usage:" + df -h / + + echo "::endgroup::" trigger-release: needs: [build] @@ -194,12 +420,12 @@ jobs: GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} RELEASE_NAME: '*TEST BUILD* OnePlus Kernels With KernelSU Next & SUSFS v1.5.12 *TEST BUILD*' steps: - - name: Checkout code + - name: 📥 Checkout code uses: actions/checkout@v4 with: fetch-depth: 0 - - name: Generate and Create New Tag + - name: 🏷️ Generate and Create New Tag run: | BASE_TAG="v1.5.12-r0" LATEST_TAG=$(gh api repos/$REPO_OWNER/$REPO_NAME/tags --jq '.[0].name') @@ -214,70 +440,233 @@ jobs: git tag $NEW_TAG git push origin $NEW_TAG - - name: Download Artifacts + - name: 📥 Download Artifacts uses: actions/download-artifact@v4 with: path: ./downloaded-artifacts - - name: Generate Device List and Final Release Notes + - name: 📝 Generate Device List and Final Release Notes id: generate-notes run: | echo "=== Start building the release notes ===" - cat << EOF > 
release_notes.md - This release contains KernelSU Next and SUSFS v1.5.12 - - Module: - -> https://github.com/sidex15/ksu_module_susfs - - Official Managers: - -> https://github.com/KernelSU-Next/KernelSU-Next - Non-Official Managers: - -> https://github.com/WildKernels/Wild_KSU + # Collect build metadata + declare -A device_info + declare -A ksu_versions + declare -A susfs_commits + declare -A clang_versions - ### Built Devices - - | Model | OS Version | Kernel Version | - |-------|------------|----------------| - EOF - - # Generate table rows for file in $(find downloaded-artifacts -name "*.txt" -type f | sort); do if [ -f "$file" ]; then full_model=$(basename "$file" .txt) model=$(echo "$full_model" | sed -E 's/_[^_]*$//') os_version=$(sed -n '2p' "$file") kernel_version=$(sed -n '1p' "$file") - if [ -n "$model" ] && [ -n "$os_version" ] && [ -n "$kernel_version" ]; then - printf "| %-12s | %-10s | %-16s |\n" "$model" "$os_version" "$kernel_version" >> release_notes.md + + # Try to extract additional info from artifact directory + artifact_dir=$(dirname "$file") + if [ -f "$artifact_dir/build_info.json" ]; then + ksu_ver=$(jq -r '.ksu_version // "N/A"' "$artifact_dir/build_info.json") + susfs_commit=$(jq -r '.susfs_commit // "N/A"' "$artifact_dir/build_info.json") + clang_ver=$(jq -r '.clang_version // "N/A"' "$artifact_dir/build_info.json") else - echo "Warning: Could not read kernel version or OS version from $file. Skipping row for $model." 
+ ksu_ver="N/A" + susfs_commit="N/A" + clang_ver="N/A" fi + + device_info["$model"]="$os_version|$kernel_version" + ksu_versions["$model"]="$ksu_ver" + susfs_commits["$model"]="$susfs_commit" + clang_versions["$model"]="$clang_ver" fi done - # Add features and finalize - cat << 'EOF' >> release_notes.md - - ### Features - - [+] KernelSU-Next / WildKSU Manager Support - - [+] SUSFS v1.5.12 - - [+] Wireguard Support - - [+] Magic Mount Support - - [+] Ptrace message leak fix for kernels < 5.16 - - [+] Manual Hooks [scope_min_manual_hooks_v1.4] - - [+] CONFIG_TMPFS_XATTR Support [Mountify Support] - - [+] BBR v1 Support - - [+] HMBIRD scx support for OnePlus 13 & OnePlus Ace 5 Pro - - [+] Baseband Guard Support (BBG). - - [+] IP Set Support. + # Get workflow inputs for reference + KSUN_BRANCH="${{ inputs.ksun_branch }}" + OPTIMIZE_LEVEL="${{ inputs.optimize_level }}" + CLEAN_BUILD="${{ inputs.clean_build }}" + + # Determine SUSFS branches used + SUSFS_A12="${{ inputs.android12-5_10_susfs_branch_or_commit }}" + SUSFS_A13="${{ inputs.android13-5_15_susfs_branch_or_commit }}" + SUSFS_A14="${{ inputs.android14-6_1_susfs_branch_or_commit }}" + SUSFS_A15="${{ inputs.android15-6_6_susfs_branch_or_commit }}" + + [ -z "$SUSFS_A12" ] && SUSFS_A12="gki-android12-5.10 (auto)" + [ -z "$SUSFS_A13" ] && SUSFS_A13="gki-android13-5.15 (auto)" + [ -z "$SUSFS_A14" ] && SUSFS_A14="gki-android14-6.1 (auto)" + [ -z "$SUSFS_A15" ] && SUSFS_A15="gki-android15-6.6 (auto)" + + cat << EOF > release_notes.md + # 🎯 OnePlus Kernels with KernelSU Next & SUSFS v1.5.12 + + > **Build Date:** $(date -u '+%Y-%m-%d %H:%M:%S UTC') + > **Build ID:** \`${{ github.run_id }}\` + > **Workflow:** [\`${{ github.workflow }}\`](${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}) + + --- + + ## 📦 Built Devices (${#device_info[@]} total) + + | Model | OS Version | Kernel Version | KSU Next | Features | + |-------|------------|----------------|----------|----------| + EOF + + for 
model in $(printf '%s\n' "${!device_info[@]}" | sort); do + IFS='|' read -r os_ver kernel_ver <<< "${device_info[$model]}" + ksu_ver="${ksu_versions[$model]}" + + # Determine features based on kernel version and config + features="" + + # Check for HMBIRD support (OP13, OPAce5Pro with android15-6.6) + if [[ "$model" =~ ^(OP13|OPAce5Pro)$ ]] && [[ "$kernel_ver" == "android15-6.6"* ]]; then + features="${features}🐦 HMBIRD " + fi + + # BBR is enabled for all + features="${features}🚀 BBR " + + # BBG is enabled for all + features="${features}🛡️ BBG " + + # TTL support for all + features="${features}⏱️ TTL " + + # IP_SET support for all + features="${features}🔧 IP_SET" + + printf "| %-13s | %-10s | %-16s | v%-8s | %-30s |\n" \ + "$model" "$os_ver" "$kernel_ver" "$ksu_ver" "$features" >> release_notes.md + done + + cat << EOF >> release_notes.md + + --- + + ## 🔧 Build Configuration + + | Component | Version/Setting | + |-----------|----------------| + | **KernelSU Next Branch** | \`$KSUN_BRANCH\` | + | **SUSFS Version** | v1.5.12 | + | **Optimization Level** | \`$OPTIMIZE_LEVEL\` | + | **Clean Build** | $( [ "$CLEAN_BUILD" = "true" ] && echo "✅ Yes (no ccache)" || echo "❌ No (ccache enabled)" ) | + | **Compiler** | Clang (version varies by device) | + + ### 📌 SUSFS Branch Mapping + + | Kernel Version | SUSFS Branch/Commit | + |----------------|---------------------| + | android12-5.10 | \`$SUSFS_A12\` | + | android13-5.15 | \`$SUSFS_A13\` | + | android14-6.1 | \`$SUSFS_A14\` | + | android15-6.6 | \`$SUSFS_A15\` | + + --- + + ## ✨ Features & Capabilities + + ### 🔐 Root Management + - **KernelSU Next** - Next-generation kernel-level root solution + - **SUSFS v1.5.12** - Advanced hiding and security features + - **Magic Mount Support** - Seamless file system modifications + - **Manual Hooks** - scope_min_manual_hooks_v1.4 for better compatibility + + ### 🛡️ Security & Privacy + - **Baseband Guard (BBG)** - LSM-based baseband security + - **SUSFS Hide Features**: + - ✅ 
SUS_PATH - Hide suspicious paths + - ✅ SUS_MOUNT - Hide mount points + - ✅ SUS_KSTAT - Spoof kernel statistics + - ✅ TRY_UMOUNT - Auto-unmount detection + - ✅ SPOOF_UNAME - Kernel version spoofing + - ✅ SPOOF_CMDLINE - Boot parameters spoofing + - ✅ OPEN_REDIRECT - File access redirection + - ✅ SUS_MAP - Memory mapping protection + - **Ptrace Leak Fix** - For kernels < 5.16 + + ### 🚀 Performance & Networking + - **BBR** - Improved TCP congestion control + - **Wireguard** - Built-in VPN support + - **IP Set Support** - Advanced firewall capabilities + - **TTL Target Support** - Network packet manipulation + - **LTO (Link Time Optimization)** - Optimized binary size and performance + - **ccache-accelerated builds** - Faster compilation times + + ### 🔧 System Features + - **TMPFS_XATTR** - Extended attributes for tmpfs (Mountify support) + - **TMPFS_POSIX_ACL** - POSIX ACLs for tmpfs + - **HMBIRD SCX** - OnePlus 13 & Ace 5 Pro scheduler extensions + + --- + + ## 📱 Manager Applications + + ### Official Manager + - **KernelSU Next Manager** + → [GitHub Release](https://github.com/KernelSU-Next/KernelSU-Next/releases) + + ### Community Managers + - **WildKSU Manager** (Recommended for additional features) + → [GitHub Release](https://github.com/WildKernels/Wild_KSU/releases) + + ### Required Module + - **KSU SUSFS Module** (Required for SUSFS features) + → [GitHub Release](https://github.com/sidex15/ksu_module_susfs/releases) + + --- + + ## 📥 Installation Instructions + + ### Prerequisites + - Unlocked bootloader + - Backup your current boot image + + ### Via Custom Recovery + 1. Download the appropriate ZIP for your device + 2. Flash the ZIP file using Kernel Flasher + 3. Reboot system + 4. Install KernelSU/WildSU Manager + 5. 
Install SUSFS module from manager + + --- + + ## 📜 Changelog + + ### This Release + - Updated SUSFS to v1.5.12 + - Added HMBIRD support for OP13/Ace5Pro + - Improved ccache build system + - Enhanced SUSFS hiding capabilities + - Added IP_SET and TTL support + - Compiler optimizations ($OPTIMIZE_LEVEL) + + ### Previous Releases + See [Releases Page](${{ github.server_url }}/${{ github.repository }}/releases) + + --- + + ## 🙏 Credits + + - **KernelSU Next Team** - Root solution + - **simonpunk** - SUSFS development + - **OnePlus** - Kernel source code + - **LineageOS Team** - Kernel patches and improvements + - **Community Contributors** - Testing and feedback + + --- + + **⚡ Built with ❤️ by the community** + + > *This is an unofficial build. Use at your own risk.* EOF - # Output for debugging echo "--- Final Release Notes ---" cat release_notes.md - - name: Create GitHub Release + - name: 🚀 Create GitHub Release run: | gh release create "${{ env.NEW_TAG }}" \ --repo "${{ env.REPO_OWNER }}/${{ env.REPO_NAME }}" \ @@ -285,7 +674,7 @@ jobs: --notes-file release_notes.md \ --prerelease - - name: Upload Release Assets Dynamically + - name: 📤 Upload Release Assets Dynamically run: | for file in ./downloaded-artifacts/*/*.zip; do if [ -f "$file" ]; then @@ -293,3 +682,27 @@ jobs: gh release upload "${{ env.NEW_TAG }}" "$file" --clobber fi done + + - name: 📊 Release summary + if: success() + run: | + cat >> $GITHUB_STEP_SUMMARY << EOF + + --- + + ## 🎉 Release Created Successfully + + **Tag:** [\`${{ env.NEW_TAG }}\`](${{ github.server_url }}/${{ github.repository }}/releases/tag/${{ env.NEW_TAG }}) + **Kernels:** $(find ./downloaded-artifacts -name "*.zip" | wc -l) + + ### 📦 Assets + EOF + + for zip in ./downloaded-artifacts/*/*.zip; do + if [ -f "$zip" ]; then + name=$(basename "$zip") + size=$(stat -c%s "$zip") + size_mb=$(echo "scale=2; $size / 1024 / 1024" | bc) + echo "- \`$name\` (${size_mb} MB)" >> $GITHUB_STEP_SUMMARY + fi + done diff --git 
a/.github/workflows/clean-up.yml b/.github/workflows/clean-up.yml new file mode 100644 index 00000000..fe458c86 --- /dev/null +++ b/.github/workflows/clean-up.yml @@ -0,0 +1,1165 @@ +name: Cleanup CCache 🧹 + +on: + workflow_dispatch: + inputs: + cleanup_type: + type: choice + description: 'Cleanup target' + required: true + default: 'cache_only' + options: + - cache_only # Only clean caches + - runs_only # Only clean workflow runs + - full_cleanup # Both caches and runs + - analyze_only # Just analyze, don't delete + + device_filter: + type: choice + description: 'Device cache to clean' + required: false + default: 'ALL' + options: + - ALL + # Kernel Versions + - android15-6.6 + - android14-6.1 + - android14-5.15 + - android13-5.15 + - android13-5.10 + - android12-5.10 + # OnePlus Phones + - OP13-CPH + - OP13-PJZ + - OP13r + - OP13S + - OP13T + - OP12 + - OP12r + - OP11 + - OP11r + - OP10pro + - OP10t + # OnePlus Nord Series + - OP-Nord-5 + - OP-NORD-4 + - OP-NORD-4-CE + - OP-NORD-CE4-LITE + # OnePlus Ace Series + - OP-ACE-5-PRO + - OP-ACE-5 + - OP-ACE-3-PRO + - OP-ACE-3V + - OP-ACE-2-PRO + - OP-ACE-2 + # OnePlus Tablets & Others + - OP-OPEN + - OP-PAD-3 + - OP-PAD-2-PRO + - OP-PAD-2 + - OP-PAD-PRO + + cache_pattern: + type: choice + description: 'Cache type to clean' + required: false + default: 'all_caches' + options: + - all_caches # All cache types + - ccache_only # Only ccache (kernel builds) + - ccache_stale # Only stale ccache (>14 days) + - apt_only # Only apt packages + - kernel_only # Only kernel-related caches + - old_only # Only caches older than 7 days + + cache_age_days: + description: 'Delete caches older than N days' + required: false + default: '7' + + keep_recent_runs: + description: 'Keep N most recent successful runs' + required: false + default: '3' + + days_to_keep: + description: 'Keep runs from last N days' + required: false + default: '7' + + dry_run: + description: 'Dry run (show what would be deleted)' + required: false + type: boolean + 
default: false + + force_cleanup: + description: 'Force cleanup even if usage is low' + required: false + type: boolean + default: false + +permissions: + actions: write + contents: read + +jobs: + analyze: + runs-on: ubuntu-latest + outputs: + should_cleanup: ${{ steps.analysis.outputs.should_cleanup }} + total_cache_size: ${{ steps.analysis.outputs.total_size }} + cache_count: ${{ steps.analysis.outputs.cache_count }} + usage_percent: ${{ steps.analysis.outputs.usage_percent }} + ccache_count: ${{ steps.analysis.outputs.ccache_count }} + ccache_size: ${{ steps.analysis.outputs.ccache_size }} + steps: + - name: 📊 Analyze Repository Health + id: analysis + uses: actions/github-script@v7 + with: + script: | + const { owner, repo } = context.repo; + const forceCleanup = '${{ inputs.force_cleanup }}' === 'true'; + + console.log('🔍 Analyzing repository cache health...\n'); + + let page = 1; + let totalCacheSize = 0; + let cacheCount = 0; + let oldestCache = null; + let newestCache = null; + + const cachesByType = { + ccache: { size: 0, count: 0 }, + apt: { size: 0, count: 0 }, + kernel: { size: 0, count: 0 }, + other: { size: 0, count: 0 } + }; + + const cachesByDevice = new Map(); + const cacheAgeDistribution = { + fresh: 0, // < 7 days + recent: 0, // 7-14 days + old: 0, // 14-30 days + stale: 0 // > 30 days + }; + + // Collect all cache data + while (true) { + const res = await github.rest.actions.getActionsCacheList({ + owner, + repo, + per_page: 100, + page: page + }); + + const caches = res.data.actions_caches; + if (!caches || caches.length === 0) break; + + for (const cache of caches) { + totalCacheSize += cache.size_in_bytes; + cacheCount++; + + const cacheDate = new Date(cache.created_at); + const ageDays = Math.floor((new Date() - cacheDate) / (1000 * 60 * 60 * 24)); + + // Age distribution + if (ageDays < 7) cacheAgeDistribution.fresh++; + else if (ageDays < 14) cacheAgeDistribution.recent++; + else if (ageDays < 30) cacheAgeDistribution.old++; + else 
cacheAgeDistribution.stale++; + + if (!oldestCache || cacheDate < new Date(oldestCache.created_at)) { + oldestCache = cache; + } + if (!newestCache || cacheDate > new Date(newestCache.created_at)) { + newestCache = cache; + } + + // Categorize by type + if (cache.key.startsWith('ccache-')) { + cachesByType.ccache.size += cache.size_in_bytes; + cachesByType.ccache.count++; + + // Extract device name from ccache key + const match = cache.key.match(/ccache-([^-]+)-/); + if (match) { + const device = match[1]; + if (!cachesByDevice.has(device)) { + cachesByDevice.set(device, { + size: 0, + count: 0, + lastAccessed: cache.last_accessed_at, + oldestCache: ageDays, + newestCache: ageDays + }); + } + const deviceData = cachesByDevice.get(device); + deviceData.size += cache.size_in_bytes; + deviceData.count++; + deviceData.oldestCache = Math.max(deviceData.oldestCache, ageDays); + deviceData.newestCache = Math.min(deviceData.newestCache, ageDays); + if (new Date(cache.last_accessed_at) > new Date(deviceData.lastAccessed)) { + deviceData.lastAccessed = cache.last_accessed_at; + } + } + } else if (cache.key.includes('apt')) { + cachesByType.apt.size += cache.size_in_bytes; + cachesByType.apt.count++; + } else if (cache.key.includes('kernel') || cache.key.includes('android')) { + cachesByType.kernel.size += cache.size_in_bytes; + cachesByType.kernel.count++; + } else { + cachesByType.other.size += cache.size_in_bytes; + cachesByType.other.count++; + } + } + + if (caches.length < 100) break; + page++; + } + + // Calculate metrics + const totalGB = (totalCacheSize / 1024 / 1024 / 1024).toFixed(2); + const limit = 10; // 10 GB GitHub limit + const usagePercent = ((totalCacheSize / (limit * 1024 * 1024 * 1024)) * 100).toFixed(1); + + // Determine if cleanup is needed + const shouldCleanup = forceCleanup || parseFloat(usagePercent) > 75; + + // Health status + let healthEmoji = '🟢'; + let healthStatus = 'Healthy'; + if (usagePercent > 90) { + healthEmoji = '🔴'; + healthStatus = 
'Critical'; + } else if (usagePercent > 75) { + healthEmoji = '🟡'; + healthStatus = 'Warning'; + } + + // Output for next jobs + core.setOutput('should_cleanup', shouldCleanup.toString()); + core.setOutput('total_size', totalCacheSize.toString()); + core.setOutput('cache_count', cacheCount.toString()); + core.setOutput('usage_percent', usagePercent); + core.setOutput('ccache_count', cachesByType.ccache.count.toString()); + core.setOutput('ccache_size', cachesByType.ccache.size.toString()); + + // Generate detailed summary + let summary = core.summary + .addHeading(`${healthEmoji} Cache Health Analysis - ${healthStatus}`) + .addRaw(`\n### 📊 Overall Statistics\n\n`) + .addTable([ + [{data: 'Metric', header: true}, {data: 'Value', header: true}], + ['Total Caches', cacheCount.toString()], + ['Total Size', `${totalGB} GB`], + ['Limit', `${limit} GB`], + ['Usage', `${usagePercent}%`], + ['Available', `${(limit - parseFloat(totalGB)).toFixed(2)} GB`], + ['Status', healthStatus] + ]); + + // Cache breakdown by type + summary.addRaw(`\n### 🗂️ Cache Breakdown by Type\n\n`) + .addTable([ + [{data: 'Type', header: true}, {data: 'Count', header: true}, {data: 'Size (GB)', header: true}, {data: 'Percentage', header: true}], + ['ccache (Kernel Builds)', + cachesByType.ccache.count.toString(), + (cachesByType.ccache.size / 1024 / 1024 / 1024).toFixed(2), + `${((cachesByType.ccache.size / totalCacheSize) * 100).toFixed(1)}%` + ], + ['Kernel-related', + cachesByType.kernel.count.toString(), + (cachesByType.kernel.size / 1024 / 1024 / 1024).toFixed(2), + `${((cachesByType.kernel.size / totalCacheSize) * 100).toFixed(1)}%` + ], + ['APT Packages', + cachesByType.apt.count.toString(), + (cachesByType.apt.size / 1024 / 1024 / 1024).toFixed(2), + `${((cachesByType.apt.size / totalCacheSize) * 100).toFixed(1)}%` + ], + ['Other', + cachesByType.other.count.toString(), + (cachesByType.other.size / 1024 / 1024 / 1024).toFixed(2), + `${((cachesByType.other.size / totalCacheSize) * 
100).toFixed(1)}%` + ] + ]); + + // ccache-specific statistics + if (cachesByType.ccache.count > 0) { + const avgCcacheSize = cachesByType.ccache.size / cachesByType.ccache.count; + const avgCcacheSizeMB = (avgCcacheSize / 1024 / 1024).toFixed(2); + const ccachePercent = ((cachesByType.ccache.size / totalCacheSize) * 100).toFixed(1); + + summary.addRaw(`\n### ⚡ ccache Statistics\n\n`) + .addTable([ + [{data: 'Metric', header: true}, {data: 'Value', header: true}], + ['Total ccache Entries', cachesByType.ccache.count.toString()], + ['Total ccache Size', `${(cachesByType.ccache.size / 1024 / 1024 / 1024).toFixed(2)} GB`], + ['Average Cache Size', `${avgCcacheSizeMB} MB`], + ['Percentage of Total', `${ccachePercent}%`], + ['Unique Devices', cachesByDevice.size.toString()] + ]); + + // ccache recommendations + if (parseFloat(ccachePercent) > 80) { + summary.addRaw(`\n⚠️ **ccache dominates storage** (${ccachePercent}%)\n`) + .addRaw(`- Consider reducing \`CCACHE_MAXSIZE\` in build workflow\n`) + .addRaw(`- Clean stale device caches not actively built\n`) + .addRaw(`- Use \`clean_build\` option occasionally to verify builds\n`); + } + } + + // Age distribution + summary.addRaw(`\n### 📅 Cache Age Distribution\n\n`) + .addTable([ + [{data: 'Age Range', header: true}, {data: 'Count', header: true}, {data: 'Percentage', header: true}], + ['Fresh (< 7 days)', cacheAgeDistribution.fresh.toString(), `${((cacheAgeDistribution.fresh / cacheCount) * 100).toFixed(1)}%`], + ['Recent (7-14 days)', cacheAgeDistribution.recent.toString(), `${((cacheAgeDistribution.recent / cacheCount) * 100).toFixed(1)}%`], + ['Old (14-30 days)', cacheAgeDistribution.old.toString(), `${((cacheAgeDistribution.old / cacheCount) * 100).toFixed(1)}%`], + ['Stale (> 30 days)', cacheAgeDistribution.stale.toString(), `${((cacheAgeDistribution.stale / cacheCount) * 100).toFixed(1)}%`] + ]); + + // Top devices by cache size + if (cachesByDevice.size > 0) { + const topDevices = 
Array.from(cachesByDevice.entries()) + .sort((a, b) => b[1].size - a[1].size) + .slice(0, 15); + + summary.addRaw(`\n### 📱 Top 15 Devices by Cache Size\n\n`) + .addTable([ + [{data: 'Device', header: true}, {data: 'Caches', header: true}, {data: 'Size (GB)', header: true}, {data: 'Age Range (days)', header: true}], + ...topDevices.map(([device, data]) => [ + device, + data.count.toString(), + (data.size / 1024 / 1024 / 1024).toFixed(2), + `${data.newestCache}-${data.oldestCache}` + ]) + ]); + + // Identify inactive devices + const now = new Date(); + const inactiveThreshold = 30; // days + const inactiveDevices = Array.from(cachesByDevice.entries()) + .filter(([_, data]) => { + const daysSinceAccess = Math.floor((now - new Date(data.lastAccessed)) / (1000 * 60 * 60 * 24)); + return daysSinceAccess > inactiveThreshold; + }) + .sort((a, b) => b[1].size - a[1].size); + + if (inactiveDevices.length > 0) { + summary.addRaw(`\n### 🚫 Inactive Devices (Not accessed in ${inactiveThreshold}+ days)\n\n`) + .addTable([ + [{data: 'Device', header: true}, {data: 'Size (GB)', header: true}, {data: 'Days Since Access', header: true}], + ...inactiveDevices.slice(0, 10).map(([device, data]) => [ + device, + (data.size / 1024 / 1024 / 1024).toFixed(2), + Math.floor((now - new Date(data.lastAccessed)) / (1000 * 60 * 60 * 24)).toString() + ]) + ]); + + const inactiveSizeGB = inactiveDevices.reduce((sum, [_, data]) => sum + data.size, 0) / 1024 / 1024 / 1024; + summary.addRaw(`\n💡 **Potential savings:** ${inactiveSizeGB.toFixed(2)} GB by cleaning inactive devices\n`); + } + } + + // Age information + if (oldestCache && newestCache) { + const oldestDate = new Date(oldestCache.created_at); + const newestDate = new Date(newestCache.created_at); + const daysDiff = Math.floor((newestDate - oldestDate) / (1000 * 60 * 60 * 24)); + + summary.addRaw(`\n### 📅 Cache Age Information\n\n`) + .addTable([ + [{data: 'Metric', header: true}, {data: 'Value', header: true}], + ['Oldest Cache', 
oldestDate.toISOString().split('T')[0]], + ['Newest Cache', newestDate.toISOString().split('T')[0]], + ['Age Range', `${daysDiff} days`] + ]); + } + + // Recommendations + summary.addRaw(`\n### 💡 Recommendations\n\n`); + + if (parseFloat(usagePercent) > 90) { + summary.addRaw(`- 🔴 **URGENT:** Cache usage is critical (${usagePercent}%)\n`) + .addRaw(`- Run cleanup immediately with \`device_filter: ALL\`\n`) + .addRaw(`- Consider cleaning old device caches\n`) + .addRaw(`- Review ccache size limits in build workflows\n`); + } else if (parseFloat(usagePercent) > 75) { + summary.addRaw(`- 🟡 **WARNING:** Cache usage is high (${usagePercent}%)\n`) + .addRaw(`- Schedule cleanup soon\n`) + .addRaw(`- Consider targeting specific devices\n`); + } else { + summary.addRaw(`- 🟢 Cache usage is healthy (${usagePercent}%)\n`) + .addRaw(`- Regular weekly cleanup recommended\n`) + .addRaw(`- No immediate action required\n`); + } + + // Stale cache recommendations + if (cacheAgeDistribution.stale > 0) { + const staleSizeEstimate = (cacheAgeDistribution.stale / cacheCount) * totalCacheSize; + const staleSizeGB = (staleSizeEstimate / 1024 / 1024 / 1024).toFixed(2); + summary.addRaw(`\n- 📦 **${cacheAgeDistribution.stale} stale caches** (>30 days old)\n`) + .addRaw(`- Estimated size: ~${staleSizeGB} GB\n`) + .addRaw(`- Run with \`cache_pattern: ccache_stale\` to clean\n`); + } + + if (shouldCleanup && !forceCleanup) { + summary.addRaw(`\n⚠️ **Automatic cleanup will proceed** (usage > 75%)\n`); + } else if (forceCleanup) { + summary.addRaw(`\n⚡ **Force cleanup enabled** - proceeding regardless of usage\n`); + } + + summary.write(); + + console.log(`\n✅ Analysis complete:`); + console.log(` - Total: ${totalGB} GB (${usagePercent}%)`); + console.log(` - Caches: ${cacheCount}`); + console.log(` - ccache: ${cachesByType.ccache.count} (${(cachesByType.ccache.size / 1024 / 1024 / 1024).toFixed(2)} GB)`); + console.log(` - Devices: ${cachesByDevice.size}`); + console.log(` - Cleanup needed: 
${shouldCleanup}`); + + cleanup-caches: + runs-on: ubuntu-latest + needs: analyze + if: | + always() && + (inputs.cleanup_type == 'cache_only' || inputs.cleanup_type == 'full_cleanup') && + (inputs.cleanup_type != 'analyze_only') + + steps: + - name: 🗑️ Smart Cache Cleanup + uses: actions/github-script@v7 + with: + script: | + const { owner, repo } = context.repo; + const deviceFilter = '${{ inputs.device_filter }}'; + const cachePattern = '${{ inputs.cache_pattern }}'; + const dryRun = '${{ inputs.dry_run }}' === 'true'; + const cacheAgeDays = parseInt('${{ inputs.cache_age_days }}'); + const shouldCleanup = '${{ needs.analyze.outputs.should_cleanup }}' === 'true'; + const forceCleanup = '${{ inputs.force_cleanup }}' === 'true'; + + console.log(`🎯 Configuration:`); + console.log(` - Device Filter: ${deviceFilter}`); + console.log(` - Cache Pattern: ${cachePattern}`); + console.log(` - Age Threshold: ${cacheAgeDays} days`); + console.log(` - Dry Run: ${dryRun}`); + console.log(` - Should Cleanup: ${shouldCleanup}`); + console.log(` - Force Cleanup: ${forceCleanup}\n`); + + // Check if we should proceed + if (!shouldCleanup && !forceCleanup && !dryRun) { + console.log('ℹ️ Cache usage is healthy, skipping cleanup'); + console.log('💡 Use force_cleanup=true to cleanup anyway'); + core.summary + .addHeading('ℹ️ Cleanup Skipped') + .addRaw(`Cache usage is healthy (${'${{ needs.analyze.outputs.usage_percent }}'}%)\n\n`) + .addRaw('No cleanup needed at this time.\n') + .write(); + return; + } + + const cutoffDate = new Date(); + cutoffDate.setDate(cutoffDate.getDate() - cacheAgeDays); + + // For stale ccache pattern, use 14 days + const staleCutoffDate = new Date(); + staleCutoffDate.setDate(staleCutoffDate.getDate() - 14); + + let totalDeleted = 0; + let totalSize = 0; + let page = 1; + const deletedCaches = []; + const skippedCaches = []; + + // Track statistics by category + const deletionStats = { + ccache: { count: 0, size: 0 }, + apt: { count: 0, size: 0 }, + kernel: { 
count: 0, size: 0 }, + other: { count: 0, size: 0 } + }; + + // Helper function to check if cache should be deleted + function shouldDeleteCache(cache) { + const cacheKey = cache.key; + const cacheDate = new Date(cache.created_at); + const ageDays = Math.floor((new Date() - cacheDate) / (1000 * 60 * 60 * 24)); + + // Device filter logic + const deviceMatch = + deviceFilter === 'ALL' || + cacheKey.includes(deviceFilter) || + cacheKey.includes(`-${deviceFilter}-`) || + cacheKey.startsWith(`${deviceFilter}-`); + + if (!deviceMatch) { + return { delete: false, reason: 'device_filter' }; + } + + // Age filter for 'old_only' pattern + if (cachePattern === 'old_only' && cacheDate >= cutoffDate) { + return { delete: false, reason: 'too_new' }; + } + + // Cache pattern logic + let patternMatch = false; + let category = 'other'; + + switch (cachePattern) { + case 'ccache_only': + patternMatch = cacheKey.startsWith('ccache-'); + category = 'ccache'; + break; + + case 'ccache_stale': + patternMatch = cacheKey.startsWith('ccache-') && cacheDate < staleCutoffDate; + category = 'ccache'; + break; + + case 'apt_only': + patternMatch = cacheKey.includes('apt-cache') || cacheKey.includes('apt-'); + category = 'apt'; + break; + + case 'kernel_only': + patternMatch = cacheKey.includes('kernel-') || + cacheKey.includes('android') || + cacheKey.startsWith('ccache-'); + if (cacheKey.startsWith('ccache-')) category = 'ccache'; + else category = 'kernel'; + break; + + case 'old_only': + patternMatch = cacheDate < cutoffDate; + if (cacheKey.startsWith('ccache-')) category = 'ccache'; + else if (cacheKey.includes('apt')) category = 'apt'; + else if (cacheKey.includes('kernel')) category = 'kernel'; + break; + + case 'all_caches': + default: + patternMatch = true; + if (cacheKey.startsWith('ccache-')) category = 'ccache'; + else if (cacheKey.includes('apt')) category = 'apt'; + else if (cacheKey.includes('kernel')) category = 'kernel'; + } + + return { + delete: patternMatch, + reason: 
patternMatch ? 'match' : 'pattern_mismatch', + category: category + }; + } + + // Fetch and process caches + console.log('🔍 Scanning caches...\n'); + + while (true) { + const res = await github.rest.actions.getActionsCacheList({ + owner, + repo, + per_page: 100, + page: page + }); + + const caches = res.data.actions_caches; + if (!caches || caches.length === 0) break; + + for (const cache of caches) { + const decision = shouldDeleteCache(cache); + const sizeMB = (cache.size_in_bytes / 1024 / 1024).toFixed(2); + const agedays = Math.floor((new Date() - new Date(cache.created_at)) / (1000 * 60 * 60 * 24)); + + if (decision.delete) { + deletedCaches.push({ + key: cache.key, + size: sizeMB, + sizeBytes: cache.size_in_bytes, + age: agedays, + created: cache.created_at, + id: cache.id, + category: decision.category + }); + + if (dryRun) { + console.log(`🔍 Would delete: ${cache.key}`); + console.log(` Size: ${sizeMB} MB | Age: ${agedays} days | Type: ${decision.category}`); + } else { + console.log(`🗑️ Deleting: ${cache.key}`); + console.log(` Size: ${sizeMB} MB | Age: ${agedays} days | Type: ${decision.category}`); + + try { + await github.rest.actions.deleteActionsCacheById({ + owner, + repo, + cache_id: cache.id + }); + totalDeleted++; + totalSize += cache.size_in_bytes; + + // Update category stats + deletionStats[decision.category].count++; + deletionStats[decision.category].size += cache.size_in_bytes; + } catch (error) { + console.log(` ⚠️ Failed: ${error.message}`); + skippedCaches.push({ + key: cache.key, + reason: error.message + }); + } + } + } else if (decision.reason !== 'device_filter') { + // Only log non-device-filter skips in verbose mode + // console.log(`⏭️ Skipping: ${cache.key} (${decision.reason})`); + } + } + + if (caches.length < 100) break; + page++; + } + + const sizeMB = (totalSize / 1024 / 1024).toFixed(2); + const sizeGB = (totalSize / 1024 / 1024 / 1024).toFixed(2); + + console.log(`\n✅ ${dryRun ? 
'Would delete' : 'Deleted'} ${totalDeleted} caches`); + console.log(`📊 Space ${dryRun ? 'would be' : ''} freed: ${sizeMB} MB (${sizeGB} GB)`); + + // Generate detailed summary + let summary = core.summary + .addHeading(`🧹 Cache Cleanup ${dryRun ? 'Preview' : 'Summary'}`) + .addTable([ + [{data: 'Metric', header: true}, {data: 'Value', header: true}], + ['Caches ' + (dryRun ? 'to Delete' : 'Deleted'), totalDeleted.toString()], + ['Space ' + (dryRun ? 'to Free' : 'Freed'), `${sizeMB} MB (${sizeGB} GB)`], + ['Device Filter', deviceFilter], + ['Cache Pattern', cachePattern], + ['Age Threshold', `${cacheAgeDays} days`] + ]); + + // Deletion breakdown by type + if (totalDeleted > 0 || dryRun) { + summary.addHeading('📊 Deletion Breakdown by Type', 3) + .addTable([ + [{data: 'Type', header: true}, {data: 'Count', header: true}, {data: 'Size (GB)', header: true}, {data: 'Percentage', header: true}], + ['ccache', + deletionStats.ccache.count.toString(), + (deletionStats.ccache.size / 1024 / 1024 / 1024).toFixed(2), + totalSize > 0 ? `${((deletionStats.ccache.size / totalSize) * 100).toFixed(1)}%` : '0%' + ], + ['Kernel-related', + deletionStats.kernel.count.toString(), + (deletionStats.kernel.size / 1024 / 1024 / 1024).toFixed(2), + totalSize > 0 ? `${((deletionStats.kernel.size / totalSize) * 100).toFixed(1)}%` : '0%' + ], + ['APT Packages', + deletionStats.apt.count.toString(), + (deletionStats.apt.size / 1024 / 1024 / 1024).toFixed(2), + totalSize > 0 ? `${((deletionStats.apt.size / totalSize) * 100).toFixed(1)}%` : '0%' + ], + ['Other', + deletionStats.other.count.toString(), + (deletionStats.other.size / 1024 / 1024 / 1024).toFixed(2), + totalSize > 0 ? 
`${((deletionStats.other.size / totalSize) * 100).toFixed(1)}%` : '0%' + ] + ]); + } + + // Add top 20 largest caches + if (deletedCaches.length > 0) { + const topCaches = deletedCaches + .sort((a, b) => parseFloat(b.size) - parseFloat(a.size)) + .slice(0, 20); + + summary.addHeading('📦 Top 20 Largest Caches ' + (dryRun ? 'to Delete' : 'Deleted'), 3) + .addTable([ + [{data: 'Cache Key', header: true}, {data: 'Size (MB)', header: true}, {data: 'Age (days)', header: true}, {data: 'Type', header: true}], + ...topCaches.map(c => [c.key, c.size, c.age.toString(), c.category]) + ]); + } + + // Add oldest caches + if (deletedCaches.length > 0) { + const oldestCaches = deletedCaches + .sort((a, b) => b.age - a.age) + .slice(0, 10); + + summary.addHeading('📅 Top 10 Oldest Caches ' + (dryRun ? 'to Delete' : 'Deleted'), 3) + .addTable([ + [{data: 'Cache Key', header: true}, {data: 'Age (days)', header: true}, {data: 'Size (MB)', header: true}, {data: 'Type', header: true}], + ...oldestCaches.map(c => [c.key, c.age.toString(), c.size, c.category]) + ]); + } + + // Device breakdown for ccache deletions + if (deletionStats.ccache.count > 0) { + const deviceDeletions = new Map(); + for (const cache of deletedCaches) { + if (cache.category === 'ccache') { + const match = cache.key.match(/ccache-([^-]+)-/); + if (match) { + const device = match[1]; + if (!deviceDeletions.has(device)) { + deviceDeletions.set(device, { count: 0, size: 0 }); + } + const data = deviceDeletions.get(device); + data.count++; + data.size += cache.sizeBytes; + } + } + } + + if (deviceDeletions.size > 0) { + const topDevices = Array.from(deviceDeletions.entries()) + .sort((a, b) => b[1].size - a[1].size) + .slice(0, 10); + + summary.addHeading('📱 Top 10 Devices by Deleted ccache', 3) + .addTable([ + [{data: 'Device', header: true}, {data: 'Caches', header: true}, {data: 'Size (GB)', header: true}], + ...topDevices.map(([device, data]) => [ + device, + data.count.toString(), + (data.size / 1024 / 1024 / 
1024).toFixed(2) + ]) + ]); + } + } + + // Add failures if any + if (skippedCaches.length > 0) { + summary.addHeading('⚠️ Failed Deletions', 3) + .addTable([ + [{data: 'Cache Key', header: true}, {data: 'Reason', header: true}], + ...skippedCaches.map(c => [c.key, c.reason]) + ]); + } + + summary.write(); + + cleanup-runs: + runs-on: ubuntu-latest + needs: analyze + if: | + always() && + (inputs.cleanup_type == 'runs_only' || inputs.cleanup_type == 'full_cleanup') && + (inputs.cleanup_type != 'analyze_only') + + steps: + - name: 🗑️ Clean old workflow runs + uses: actions/github-script@v7 + with: + script: | + const { owner, repo } = context.repo; + const keepRecent = parseInt('${{ inputs.keep_recent_runs }}'); + const daysToKeep = parseInt('${{ inputs.days_to_keep }}'); + const dryRun = '${{ inputs.dry_run }}' === 'true'; + const cutoffDate = new Date(); + cutoffDate.setDate(cutoffDate.getDate() - daysToKeep); + + console.log(`📅 Configuration:`); + console.log(` - Keeping runs from: ${cutoffDate.toISOString()}`); + console.log(` - Keeping ${keepRecent} most recent successful runs per workflow`); + console.log(` - Dry Run: ${dryRun}\n`); + + const workflows = await github.rest.actions.listRepoWorkflows({ + owner, + repo + }); + + let totalDeleted = 0; + let totalFailed = 0; + const workflowStats = []; + + for (const workflow of workflows.data.workflows) { + console.log(`\n📋 Processing: ${workflow.name}`); + + let page = 1; + let successfulRuns = []; + let deletedInWorkflow = 0; + let failedInWorkflow = 0; + + const runsByStatus = { + success: 0, + failure: 0, + cancelled: 0, + skipped: 0, + other: 0 + }; + + while (true) { + const runs = await github.rest.actions.listWorkflowRuns({ + owner, + repo, + workflow_id: workflow.id, + per_page: 100, + page: page + }); + + if (runs.data.workflow_runs.length === 0) break; + + for (const run of runs.data.workflow_runs) { + const runDate = new Date(run.created_at); + const ageDays = Math.floor((new Date() - runDate) / (1000 * 
60 * 60 * 24)); + + // Count by status + runsByStatus[(run.conclusion in runsByStatus) ? run.conclusion : 'other']++; + + // Track successful runs + if (run.conclusion === 'success') { + successfulRuns.push(run); + } + + // Determine if should delete + const isOld = runDate < cutoffDate; + const isFailed = run.conclusion === 'failure'; + const isCancelled = run.conclusion === 'cancelled'; + const isSkipped = run.conclusion === 'skipped'; + const tooManySuccessful = run.conclusion === 'success' && + successfulRuns.length > keepRecent; + + const shouldDelete = + (isFailed && isOld) || + (isCancelled && isOld) || + (isSkipped && isOld) || + (tooManySuccessful && isOld); + + if (shouldDelete) { + if (dryRun) { + console.log(`🔍 Would delete: #${run.run_number} (${run.conclusion}, ${ageDays}d old)`); + } else { + console.log(`🗑️ Deleting: #${run.run_number} (${run.conclusion}, ${ageDays}d old)`); + + try { + await github.rest.actions.deleteWorkflowRun({ + owner, + repo, + run_id: run.id + }); + deletedInWorkflow++; + totalDeleted++; + } catch (error) { + console.log(` ⚠️ Failed: ${error.message}`); + failedInWorkflow++; + totalFailed++; + } + } + } + } + + if (runs.data.workflow_runs.length < 100) break; + page++; + } + + if (deletedInWorkflow > 0 || Object.values(runsByStatus).some(v => v > 0)) { + workflowStats.push({ + name: workflow.name, + deleted: deletedInWorkflow, + failed: failedInWorkflow, + stats: runsByStatus + }); + } + } + + console.log(`\n✅ Total runs ${dryRun ? 'to delete' : 'deleted'}: ${totalDeleted}`); + if (totalFailed > 0) { + console.log(`⚠️ Failed deletions: ${totalFailed}`); + } + + // Generate summary + let summary = core.summary + .addHeading(`🧹 Workflow Runs ${dryRun ? 'Preview' : 'Summary'}`) + .addTable([ + [{data: 'Metric', header: true}, {data: 'Value', header: true}], + ['Runs ' + (dryRun ?
'to Delete' : 'Deleted'), totalDeleted.toString()], + ['Failed Deletions', totalFailed.toString()], + ['Kept Recent Successful', keepRecent.toString()], + ['Days Kept', daysToKeep.toString()] + ]); + + if (workflowStats.length > 0) { + summary.addHeading('📊 Per-Workflow Breakdown', 3) + .addTable([ + [ + {data: 'Workflow', header: true}, + {data: 'Deleted', header: true}, + {data: 'Success', header: true}, + {data: 'Failure', header: true}, + {data: 'Cancelled', header: true} + ], + ...workflowStats.map(w => [ + w.name, + w.deleted.toString(), + w.stats.success.toString(), + w.stats.failure.toString(), + w.stats.cancelled.toString() + ]) + ]); + } + + summary.write(); + + final-report: + runs-on: ubuntu-latest + needs: [analyze, cleanup-caches, cleanup-runs] + if: always() + + steps: + - name: 📊 Final Repository Health Report + uses: actions/github-script@v7 + with: + script: | + const { owner, repo } = context.repo; + const dryRun = '${{ inputs.dry_run }}' === 'true'; + + console.log('📊 Generating final health report...\n'); + + // Get current cache usage (after cleanup) + let page = 1; + let totalCacheSize = 0; + let cachesByType = { + ccache: 0, + apt: 0, + kernel: 0, + other: 0 + }; + let cacheCount = 0; + const buildFrequency = new Map(); + const recentCutoff = new Date(); + recentCutoff.setDate(recentCutoff.getDate() - 30); + + while (true) { + const res = await github.rest.actions.getActionsCacheList({ + owner, + repo, + per_page: 100, + page: page + }); + + const caches = res.data.actions_caches; + if (!caches || caches.length === 0) break; + + for (const cache of caches) { + totalCacheSize += cache.size_in_bytes; + cacheCount++; + + if (cache.key.startsWith('ccache-')) { + cachesByType.ccache += cache.size_in_bytes; + + // Track build frequency + const match = cache.key.match(/ccache-([^-]+)-/); + if (match && new Date(cache.last_accessed_at) > recentCutoff) { + const device = match[1]; + buildFrequency.set(device, (buildFrequency.get(device) || 0) + 1); + 
} + } else if (cache.key.includes('apt')) { + cachesByType.apt += cache.size_in_bytes; + } else if (cache.key.includes('kernel') || cache.key.includes('android')) { + cachesByType.kernel += cache.size_in_bytes; + } else { + cachesByType.other += cache.size_in_bytes; + } + } + + if (caches.length < 100) break; + page++; + } + + const totalGB = (totalCacheSize / 1024 / 1024 / 1024).toFixed(2); + const ccacheGB = (cachesByType.ccache / 1024 / 1024 / 1024).toFixed(2); + const aptGB = (cachesByType.apt / 1024 / 1024 / 1024).toFixed(2); + const kernelGB = (cachesByType.kernel / 1024 / 1024 / 1024).toFixed(2); + const otherGB = (cachesByType.other / 1024 / 1024 / 1024).toFixed(2); + + const limit = 10; // 10 GB + const usagePercent = ((totalCacheSize / (limit * 1024 * 1024 * 1024)) * 100).toFixed(1); + + // Calculate change if we have before data + const beforeSize = parseFloat('${{ needs.analyze.outputs.total_cache_size }}'); + const beforeCount = parseInt('${{ needs.analyze.outputs.cache_count }}'); + const beforePercent = parseFloat('${{ needs.analyze.outputs.usage_percent }}'); + + let changeInfo = ''; + if (beforeSize > 0 && !dryRun) { + const sizeChange = beforeSize - totalCacheSize; + const countChange = beforeCount - cacheCount; + const percentChange = beforePercent - parseFloat(usagePercent); + + const changeGB = (sizeChange / 1024 / 1024 / 1024).toFixed(2); + + changeInfo = `\n### 📈 Cleanup Impact\n\n` + + `- **Space Freed:** ${changeGB} GB\n` + + `- **Caches Removed:** ${countChange}\n` + + `- **Usage Reduced:** ${percentChange.toFixed(1)}%\n`; + } + + // Determine health status + let healthEmoji = '🟢'; + let healthStatus = 'Healthy'; + if (usagePercent > 90) { + healthEmoji = '🔴'; + healthStatus = 'Critical - Immediate Cleanup Needed!'; + } else if (usagePercent > 75) { + healthEmoji = '🟡'; + healthStatus = 'Warning - Consider Cleanup Soon'; + } + + let summary = core.summary + .addHeading(`${healthEmoji} Final Repository Health Report`) + .addRaw(dryRun ? 
'\n**⚠️ This was a DRY RUN - No changes were made**\n\n' : '\n') + .addTable([ + [{data: 'Cache Type', header: true}, {data: 'Size (GB)', header: true}, {data: 'Percentage', header: true}], + ['ccache (Kernel Builds)', ccacheGB, totalCacheSize > 0 ? `${((cachesByType.ccache / totalCacheSize) * 100).toFixed(1)}%` : '0%'], + ['Kernel-related', kernelGB, totalCacheSize > 0 ? `${((cachesByType.kernel / totalCacheSize) * 100).toFixed(1)}%` : '0%'], + ['APT Packages', aptGB, totalCacheSize > 0 ? `${((cachesByType.apt / totalCacheSize) * 100).toFixed(1)}%` : '0%'], + ['Other', otherGB, totalCacheSize > 0 ? `${((cachesByType.other / totalCacheSize) * 100).toFixed(1)}%` : '0%'], + ['**Total**', `**${totalGB}**`, '**100%**'] + ]) + .addRaw(`\n### 📊 Current Usage Statistics\n\n`) + .addRaw(`- **Total Caches:** ${cacheCount}\n`) + .addRaw(`- **Limit:** ${limit} GB\n`) + .addRaw(`- **Current Usage:** ${totalGB} GB (${usagePercent}%)\n`) + .addRaw(`- **Available:** ${(limit - parseFloat(totalGB)).toFixed(2)} GB\n`) + .addRaw(`- **Status:** ${healthStatus}\n`) + .addRaw(changeInfo); + + // Build activity analysis + if (buildFrequency.size > 0) { + const activeDevices = Array.from(buildFrequency.entries()) + .filter(([_, count]) => count >= 3) + .sort((a, b) => b[1] - a[1]); + + summary.addRaw(`\n### 📱 Build Activity Analysis (Last 30 Days)\n\n`) + .addRaw(`- **Active Devices:** ${activeDevices.length}\n`) + .addRaw(`- **Total Build Events:** ${Array.from(buildFrequency.values()).reduce((a, b) => a + b, 0)}\n`); + + if (activeDevices.length > 0) { + const topActive = activeDevices.slice(0, 10); + summary.addRaw(`\n**Top 10 Most Active Devices:**\n`) + .addTable([ + [{data: 'Device', header: true}, {data: 'Build Events', header: true}], + ...topActive.map(([device, count]) => [device, count.toString()]) + ]); + } + } + + summary.addRaw(`\n### 💡 Next Steps\n\n`); + + if (dryRun) { + summary.addRaw(`- ℹ️ This was a dry run - run again with \`dry_run: false\` to apply changes\n`); + } 
else if (parseFloat(usagePercent) > 75) { + summary.addRaw(`- ⚠️ **Action Required:** Usage still high, consider additional cleanup\n`) + .addRaw(`- Target specific devices or cache types\n`) + .addRaw(`- Review ccache size limits\n`); + } else { + summary.addRaw(`- ✅ Cache usage is now healthy\n`) + .addRaw(`- Schedule regular cleanup (weekly recommended)\n`) + .addRaw(`- Monitor usage trends\n`); + } + + summary.addRaw(`\n### 🔄 Cleanup Schedule Recommendations\n\n`) + .addRaw(`- **Daily:** If building > 10 devices regularly\n`) + .addRaw(`- **Weekly:** For moderate usage (5-10 devices)\n`) + .addRaw(`- **Monthly:** For light usage (< 5 devices)\n`) + .addRaw(`- **On-demand:** When usage exceeds 75%\n`); + + summary.write(); + + console.log(`\n✅ Final report generated`); + console.log(` - Current usage: ${totalGB} GB (${usagePercent}%)`); + console.log(` - Status: ${healthStatus}`); + + - name: 💡 Quick Action Commands + if: always() + run: | + cat >> $GITHUB_STEP_SUMMARY << 'EOF' + + --- + + ## 🚀 Quick Action Commands + + ### High Usage Cleanup (>75%) + ```bash + # Clean all old caches (>7 days) + gh workflow run clean-up.yml \ + -f cleanup_type=cache_only \ + -f cache_pattern=old_only \ + -f cache_age_days=7 + + # Clean stale ccache (>14 days) + gh workflow run clean-up.yml \ + -f cleanup_type=cache_only \ + -f cache_pattern=ccache_stale + + # Clean specific device + gh workflow run clean-up.yml \ + -f cleanup_type=cache_only \ + -f device_filter=OP13 \ + -f cache_pattern=ccache_only + ``` + + ### Regular Maintenance + ```bash + # Weekly cleanup (recommended) + gh workflow run clean-up.yml \ + -f cleanup_type=full_cleanup \ + -f cache_age_days=14 + + # Analyze only (no changes) + gh workflow run clean-up.yml \ + -f cleanup_type=analyze_only + + # Dry run preview + gh workflow run clean-up.yml \ + -f cleanup_type=cache_only \ + -f dry_run=true + ``` + + ### Emergency Cleanup + ```bash + # Force cleanup everything old + gh workflow run clean-up.yml \ + -f 
cleanup_type=full_cleanup \ + -f force_cleanup=true \ + -f cache_age_days=3 + + # Clean all ccache immediately + gh workflow run clean-up.yml \ + -f cleanup_type=cache_only \ + -f cache_pattern=ccache_only \ + -f force_cleanup=true + ``` + + ### Device-Specific Cleanup + ```bash + # Clean by kernel version + gh workflow run clean-up.yml \ + -f cleanup_type=cache_only \ + -f device_filter=android15-6.6 \ + -f cache_pattern=ccache_only + + # Clean specific phone model + gh workflow run clean-up.yml \ + -f cleanup_type=cache_only \ + -f device_filter=OP13 \ + -f cache_pattern=all_caches + ``` + + ### Workflow Runs Cleanup + ```bash + # Clean old workflow runs + gh workflow run clean-up.yml \ + -f cleanup_type=runs_only \ + -f days_to_keep=7 \ + -f keep_recent_runs=3 + ``` + + --- + + **💡 Tip:** Use `dry_run=true` first to preview what will be deleted! + + EOF