# Based on https://github.com/termux/termux-packages/blob/595969f581655e8cbf65182cf84bd5ffb2cf7b89/.github/workflows/packages.yml
##
## Copyright 2020 Termux
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
##

name: Packages-TUR-AVD

on:
  push:
    branches:
      - tur-avd
    paths:
      - 'tur-avd/**'
  pull_request:
    paths:
      - 'tur-avd/**'
  workflow_dispatch:
    inputs:
      packages:
        description: "Space-separated names of the packages to rebuild"
        required: true

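# The "build" job compiles the affected packages for each target architecture on
# a macOS runner, and the "upload" job publishes the resulting *.deb archives.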
jobs:
  build:
    runs-on: macos-latest
    strategy:
      matrix:
        target_arch:
          # Disabled for aarch64 because the arm64-v8a emulator cannot start.
          # - {"TERMUX_ARCH": "aarch64", "EMU_ARCH": "arm64-v8a"}
          - {"TERMUX_ARCH": "arm", "EMU_ARCH": "armeabi-v7a"}
          - {"TERMUX_ARCH": "i686", "EMU_ARCH": "x86"}
          - {"TERMUX_ARCH": "x86_64", "EMU_ARCH": "x86_64"}
      fail-fast: false
    steps:
      - name: Clone repository
        uses: actions/checkout@v3
        with:
          fetch-depth: 1000
          path: ${{ github.workspace }}
      - name: Install basic tools
        run: brew install coreutils
      - name: Merge repos
        run: ./setup-environment.sh
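      # Work out which packages need to be (re)built: for push and pull_request
      # events the changed files are derived from the git history, while for
      # workflow_dispatch the package list comes from the workflow input.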
      - name: Gather build summary
        run: |
          if [ "${{ github.event_name }}" != "workflow_dispatch" ]; then
            BASE_COMMIT=$(jq --raw-output .pull_request.base.sha "$GITHUB_EVENT_PATH")
            OLD_COMMIT=$(jq --raw-output .commits[0].id "$GITHUB_EVENT_PATH")
            HEAD_COMMIT=$(jq --raw-output .commits[-1].id "$GITHUB_EVENT_PATH")
            if [ "$BASE_COMMIT" = "null" ]; then
              if [ "$OLD_COMMIT" = "$HEAD_COMMIT" ]; then
                # Single-commit push.
                echo "Processing commit: ${HEAD_COMMIT}"
                CHANGED_FILES=$(git diff-tree --no-commit-id --name-only -r "${HEAD_COMMIT}")
              else
                # Multi-commit push.
                OLD_COMMIT="${OLD_COMMIT}~1"
                echo "Processing commit range: ${OLD_COMMIT}..${HEAD_COMMIT}"
                CHANGED_FILES=$(git diff-tree --no-commit-id --name-only -r "${OLD_COMMIT}" "${HEAD_COMMIT}")
              fi
            else
              # Pull requests.
              echo "Processing pull request #$(jq --raw-output .pull_request.number "$GITHUB_EVENT_PATH"): ${BASE_COMMIT}..HEAD"
              CHANGED_FILES=$(git diff-tree --no-commit-id --name-only -r "${BASE_COMMIT}" "HEAD")
            fi
          fi
          mkdir -p ./artifacts ./debs
          touch ./debs/.placeholder
          echo "Files changed: ${CHANGED_FILES}"
          if [ "${{ github.event_name }}" != "workflow_dispatch" ]; then
            for repo_path in $(jq --raw-output 'keys | .[]' repo.json); do
              repo=$(jq --raw-output '.["'${repo_path}'"].name' repo.json)
              echo "Processing on repo: ${repo}"
              # Parse changed files and identify new packages and deleted packages.
              # Create lists of those packages that will be passed to upload job for
              # further processing.
              for file in $(echo "${CHANGED_FILES}"); do
                echo "File path: ${file}"
                if ! [[ $file == ${repo_path}/* ]]; then
                  # This file does not belong to a package, so ignore it.
                  continue
                fi
                if [[ $file =~ ^${repo_path}/([.a-z0-9+-]*)/([.a-z0-9+-]*).subpackage.sh$ ]]; then
                  # A subpackage was modified, check if it was deleted or just updated.
                  pkg=${BASH_REMATCH[1]}
                  subpkg=${BASH_REMATCH[2]}
                  if [ ! -f "${repo_path}/${pkg}/${subpkg}.subpackage.sh" ]; then
                    echo "$subpkg" >> ./deleted_${repo}_packages.txt
                  fi
                elif [[ $file =~ ^${repo_path}/([.a-z0-9+-]*)/.*$ ]]; then
                  # A package was modified, check if it was deleted or updated.
                  pkg=${BASH_REMATCH[1]}
                  if [ -d "${repo_path}/${pkg}" ]; then
                    echo "$pkg" >> ./built_${repo}_packages.txt
                    # If there are subpackages we want to create a list of those
                    # as well.
                    for file in $(find "${repo_path}/${pkg}/" -maxdepth 1 -type f -name \*.subpackage.sh | sort); do
                      echo "$(basename "${file%%.subpackage.sh}")" >> ./built_${repo}_subpackages.txt
                    done
                  else
                    echo "$pkg" >> ./deleted_${repo}_packages.txt
                  fi
                fi
              done
            done
          else
            for pkg in ${{ github.event.inputs.packages }}; do
              repo_paths=$(jq --raw-output 'keys | .[]' repo.json)
              found=false
              for repo_path in $repo_paths; do
                repo=$(jq --raw-output '.["'${repo_path}'"].name' repo.json)
                if [ -d "${repo_path}/${pkg}" ]; then
                  found=true
                  echo "$pkg" >> ./built_${repo}_packages.txt
                  for subpkg in $(find "${repo_path}/${pkg}/" -maxdepth 1 -type f -name \*.subpackage.sh | sort); do
                    echo "$(basename "${subpkg%%.subpackage.sh}")" >> ./built_${repo}_subpackages.txt
                  done
                fi
              done
              if [ "$found" != true ]; then
                echo "Package '${pkg}' was not found in any of the repos."
                exit 1
              fi
            done
          fi
          for repo in $(jq --raw-output '.[].name' repo.json); do
            # Remove duplicate entries from the package lists.
            if [ -f ./built_${repo}_packages.txt ]; then
              uniq ./built_${repo}_packages.txt > ./built_${repo}_packages.txt.tmp
              mv ./built_${repo}_packages.txt.tmp ./built_${repo}_packages.txt
              echo "./built_${repo}_packages.txt: "
              cat ./built_${repo}_packages.txt
            fi
            if [ -f ./built_${repo}_subpackages.txt ]; then
              uniq ./built_${repo}_subpackages.txt > ./built_${repo}_subpackages.txt.tmp
              mv ./built_${repo}_subpackages.txt.tmp ./built_${repo}_subpackages.txt
              echo "./built_${repo}_subpackages.txt: "
              cat ./built_${repo}_subpackages.txt
            fi
            if [ -f ./deleted_${repo}_packages.txt ]; then
              uniq ./deleted_${repo}_packages.txt > ./deleted_${repo}_packages.txt.tmp
              mv ./deleted_${repo}_packages.txt.tmp ./deleted_${repo}_packages.txt
            fi
          done
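      # Sanity-check the build.sh recipes of the selected packages before
      # anything is built.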
      - name: Lint packages
        run: |
          declare -a package_recipes
          for repo_path in $(jq --raw-output 'keys | .[]' repo.json); do
            repo=$(jq --raw-output '.["'${repo_path}'"].name' repo.json)
            if [ -f ./built_${repo}_packages.txt ]; then
              package_recipes="$package_recipes $(cat ./built_${repo}_packages.txt | repo_path=${repo_path} awk '{print ENVIRON["repo_path"]"/"$1"/build.sh"}')"
            fi
          done
          if [ ! -z "$package_recipes" ]; then
            ./scripts/lint-packages.sh $package_recipes
          fi

      # TODO: Generate AVD Snapshot for caching
      # - name: Check AVD Cache
      #   uses: actions/cache@v3
      #   id: avd-cache
      #   with:
      #     path: |
      #       ~/.android/avd/*
      #       ~/.android/adb*
      #     key: avd-24-${{ matrix.build_env.MAJOR_VERSION }}

      # - name: Create AVD and Generate Snapshot for Caching
      #   if: steps.avd-cache.outputs.cache-hit != 'true'
      #   uses: reactivecircus/android-emulator-runner@v2
      #   with:
      #     api-level: 24
      #     force-avd-creation: false
      #     emulator-options: -no-window -gpu swiftshader_indirect -noaudio -no-boot-anim -camera-back none
      #     disable-animations: false
      #     script: echo "Generated AVD snapshot for caching."

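      # Build the selected packages inside an Android emulator (AVD) whose ABI
      # matches the EMU_ARCH of the current matrix entry.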
      - name: Build packages
        uses: reactivecircus/android-emulator-runner@v2
        with:
          arch: ${{ matrix.target_arch.EMU_ARCH }}
          api-level: 24
          force-avd-creation: false
          emulator-options: -no-snapshot-save -no-window -gpu swiftshader_indirect -noaudio -no-boot-anim -camera-back none
          disable-animations: true
          script: env TERMUX_ARCH=${{ matrix.target_arch.TERMUX_ARCH }} ./common-files/action-avd-step-build-packages.sh
      - name: Generate build artifacts
        if: always()
        run: |
          for repo in $(jq --raw-output '.[].name' repo.json); do
            # Put package lists into directory with *.deb files so they will be
            # transferred to the upload job.
            test -f ./built_${repo}_packages.txt && mv ./built_${repo}_packages.txt ./debs/
            test -f ./built_${repo}_subpackages.txt && cat ./built_${repo}_subpackages.txt >> ./debs/built_${repo}_packages.txt \
              && rm ./built_${repo}_subpackages.txt
            test -f ./deleted_${repo}_packages.txt && mv ./deleted_${repo}_packages.txt ./debs/
            # Move only debs from built_packages into debs/ folder before
            # creating an archive.
            while read -r pkg; do
              # Match both $pkg.deb and $pkg-static.deb.
              find output \( -name "${pkg}_*.deb" -o -name "${pkg}-static_*.deb" \) -type f -print0 | xargs -0r mv -t debs/
            done < <(cat ./debs/built_${repo}_packages.txt)
          done
          # Files containing certain symbols (e.g. ":") will cause failure in actions/upload-artifact.
          # Archiving *.deb files in a tarball to avoid issues with uploading.
          tar cf artifacts/debs-${{ matrix.target_arch.TERMUX_ARCH }}-${{ github.sha }}.tar debs
          rm -rf output
      - name: Checksums for built *.deb files
        if: always()
        run: |
          find debs -type f -name "*.deb" -exec sha256sum "{}" \; | sort -k2
      - name: Store *.deb files
        if: always()
        uses: actions/upload-artifact@v3
        with:
          name: debs-${{ matrix.target_arch.TERMUX_ARCH }}-${{ github.sha }}
          path: ./artifacts

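  # The upload job collects the per-architecture tarballs produced by the build
  # job, attaches them to the temporary "0.1" release of
  # termux-user-repository/tur, and then notifies the dists repository. It is
  # skipped for pull requests.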
  upload:
    if: github.event_name != 'pull_request'
    needs: build
    runs-on: ubuntu-latest
    steps:
      - name: Clone repository
        uses: actions/checkout@v3
      - name: Get *.deb files
        uses: actions/download-artifact@v3
        with:
          path: ./
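      # Without a "name" filter, actions/download-artifact fetches every artifact
      # from the build job into its own subdirectory, which is why the archives
      # are globbed as debs-*/debs-<arch>-<sha>.tar below.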
      - name: Upload to a temporary release
        env:
          GITHUB_TOKEN: ${{ secrets.GH_TOKEN }}
        run: |
          GITHUB_SHA=${{ github.sha }}
          # for archive in debs-*/debs-{aarch64,arm,i686,x86_64}-${{ github.sha }}.tar; do
          for archive in debs-*/debs-{arm,i686,x86_64}-${{ github.sha }}.tar; do
            gh release upload -R https://github.com/termux-user-repository/tur "0.1" $archive
            echo "$archive uploaded"
          done
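      # Send a repository_dispatch event of type "from_tur" to the dists
      # repository; the receiving workflow there is expected to pick up the
      # freshly uploaded *.deb files.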
      - name: Trigger workflow in dists repository
        env:
          GITHUB_TOKEN: ${{ secrets.GH_TOKEN }}
          EVENT: "from_tur"
          ORG: "termux-user-repository"
          REPO: "dists"
        run: |
          curl -d "{\"event_type\": \"${EVENT}\"}" -H "Content-Type: application/json" -H "Authorization: token ${GITHUB_TOKEN}" -H "Accept: application/vnd.github.everest-preview+json" "https://api.github.com/repos/${ORG}/${REPO}/dispatches"