Build ISO images

Workflow file for this run

---
name: Build disk images

on:
  workflow_dispatch:
    inputs:
      upload-to-s3:
        description: "Upload to S3"
        required: false
        default: false
        type: boolean
      platform:
        required: true
        type: choice
        options:
          - amd64
          - arm64
  pull_request:
    branches:
      - main
    paths:
      - './disk_config/disk.toml'
      - './disk_config/iso.toml'
      - './.github/workflows/build-disk.yml'

env:
  IMAGE_NAME: ${{ github.event.repository.name }} # output of build.yml, keep in sync
  IMAGE_REGISTRY: "ghcr.io/${{ github.repository_owner }}" # do not edit
  DEFAULT_TAG: "latest"
  BIB_IMAGE: "ghcr.io/lorbuschris/bootc-image-builder:20250608" # "quay.io/centos-bootc/bootc-image-builder:latest" - see https://github.com/osbuild/bootc-image-builder/pull/954

concurrency:
  group: ${{ github.workflow }}-${{ github.ref || github.run_id }}
  cancel-in-progress: true

jobs:
  build:
    name: Build disk images
    runs-on: ${{ inputs.platform == 'amd64' && 'ubuntu-latest-m' || 'ubuntu-24.04-arm' }}
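    # One matrix job per disk type: "qcow2" builds a VM disk image,
    # "anaconda-iso" builds an Anaconda-based installer ISO.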
    strategy:
      fail-fast: false
      matrix:
        disk-type: ["qcow2", "anaconda-iso"]
    permissions:
      contents: read
      packages: read
      id-token: write
    steps:
      - name: Prepare environment
        run: |
          USER_UID=$(id -u)
          USER_GID=$(id -g)
          # Concatenate the types with a hyphen
          DISK_TYPE=$(echo "${{ matrix.disk-type }}" | tr ' ' '-')
          # Lowercase the image uri
          echo "IMAGE_REGISTRY=${IMAGE_REGISTRY,,}" >> ${GITHUB_ENV}
          echo "IMAGE_NAME=${IMAGE_NAME,,}" >> ${GITHUB_ENV}
          echo "DISK_TYPE=${DISK_TYPE}" >> ${GITHUB_ENV}
          echo "USER_UID=${USER_UID}" >> ${GITHUB_ENV}
          echo "USER_GID=${USER_GID}" >> ${GITHUB_ENV}

      - name: Install dependencies
        if: inputs.platform == 'arm64'
        run: |
          set -x
          sudo apt update -y
          sudo apt install -y \
            podman

      - name: Maximize build space
        if: inputs.platform != 'arm64'
        uses: ublue-os/remove-unwanted-software@cc0becac701cf642c8f0a6613bbdaf5dc36b259e # v9
        with:
          remove-codeql: true

      - name: Checkout
        uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v5

      - name: Build disk images
        id: build
        uses: osbuild/bootc-image-builder-action@main
        with:
          builder-image: ${{ env.BIB_IMAGE }}
          config-file: ${{ matrix.disk-type == 'anaconda-iso' && './disk_config/iso.toml' || './disk_config/disk.toml' }}
          image: ${{ env.IMAGE_REGISTRY }}/${{ env.IMAGE_NAME }}:${{ env.DEFAULT_TAG }}
          chown: ${{ env.USER_UID }}:${{ env.USER_GID }}
          types: ${{ matrix.disk-type }}
          additional-args: --use-librepo=True

      - name: Upload disk images and Checksum to Job Artifacts
        if: inputs.upload-to-s3 != true && github.event_name != 'pull_request'
        uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v4
        with:
          path: ${{ steps.build.outputs.output-directory }}
          if-no-files-found: error
          retention-days: 0
          compression-level: 0
          overwrite: true
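
      # rclone reads its configuration from the RCLONE_CONFIG_S3_* environment
      # variables below, which together define a remote named "S3" used by the
      # copy command at the end of the step.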
      - name: Upload to S3
        if: inputs.upload-to-s3 == true && github.event_name != 'pull_request'
        shell: bash
        env:
          RCLONE_CONFIG_S3_TYPE: s3
          RCLONE_CONFIG_S3_PROVIDER: ${{ secrets.S3_PROVIDER }}
          RCLONE_CONFIG_S3_ACCESS_KEY_ID: ${{ secrets.S3_ACCESS_KEY_ID }}
          RCLONE_CONFIG_S3_SECRET_ACCESS_KEY: ${{ secrets.S3_SECRET_ACCESS_KEY }}
          RCLONE_CONFIG_S3_REGION: ${{ secrets.S3_REGION }}
          RCLONE_CONFIG_S3_ENDPOINT: ${{ secrets.S3_ENDPOINT }}
          SOURCE_DIR: ${{ steps.build.outputs.output-directory }}
        run: |
          sudo apt-get update
          sudo apt-get install -y rclone
          rclone copy "$SOURCE_DIR" S3:${{ secrets.S3_BUCKET_NAME }}
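
To trigger this workflow manually with the inputs declared under workflow_dispatch, one option is the GitHub CLI, assuming gh is installed and the file is saved as .github/workflows/build-disk.yml (the path used in the paths filter above):

    gh workflow run build-disk.yml -f platform=amd64 -f upload-to-s3=true

The platform input picks the runner architecture (amd64 or arm64); setting upload-to-s3 to true switches the final step from uploading job artifacts to copying the output directory to the S3 bucket with rclone.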