changed branch in workflow 2 #2
| name: linux-binary-build | ||
| on: | ||
| workflow_call: | ||
| inputs: | ||
| build_name: | ||
| required: true | ||
| type: string | ||
| description: The build's name | ||
| build_environment: | ||
| required: true | ||
| type: string | ||
| description: The build environment | ||
| runner_prefix: | ||
| required: false | ||
| default: "" | ||
| type: string | ||
| description: prefix for runner label | ||
| runs_on: | ||
| required: false | ||
| default: linux.12xlarge.memory.ephemeral | ||
| type: string | ||
| description: Hardware to run this "build" job on, e.g. linux.12xlarge or linux.arm64.2xlarge. | ||
| timeout-minutes: | ||
| required: false | ||
| default: 240 | ||
| type: number | ||
| description: timeout for the job | ||
| ALPINE_IMAGE: | ||
| required: false | ||
| type: string | ||
| default: "308535385114.dkr.ecr.us-east-1.amazonaws.com/tool/alpine" | ||
| description: Alpine image to use | ||
| PYTORCH_ROOT: | ||
| required: true | ||
| type: string | ||
| description: Root directory for the pytorch/pytorch repository | ||
| PACKAGE_TYPE: | ||
| required: true | ||
| type: string | ||
| description: Package type | ||
| DESIRED_CUDA: | ||
| required: true | ||
| type: string | ||
| description: Desired CUDA version | ||
| GPU_ARCH_VERSION: | ||
| required: false | ||
| type: string | ||
| description: GPU Arch version | ||
| GPU_ARCH_TYPE: | ||
| required: true | ||
| type: string | ||
| description: GPU Arch type | ||
| DOCKER_IMAGE: | ||
| required: true | ||
| type: string | ||
| description: Docker image to use | ||
| DOCKER_IMAGE_TAG_PREFIX: | ||
| required: true | ||
| type: string | ||
| description: Docker image tag prefix to use | ||
| LIBTORCH_CONFIG: | ||
| required: false | ||
| type: string | ||
| description: Desired libtorch config (for libtorch builds only) | ||
| LIBTORCH_VARIANT: | ||
| required: false | ||
| type: string | ||
| description: Desired libtorch variant (for libtorch builds only) | ||
| DESIRED_PYTHON: | ||
| required: false | ||
| type: string | ||
| description: Desired Python version | ||
| PYTORCH_EXTRA_INSTALL_REQUIREMENTS: | ||
| required: false | ||
| type: string | ||
| description: Extra install requirements | ||
| default: "" | ||
| secrets: | ||
| github-token: | ||
| required: true | ||
| description: GitHub token | ||
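| # id-token: write lets the job request an OIDC token, which the configure-aws-credentials step below uses to assume the AWS IAM role | ||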
| permissions: | ||
| id-token: write | ||
| jobs: | ||
| build: | ||
| runs-on: ${{ inputs.runner_prefix }}${{ inputs.runs_on }} | ||
| timeout-minutes: ${{ inputs.timeout-minutes }} | ||
| env: | ||
| PYTORCH_ROOT: ${{ inputs.PYTORCH_ROOT }} | ||
| PACKAGE_TYPE: ${{ inputs.PACKAGE_TYPE }} | ||
| # TODO: This is a legacy variable that we eventually want to get rid of in | ||
| # favor of GPU_ARCH_VERSION | ||
| DESIRED_CUDA: ${{ inputs.DESIRED_CUDA }} | ||
| GPU_ARCH_VERSION: ${{ inputs.GPU_ARCH_VERSION }} | ||
| GPU_ARCH_TYPE: ${{ inputs.GPU_ARCH_TYPE }} | ||
| DOCKER_IMAGE: ${{ inputs.DOCKER_IMAGE }} | ||
| SKIP_ALL_TESTS: 1 | ||
| LIBTORCH_CONFIG: ${{ inputs.LIBTORCH_CONFIG }} | ||
| LIBTORCH_VARIANT: ${{ inputs.LIBTORCH_VARIANT }} | ||
| DESIRED_PYTHON: ${{ inputs.DESIRED_PYTHON }} | ||
| PYTORCH_EXTRA_INSTALL_REQUIREMENTS: ${{ inputs.PYTORCH_EXTRA_INSTALL_REQUIREMENTS }} | ||
| ALPINE_IMAGE: ${{ inputs.ALPINE_IMAGE }} | ||
| AWS_DEFAULT_REGION: us-east-1 | ||
| BINARY_ENV_FILE: /tmp/env | ||
| BUILD_ENVIRONMENT: ${{ inputs.build_environment }} | ||
| BUILD_NAME: ${{ inputs.build_name }} | ||
| GITHUB_TOKEN: ${{ secrets.github-token }} | ||
| PR_NUMBER: ${{ github.event.pull_request.number }} | ||
| PYTORCH_FINAL_PACKAGE_DIR: /artifacts | ||
| SHA1: ${{ github.event.pull_request.head.sha || github.sha }} | ||
| steps: | ||
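| # Everything appended to $GITHUB_ENV in this step is re-exported to all later steps of this job (secrets are intentionally left out) | ||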
| - name: Make the env permanent during this workflow (but not the secrets) | ||
| shell: bash | ||
| run: | | ||
| { | ||
| echo "PYTORCH_ROOT=${{ env.PYTORCH_ROOT }}" | ||
| echo "PACKAGE_TYPE=${{ env.PACKAGE_TYPE }}" | ||
| echo "DESIRED_CUDA=${{ env.DESIRED_CUDA }}" | ||
| echo "GPU_ARCH_VERSION=${{ env.GPU_ARCH_VERSION }}" | ||
| echo "GPU_ARCH_TYPE=${{ env.GPU_ARCH_TYPE }}" | ||
| echo "DOCKER_IMAGE=${{ env.DOCKER_IMAGE }}" | ||
| echo "SKIP_ALL_TESTS=${{ env.SKIP_ALL_TESTS }}" | ||
| echo "LIBTORCH_CONFIG=${{ env.LIBTORCH_CONFIG }}" | ||
| echo "LIBTORCH_VARIANT=${{ env.LIBTORCH_VARIANT }}" | ||
| echo "DESIRED_PYTHON=${{ env.DESIRED_PYTHON }}" | ||
| echo "PYTORCH_EXTRA_INSTALL_REQUIREMENTS=${{ env.PYTORCH_EXTRA_INSTALL_REQUIREMENTS }}" | ||
| echo "ALPINE_IMAGE=${{ env.ALPINE_IMAGE }}" | ||
| echo "AWS_DEFAULT_REGION=${{ env.AWS_DEFAULT_REGION }}" | ||
| echo "BINARY_ENV_FILE=${{ env.BINARY_ENV_FILE }}" | ||
| echo "BUILD_ENVIRONMENT=${{ env.BUILD_ENVIRONMENT }}" | ||
| echo "BUILD_NAME=${{ env.BUILD_NAME }}" | ||
| echo "PR_NUMBER=${{ env.PR_NUMBER }}" | ||
| echo "PYTORCH_FINAL_PACKAGE_DIR=${{ env.PYTORCH_FINAL_PACKAGE_DIR }}" | ||
| echo "SHA1=${{ env.SHA1 }}" | ||
| } >> "${GITHUB_ENV} }}" | ||
| - name: List the env | ||
| shell: bash | ||
| run: env | ||
| - name: "[FB EMPLOYEES] Enable SSH (Click me for login details)" | ||
| if: inputs.build_environment != 'linux-s390x-binary-manywheel' && inputs.build_environment != 'linux-ppc64le-binary-manywheel' | ||
| uses: pytorch/test-infra/.github/actions/setup-ssh@main | ||
| continue-on-error: true | ||
| with: | ||
| github-secret: ${{ secrets.github-token }} | ||
| - name: Checkout PyTorch | ||
| uses: pytorch/pytorch/.github/actions/checkout-pytorch@main | ||
| with: | ||
| no-sudo: ${{ inputs.build_environment == 'linux-aarch64-binary-manywheel' || inputs.build_environment == 'linux-s390x-binary-manywheel' || inputs.build_environment == 'linux-ppc64le-binary-manywheel' }} | ||
| - name: Setup Linux | ||
| if: inputs.build_environment != 'linux-s390x-binary-manywheel' && inputs.build_environment != 'linux-ppc64le-binary-manywheel' | ||
| uses: ./.github/actions/setup-linux | ||
| - name: Chown workspace | ||
| if: inputs.build_environment != 'linux-s390x-binary-manywheel' && inputs.build_environment != 'linux-ppc64le-binary-manywheel' | ||
| uses: ./.github/actions/chown-workspace | ||
| with: | ||
| ALPINE_IMAGE: ${{ inputs.ALPINE_IMAGE }} | ||
| - name: Clean workspace | ||
| shell: bash | ||
| run: | | ||
| set -eux | ||
| rm -rf "${GITHUB_WORKSPACE}" | ||
| mkdir "${GITHUB_WORKSPACE}" | ||
| if [[ ${{ inputs.build_environment }} == 'linux-aarch64-binary-manywheel' ]] || [[ ${{ inputs.build_environment }} == 'linux-s390x-binary-manywheel' ]] || [[ ${{ inputs.build_environment }} == 'linux-ppc64le-binary-manywheel' ]]; then | ||
| rm -rf "${RUNNER_TEMP}/artifacts" | ||
| mkdir "${RUNNER_TEMP}/artifacts" | ||
| fi | ||
| - name: Checkout PyTorch to pytorch dir | ||
| uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 | ||
| with: | ||
| ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }} | ||
| submodules: recursive | ||
| path: pytorch | ||
| show-progress: false | ||
| - name: Clean PyTorch checkout | ||
| run: | | ||
| # Remove any artifacts from the previous checkouts | ||
| git clean -fxd | ||
| working-directory: pytorch | ||
| - name: Check if the job is disabled | ||
| id: filter | ||
| uses: ./pytorch/.github/actions/filter-test-configs | ||
| with: | ||
| github-token: ${{ secrets.GITHUB_TOKEN }} | ||
| # NB: Use a mock test matrix with a default value here. After filtering, if the | ||
| # returned matrix is empty, it means that the job is disabled | ||
| test-matrix: | | ||
| { include: [ | ||
| { config: "default" }, | ||
| ]} | ||
| - name: configure aws credentials | ||
| id: aws_creds | ||
| if: ${{ steps.filter.outputs.is-test-matrix-empty == 'False' && inputs.build_environment != 'linux-s390x-binary-manywheel' && inputs.build_environment != 'linux-ppc64le-binary-manywheel' && startsWith(github.event.ref, 'refs/tags/ciflow/') }} | ||
| uses: aws-actions/configure-aws-credentials@ececac1a45f3b08a01d2dd070d28d111c5fe6722 # v4.1.0 | ||
| with: | ||
| role-to-assume: arn:aws:iam::308535385114:role/gha_workflow_s3_and_ecr_read_only | ||
| aws-region: us-east-1 | ||
| role-duration-seconds: 18000 | ||
| - name: Calculate docker image | ||
| id: calculate-docker-image | ||
| if: ${{ steps.filter.outputs.is-test-matrix-empty == 'False' && inputs.build_environment != 'linux-s390x-binary-manywheel' && inputs.build_environment != 'linux-ppc64le-binary-manywheel' }} | ||
| uses: pytorch/test-infra/.github/actions/calculate-docker-image@main | ||
| with: | ||
| # If doing this in main or release branch, use docker.io. Otherwise | ||
| # use ECR | ||
| docker-registry: ${{ startsWith(github.event.ref, 'refs/tags/ciflow/') && '308535385114.dkr.ecr.us-east-1.amazonaws.com' || 'docker.io' }} | ||
| docker-image-name: ${{ inputs.DOCKER_IMAGE }} | ||
| custom-tag-prefix: ${{ inputs.DOCKER_IMAGE_TAG_PREFIX }} | ||
| # The build.sh script in this folder is not actually the correct one, | ||
| # this is just needed for sha calculation | ||
| docker-build-dir: .ci/docker | ||
| working-directory: pytorch | ||
| - name: Pull Docker image | ||
| if: ${{ steps.filter.outputs.is-test-matrix-empty == 'False' && inputs.build_environment != 'linux-s390x-binary-manywheel' && inputs.build_environment != 'linux-ppc64le-binary-manywheel' }} | ||
| uses: pytorch/test-infra/.github/actions/pull-docker-image@main | ||
| with: | ||
| docker-image: ${{ steps.calculate-docker-image.outputs.docker-image }} | ||
| # - name: Build Docker image for ppc64le | ||
| # run: | | ||
| # docker build --network=host -f ./pytorch/.ci/docker/manywheel/Dockerfile_ppc64le -t pytorch/manylinuxppc64le-builder:cpu-ppc64le-main . | ||
| - name: Download Docker image artifact | ||
| uses: actions/download-artifact@v4 | ||
| with: | ||
| name: ppc64le-builder-image | ||
| path: /tmp | ||
| - name: Load Docker image | ||
| run: | | ||
| gunzip -c /tmp/ppc64le-builder.tar.gz | docker load | ||
| - name: Build PyTorch binary | ||
| if: ${{ steps.filter.outputs.is-test-matrix-empty == 'False' }} | ||
| env: | ||
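| # Fall back to "<DOCKER_IMAGE>:<DOCKER_IMAGE_TAG_PREFIX>" when the calculate-docker-image step was skipped (s390x/ppc64le builds) | ||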
| DOCKER_IMAGE: ${{ steps.calculate-docker-image.outputs.docker-image || format('{0}:{1}', inputs.DOCKER_IMAGE, inputs.DOCKER_IMAGE_TAG_PREFIX) }} | ||
| run: | | ||
| set -x | ||
| mkdir -p artifacts/ | ||
| container_name=$(docker run \ | ||
| -e BINARY_ENV_FILE \ | ||
| -e BUILD_ENVIRONMENT \ | ||
| -e DESIRED_CUDA \ | ||
| -e DESIRED_PYTHON \ | ||
| -e GITHUB_ACTIONS \ | ||
| -e GPU_ARCH_TYPE \ | ||
| -e GPU_ARCH_VERSION \ | ||
| -e LIBTORCH_VARIANT \ | ||
| -e PACKAGE_TYPE \ | ||
| -e PYTORCH_FINAL_PACKAGE_DIR \ | ||
| -e PYTORCH_ROOT \ | ||
| -e SKIP_ALL_TESTS \ | ||
| -e PYTORCH_EXTRA_INSTALL_REQUIREMENTS \ | ||
| --tty \ | ||
| --detach \ | ||
| -v "${GITHUB_WORKSPACE}/pytorch:/pytorch" \ | ||
| -v "${RUNNER_TEMP}/artifacts:/artifacts" \ | ||
| -w / \ | ||
| "${DOCKER_IMAGE}" | ||
| ) | ||
| docker exec -t -w "${PYTORCH_ROOT}" "${container_name}" bash -c "bash .circleci/scripts/binary_populate_env.sh" | ||
| # Unified build script for all architectures (x86_64, aarch64, s390x) | ||
| docker exec -t "${container_name}" bash -c "source ${BINARY_ENV_FILE} && bash /pytorch/.ci/${{ inputs.PACKAGE_TYPE }}/build.sh" | ||
| - name: Chown artifacts | ||
| if: ${{ steps.filter.outputs.is-test-matrix-empty == 'False' && inputs.build_environment != 'linux-s390x-binary-manywheel' && inputs.build_environment != 'linux-ppc64le-binary-manywheel' }} | ||
| shell: bash | ||
| run: | | ||
| # Ensure the working directory gets chowned back to the current user | ||
| docker run --rm -v "${RUNNER_TEMP}/artifacts:/v" -w /v "${ALPINE_IMAGE}" chown -R "$(id -u):$(id -g)" . | ||
| - uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4.4.0 | ||
| if: ${{ steps.filter.outputs.is-test-matrix-empty == 'False' }} | ||
| with: | ||
| name: ${{ inputs.build_name }} | ||
| if-no-files-found: error | ||
| path: | ||
| ${{ runner.temp }}/artifacts/* | ||
| - name: Teardown Linux | ||
| if: always() && inputs.build_environment != 'linux-s390x-binary-manywheel' && inputs.build_environment != 'linux-ppc64le-binary-manywheel' | ||
| uses: pytorch/test-infra/.github/actions/teardown-linux@main | ||
| - name: Chown workspace | ||
| if: always() && inputs.build_environment != 'linux-s390x-binary-manywheel' && inputs.build_environment != 'linux-ppc64le-binary-manywheel' | ||
| uses: ./pytorch/.github/actions/chown-workspace | ||
| with: | ||
| ALPINE_IMAGE: ${{ inputs.ALPINE_IMAGE }} | ||
| - name: Cleanup docker | ||
| if: always() && (inputs.build_environment == 'linux-s390x-binary-manywheel' || inputs.build_environment == 'linux-ppc64le-binary-manywheel') | ||
| shell: bash | ||
| run: | | ||
| # on s390x and ppc64le stop the container for clean worker stop | ||
| # ignore expansion of "docker ps -q" since it could be empty | ||
| # shellcheck disable=SC2046 | ||
| docker stop $(docker ps -q) || true | ||
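
This is a reusable workflow (`on: workflow_call`), so it only runs when another workflow calls it. For context, here is a minimal sketch of what a caller job might look like; the file path, job name, and all input values below are illustrative assumptions, not taken from this PR.

```yaml
# Hypothetical caller workflow (all names and values are placeholders)
name: hypothetical-caller
on: workflow_dispatch

jobs:
  manywheel-py3_10-cpu-build:
    permissions:
      id-token: write  # caller must grant this so the called workflow can use OIDC for AWS
    uses: ./.github/workflows/linux-binary-build.yml  # adjust to the actual workflow file path
    with:
      build_name: manywheel-py3_10-cpu
      build_environment: linux-binary-manywheel
      PYTORCH_ROOT: /pytorch
      PACKAGE_TYPE: manywheel
      DESIRED_CUDA: cpu
      GPU_ARCH_TYPE: cpu
      DOCKER_IMAGE: manylinux2_28-builder
      DOCKER_IMAGE_TAG_PREFIX: cpu
      DESIRED_PYTHON: "3.10"
    secrets:
      github-token: ${{ secrets.GITHUB_TOKEN }}
```

The caller supplies every `required: true` input and the `github-token` secret; optional inputs such as `runs_on`, `runner_prefix`, and `PYTORCH_EXTRA_INSTALL_REQUIREMENTS` fall back to the defaults declared above.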