Skip to content

Commit 1800843

Browse files
first changes after merge
1 parent f79689b commit 1800843

21 files changed

+792
-63
lines changed
Lines changed: 138 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,138 @@
# syntax=docker/dockerfile:1
# manylinux_2_28 wheel-builder image for ppc64le.
FROM quay.io/pypa/manylinux_2_28_ppc64le AS base

# Language variables
ENV LC_ALL=C.UTF-8 \
    LANG=C.UTF-8 \
    LANGUAGE=C.UTF-8

ARG DEVTOOLSET_VERSION=14

# Install required system dependencies.
# `update` and `install` share one layer so the repo metadata is never stale,
# and the yum cache is removed in the same layer so it does not bloat the image.
RUN yum -y install epel-release && \
    yum -y update && \
    yum install -y \
        atlas-devel \
        autoconf \
        automake \
        bison \
        blas-devel \
        bzip2 \
        cargo \
        cmake \
        curl \
        diffutils \
        file \
        gcc-toolset-${DEVTOOLSET_VERSION}-binutils \
        gcc-toolset-${DEVTOOLSET_VERSION}-gcc \
        gcc-toolset-${DEVTOOLSET_VERSION}-gcc-c++ \
        gcc-toolset-${DEVTOOLSET_VERSION}-gcc-gfortran \
        git \
        lapack-devel \
        less \
        libgomp \
        libjpeg-devel \
        libxml2-devel \
        libxslt-devel \
        libzstd-devel \
        llvm-devel \
        make \
        ninja-build \
        openblas-devel \
        openssl-devel \
        patch \
        perl \
        python3-virtualenv \
        python3.12-cryptography \
        python3.12-devel \
        python3.12-numpy \
        python3.12-pip \
        python3.12-pyyaml \
        python3.12-setuptools \
        python3.12-wheel \
        rust \
        sudo \
        unzip \
        util-linux \
        valgrind \
        wget \
        which \
        xz \
        yasm \
        zstd && \
    yum clean all && \
    rm -rf /var/cache/yum

# Put the gcc-toolset compilers ahead of the system toolchain.
ENV PATH=/opt/rh/gcc-toolset-${DEVTOOLSET_VERSION}/root/usr/bin:$PATH
ENV LD_LIBRARY_PATH=/opt/rh/gcc-toolset-${DEVTOOLSET_VERSION}/root/usr/lib64:/opt/rh/gcc-toolset-${DEVTOOLSET_VERSION}/root/usr/lib:$LD_LIBRARY_PATH

# Configure git to avoid safe directory issues
RUN git config --global --add safe.directory "*"

# The Python shipped with the base image lacks development parts.
# Remove it here; the `python` stage rebuilds CPython from scratch.
RUN /bin/rm -rf /opt/_internal /opt/python /usr/local/*/*

FROM base AS patchelf

# Fetch the patchelf install script from the PyTorch fork instead of
# `ADD ./common/install_patchelf.sh` (no local build context available).
# `mkdir -p` is required: /build_scripts does not exist in the base image,
# so the `cp` below would otherwise fail.
RUN git clone --depth 1 --branch temp-gha-runner-v2 https://github.com/sandeepgupta12/pytorch.git /tmp/pytorch && \
    mkdir -p /build_scripts && \
    cp /tmp/pytorch/.ci/docker/common/install_patchelf.sh /build_scripts/install_patchelf.sh
RUN bash /build_scripts/install_patchelf.sh && rm -r /build_scripts
RUN cp $(which patchelf) /patchelf

FROM patchelf AS python

# Copy only the required build scripts from the clone made in the patchelf
# stage (this stage inherits its filesystem), then drop the clone.
RUN mkdir -p /build_scripts && \
    cp -r /tmp/pytorch/.ci/docker/manywheel/build_scripts/* /build_scripts/ && \
    cp /tmp/pytorch/.ci/docker/common/install_cpython.sh /build_scripts/install_cpython.sh && \
    rm -rf /tmp/pytorch

# build_scripts/build.sh expects SSL_CERT_FILE to be empty while it rebuilds
# CPython; it is only set for this stage, not the final image.
ENV SSL_CERT_FILE=
RUN bash /build_scripts/build.sh && rm -r /build_scripts

FROM base AS final
COPY --from=python /opt/python /opt/python
COPY --from=python /opt/_internal /opt/_internal
# NOTE(review): auditwheel is taken from the cp39 interpreter while the rest
# of the image targets Python 3.12 — confirm cp39-cp39 is among the versions
# built by build_scripts/build.sh.
COPY --from=python /opt/python/cp39-cp39/bin/auditwheel /usr/local/bin/auditwheel
COPY --from=patchelf /usr/local/bin/patchelf /usr/local/bin/patchelf

# Make python/python3 resolve to the system Python 3.12.
RUN alternatives --set python /usr/bin/python3.12 && \
    alternatives --set python3 /usr/bin/python3.12

RUN pip-3.12 install typing_extensions

# Install test dependencies; clean the dnf cache in the same layer.
RUN dnf install -y \
        patch \
        protobuf-c-devel \
        protobuf-devel \
        protobuf-lite-devel \
        wget && \
    dnf clean all

# Set default entrypoint
ENTRYPOINT []
CMD ["/bin/bash"]

.ci/docker/manywheel/build.sh

Lines changed: 7 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -55,6 +55,12 @@ case ${image} in
5555
DOCKER_GPU_BUILD_ARG=""
5656
MANY_LINUX_VERSION="s390x"
5757
;;
58+
manylinuxppc64le-builder:cpu-ppc64le)
59+
TARGET=final
60+
GPU_IMAGE=redhat/ubi9
61+
DOCKER_GPU_BUILD_ARG=""
62+
MANY_LINUX_VERSION="ppc64le"
63+
;;
5864
manylinux2_28-builder:cuda11*)
5965
TARGET=cuda_final
6066
GPU_IMAGE=amd64/almalinux:8
@@ -102,7 +108,7 @@ if [[ -n ${MANY_LINUX_VERSION} && -z ${DOCKERFILE_SUFFIX} ]]; then
102108
DOCKERFILE_SUFFIX=_${MANY_LINUX_VERSION}
103109
fi
104110
# Only activate this if in CI
105-
if [ "$(uname -m)" != "s390x" ] && [ -v CI ]; then
111+
if [ "$(uname -m)" != "s390x" ] && [ "$(uname -m)" != "ppc64le" ] && [ -v CI ]; then
106112
# TODO: Remove LimitNOFILE=1048576 patch once https://github.com/pytorch/test-infra/issues/5712
107113
# is resolved. This patch is required in order to fix timing out of Docker build on Amazon Linux 2023.
108114
sudo sed -i s/LimitNOFILE=infinity/LimitNOFILE=1048576/ /usr/lib/systemd/system/docker.service

.ci/docker/manywheel/build_scripts/build.sh

Lines changed: 32 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -20,7 +20,7 @@ AUTOCONF_HASH=954bd69b391edc12d6a4a51a2dd1476543da5c6bbf05a95b59dc0dd6fd4c2969
2020
# the final image after compiling Python
2121
PYTHON_COMPILE_DEPS="zlib-devel bzip2-devel ncurses-devel sqlite-devel readline-devel tk-devel gdbm-devel libpcap-devel xz-devel libffi-devel"
2222

23-
if [ "$(uname -m)" != "s390x" ] ; then
23+
if [ "$(uname -m)" != "s390x" ] && [ "$(uname -m)" != "ppc64le" ] ; then
2424
PYTHON_COMPILE_DEPS="${PYTHON_COMPILE_DEPS} db4-devel"
2525
else
2626
PYTHON_COMPILE_DEPS="${PYTHON_COMPILE_DEPS} libdb-devel"
@@ -39,7 +39,37 @@ yum -y install bzip2 make git patch unzip bison yasm diffutils \
3939
${PYTHON_COMPILE_DEPS}
4040

4141
# Install newest autoconf
42-
build_autoconf $AUTOCONF_ROOT $AUTOCONF_HASH
42+
# If the architecture is not ppc64le, use the existing build_autoconf function
43+
if [ "$(uname -m)" != "ppc64le" ] ; then
44+
build_autoconf $AUTOCONF_ROOT $AUTOCONF_HASH
45+
else
46+
# Download and extract Autoconf
47+
curl -sLO https://ftp.gnu.org/gnu/autoconf/$AUTOCONF_ROOT.tar.gz
48+
49+
# Verify the integrity of the downloaded file using SHA-256 checksum
50+
echo "$AUTOCONF_HASH $AUTOCONF_ROOT.tar.gz" | sha256sum -c -
51+
52+
# Extract the downloaded tarball
53+
tar -xzf $AUTOCONF_ROOT.tar.gz
54+
cd $AUTOCONF_ROOT
55+
56+
# Update config.guess and config.sub scripts to ensure proper architecture detection
57+
curl -sLo build-aux/config.guess https://git.savannah.gnu.org/cgit/config.git/plain/config.guess
58+
curl -sLo build-aux/config.sub https://git.savannah.gnu.org/cgit/config.git/plain/config.sub
59+
60+
chmod +x build-aux/config.guess build-aux/config.sub
61+
62+
# Configure the Autoconf build system with the correct host type for ppc64le
63+
./configure --host=powerpc64le-pc-linux-gnu
64+
65+
# Build and install
66+
make -j$(nproc)
67+
make install
68+
69+
# Clean up
70+
cd ..
71+
rm -rf $AUTOCONF_ROOT $AUTOCONF_ROOT.tar.gz
72+
fi
4373
autoconf --version
4474

4575
# Compile the latest Python releases.

.ci/docker/manywheel/build_scripts/manylinux1-check.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,7 @@ def is_manylinux1_compatible():
55
# Only Linux, and only x86-64 / i686
66
from distutils.util import get_platform
77

8-
if get_platform() not in ["linux-x86_64", "linux-i686", "linux-s390x"]:
8+
if get_platform() not in ["linux-x86_64", "linux-i686", "linux-s390x", "linux-ppc64le"]:
99
return False
1010

1111
# Check for presence of _manylinux module

.ci/manywheel/build.sh

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -15,7 +15,7 @@ case "${GPU_ARCH_TYPE:-BLANK}" in
1515
rocm)
1616
bash "${SCRIPTPATH}/build_rocm.sh"
1717
;;
18-
cpu | cpu-cxx11-abi | cpu-s390x)
18+
cpu | cpu-cxx11-abi | cpu-s390x | cpu-ppc64le)
1919
bash "${SCRIPTPATH}/build_cpu.sh"
2020
;;
2121
xpu)

.ci/manywheel/build_common.sh

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -367,7 +367,7 @@ for pkg in /$WHEELHOUSE_DIR/torch_no_python*.whl /$WHEELHOUSE_DIR/torch*linux*.w
367367
done
368368

369369
# create Manylinux 2_28 tag this needs to happen before regenerate the RECORD
370-
if [[ $PLATFORM == "manylinux_2_28_x86_64" && $GPU_ARCH_TYPE != "cpu-s390x" && $GPU_ARCH_TYPE != "xpu" ]]; then
370+
if [[ $PLATFORM == "manylinux_2_28_x86_64" && $GPU_ARCH_TYPE != "cpu-s390x" && $GPU_ARCH_TYPE != "cpu-ppc64le" && $GPU_ARCH_TYPE != "xpu" ]]; then
371371
wheel_file=$(echo $(basename $pkg) | sed -e 's/-cp.*$/.dist-info\/WHEEL/g')
372372
sed -i -e s#linux_x86_64#"${PLATFORM}"# $wheel_file;
373373
fi
@@ -412,7 +412,7 @@ for pkg in /$WHEELHOUSE_DIR/torch_no_python*.whl /$WHEELHOUSE_DIR/torch*linux*.w
412412
fi
413413

414414
# Rename wheel for Manylinux 2_28
415-
if [[ $PLATFORM == "manylinux_2_28_x86_64" && $GPU_ARCH_TYPE != "cpu-s390x" && $GPU_ARCH_TYPE != "xpu" ]]; then
415+
if [[ $PLATFORM == "manylinux_2_28_x86_64" && $GPU_ARCH_TYPE != "cpu-s390x" && $GPU_ARCH_TYPE != "cpu-ppc64le" && $GPU_ARCH_TYPE != "xpu" ]]; then
416416
pkg_name=$(echo $(basename $pkg) | sed -e s#linux_x86_64#"${PLATFORM}"#)
417417
zip -rq $pkg_name $PREIX*
418418
rm -f $pkg

.ci/manywheel/build_cpu.sh

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -36,6 +36,8 @@ elif [[ "$OS_NAME" == *"AlmaLinux"* ]]; then
3636
elif [[ "$OS_NAME" == *"Ubuntu"* ]]; then
3737
if [[ "$(uname -m)" == "s390x" ]]; then
3838
LIBGOMP_PATH="/usr/lib/s390x-linux-gnu/libgomp.so.1"
39+
elif [[ "$(uname -m)" == "ppc64le" ]]; then
40+
LIBGOMP_PATH="/usr/lib64/libgomp.so.1"
3941
else
4042
LIBGOMP_PATH="/usr/lib/x86_64-linux-gnu/libgomp.so.1"
4143
fi

.ci/pytorch/build.sh

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -237,7 +237,7 @@ fi
237237

238238
# Do not change workspace permissions for ROCm and s390x CI jobs
239239
# as it can leave workspace with bad permissions for cancelled jobs
240-
if [[ "$BUILD_ENVIRONMENT" != *rocm* && "$BUILD_ENVIRONMENT" != *s390x* && -d /var/lib/jenkins/workspace ]]; then
240+
if [[ "$BUILD_ENVIRONMENT" != *rocm* && "$BUILD_ENVIRONMENT" != *s390x* && "$BUILD_ENVIRONMENT" != *ppc64le* && -d /var/lib/jenkins/workspace ]]; then
241241
# Workaround for dind-rootless userid mapping (https://github.com/pytorch/ci-infra/issues/96)
242242
WORKSPACE_ORIGINAL_OWNER_ID=$(stat -c '%u' "/var/lib/jenkins/workspace")
243243
cleanup_workspace() {
@@ -416,6 +416,6 @@ if [[ "$BUILD_ENVIRONMENT" != *libtorch* && "$BUILD_ENVIRONMENT" != *bazel* ]];
416416
python tools/stats/export_test_times.py
417417
fi
418418
# don't do this for bazel or s390x as they don't use sccache
419-
if [[ "$BUILD_ENVIRONMENT" != *s390x* && "$BUILD_ENVIRONMENT" != *-bazel-* ]]; then
419+
if [[ "$BUILD_ENVIRONMENT" != *s390x* && "$BUILD_ENVIRONMENT" != *ppc64le* && "$BUILD_ENVIRONMENT" != *-bazel-* ]]; then
420420
print_sccache_stats
421421
fi

.ci/pytorch/check_binary.sh

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -184,7 +184,7 @@ fi
184184
if [[ "$PACKAGE_TYPE" == 'libtorch' ]]; then
185185
echo "Checking that MKL is available"
186186
build_and_run_example_cpp check-torch-mkl
187-
elif [[ "$(uname -m)" != "arm64" && "$(uname -m)" != "s390x" ]]; then
187+
elif [[ "$(uname -m)" != "arm64" && "$(uname -m)" != "s390x" && "$(uname -m)" != "ppc64le" ]]; then
188188
if [[ "$(uname)" != 'Darwin' || "$PACKAGE_TYPE" != *wheel ]]; then
189189
if [[ "$(uname -m)" == "aarch64" ]]; then
190190
echo "Checking that MKLDNN is available on aarch64"
@@ -208,7 +208,7 @@ if [[ "$PACKAGE_TYPE" == 'libtorch' ]]; then
208208
echo "Checking that XNNPACK is available"
209209
build_and_run_example_cpp check-torch-xnnpack
210210
else
211-
if [[ "$(uname)" != 'Darwin' || "$PACKAGE_TYPE" != *wheel ]] && [[ "$(uname -m)" != "s390x" ]]; then
211+
if [[ "$(uname)" != 'Darwin' || "$PACKAGE_TYPE" != *wheel ]] && [[ "$(uname -m)" != "s390x" && "$(uname -m)" != "ppc64le" ]]; then
212212
echo "Checking that XNNPACK is available"
213213
pushd /tmp
214214
python -c 'import torch.backends.xnnpack; exit(0 if torch.backends.xnnpack.enabled else 1)'
@@ -237,7 +237,7 @@ if [[ "$OSTYPE" == "msys" ]]; then
237237
fi
238238

239239
# Test that CUDA builds are setup correctly
240-
if [[ "$DESIRED_CUDA" != 'cpu' && "$DESIRED_CUDA" != 'xpu' && "$DESIRED_CUDA" != 'cpu-cxx11-abi' && "$DESIRED_CUDA" != *"rocm"* && "$(uname -m)" != "s390x" ]]; then
240+
if [[ "$DESIRED_CUDA" != 'cpu' && "$DESIRED_CUDA" != 'xpu' && "$DESIRED_CUDA" != 'cpu-cxx11-abi' && "$DESIRED_CUDA" != *"rocm"* && "$(uname -m)" != "s390x" && "$(uname -m)" != "ppc64le" ]]; then
241241
if [[ "$PACKAGE_TYPE" == 'libtorch' ]]; then
242242
build_and_run_example_cpp check-torch-cuda
243243
else
@@ -311,7 +311,7 @@ if [[ "$(uname)" == 'Linux' && "$PACKAGE_TYPE" == 'manywheel' ]]; then
311311
# gcc 11 - CUDA 11.8, xpu, rocm
312312
# gcc 13 - CUDA 12.6, 12.8 and cpu
313313
# Please see issue for reference: https://github.com/pytorch/pytorch/issues/152426
314-
if [[ "$(uname -m)" == "s390x" ]]; then
314+
if [[ "$(uname -m)" == "s390x" || "$(uname -m)" == "ppc64le" ]]; then
315315
cxx_abi="19"
316316
elif [[ "$DESIRED_CUDA" != 'xpu' && "$DESIRED_CUDA" != 'rocm'* ]]; then
317317
cxx_abi="18"

.github/actions/test-pytorch-binary/action.yml

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -34,7 +34,7 @@ runs:
3434
)
3535
3636
echo "CONTAINER_NAME=${container_name}" >> "$GITHUB_ENV"
37-
if [[ "${GPU_ARCH_TYPE}" != "rocm" && "${BUILD_ENVIRONMENT}" != "linux-aarch64-binary-manywheel" && "${BUILD_ENVIRONMENT}" != "linux-s390x-binary-manywheel" && "${GPU_ARCH_TYPE}" != "xpu" ]]; then
37+
if [[ "${GPU_ARCH_TYPE}" != "rocm" && "${BUILD_ENVIRONMENT}" != "linux-aarch64-binary-manywheel" && "${BUILD_ENVIRONMENT}" != "linux-s390x-binary-manywheel" && "${BUILD_ENVIRONMENT}" != "linux-ppc64le-binary-manywheel" && "${GPU_ARCH_TYPE}" != "xpu" ]]; then
3838
# Propagate download.pytorch.org IP to container. This is only needed on Linux non aarch64 runner
3939
grep download.pytorch.org /etc/hosts | docker exec -i "${container_name}" bash -c "/bin/cat >> /etc/hosts"
4040
fi
@@ -45,9 +45,9 @@ runs:
4545
docker exec -t "${container_name}" bash -c "source ${BINARY_ENV_FILE} && bash -x /run.sh"
4646
4747
- name: Cleanup docker
48-
if: always() && (env.BUILD_ENVIRONMENT == 'linux-s390x-binary-manywheel' || env.GPU_ARCH_TYPE == 'xpu')
48+
if: always() && (env.BUILD_ENVIRONMENT == 'linux-s390x-binary-manywheel' || env.BUILD_ENVIRONMENT == 'linux-ppc64le-binary-manywheel' || env.GPU_ARCH_TYPE == 'xpu')
4949
shell: bash
5050
run: |
51-
# on s390x or xpu stop the container for clean worker stop
51+
# on s390x or ppc64le or xpu stop the container for clean worker stop
5252
# shellcheck disable=SC2046
5353
docker stop "${{ env.CONTAINER_NAME }}" || true

0 commit comments

Comments
 (0)