
Commit 9fe50c4

hcho3, trivialfis, and razdoburdin authored
[backport] [CI] Build a CPU-only wheel under name xgboost-cpu (dmlc#10603) (dmlc#10614)
* [CI] Build a CPU-only wheel under name `xgboost-cpu` (dmlc#10603)
* [CI] Fix test environment. (dmlc#10609)
  * [CI] Fix test environment.
  * Remove shell.
  * Remove.
  * Update Dockerfile.i386
* replace channel for sycl dependencies (dmlc#10576)
  Co-authored-by: Dmitry Razdoburdin <>
* Optionally skip cupy on windows. (dmlc#10611)

---------

Co-authored-by: Jiaming Yuan <[email protected]>
Co-authored-by: Dmitry Razdoburdin <[email protected]>
1 parent ea7bd91 commit 9fe50c4

16 files changed: +209 −118 lines

.github/workflows/i386.yml

Lines changed: 1 addition & 1 deletion
@@ -23,7 +23,7 @@ jobs:
       with:
         submodules: 'true'
     - name: Set up Docker Buildx
-      uses: docker/setup-buildx-action@v3
+      uses: docker/setup-buildx-action@v3.4.0
       with:
         driver-opts: network=host
     - name: Build and push container

dev/release-artifacts.py

Lines changed: 14 additions & 0 deletions
@@ -2,6 +2,7 @@

 tqdm, sh are required to run this script.
 """
+
 import argparse
 import os
 import shutil
@@ -106,6 +107,15 @@ def make_pysrc_wheel(
     if not os.path.exists(dist):
         os.mkdir(dist)

+    # Apply patch to remove NCCL dependency
+    # Save the original content of pyproject.toml so that we can restore it later
+    with DirectoryExcursion(ROOT):
+        with open("python-package/pyproject.toml", "r") as f:
+            orig_pyproj_lines = f.read()
+        with open("tests/buildkite/remove_nccl_dep.patch", "r") as f:
+            patch_lines = f.read()
+        subprocess.run(["patch", "-p0"], input=patch_lines, text=True)
+
     with DirectoryExcursion(os.path.join(ROOT, "python-package")):
         subprocess.check_call(["python", "-m", "build", "--sdist"])
         if rc is not None:
@@ -117,6 +127,10 @@ def make_pysrc_wheel(
         target = os.path.join(dist, name)
         shutil.move(src, target)

+    with DirectoryExcursion(ROOT):
+        with open("python-package/pyproject.toml", "w") as f:
+            print(orig_pyproj_lines, file=f, end="")
+

 def download_py_packages(
     branch: str, major: int, minor: int, commit_hash: str, outdir: str
doc/install.rst

Lines changed: 13 additions & 0 deletions
@@ -76,6 +76,19 @@ Capabilities of binary wheels for each platform:
 | Windows             | |tick|  | |cross|              |
 +---------------------+---------+----------------------+

+Minimal installation (CPU-only)
+*******************************
+The default installation with ``pip`` installs the full XGBoost package, including support for the GPU algorithms and federated learning.
+
+You may choose to reduce the size of the installed package and save disk space by installing ``xgboost-cpu`` instead:
+
+.. code-block:: bash
+
+  pip install xgboost-cpu
+
+The ``xgboost-cpu`` variant has a drastically smaller disk footprint, but does not provide some features, such as the GPU algorithms and
+federated learning.
+
 Conda
 *****
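
In case it helps to see the CPU-only wheel in action: the sketch below assumes ``xgboost-cpu`` has been installed and that it exposes the usual ``xgboost`` import name (as the packaging patch later in this commit suggests); the synthetic data and parameters are arbitrary.

# Illustrative check after `pip install xgboost-cpu`: plain CPU training works as usual.
import numpy as np
import xgboost as xgb

rng = np.random.RandomState(0)
X, y = rng.randn(256, 8), rng.randn(256)
dtrain = xgb.DMatrix(X, label=y)
params = {"tree_method": "hist", "objective": "reg:squarederror"}
booster = xgb.train(params, dtrain, num_boost_round=10)
print(booster.predict(dtrain)[:5])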

python-package/pyproject.toml

Lines changed: 3 additions & 3 deletions
@@ -7,13 +7,13 @@ build-backend = "packager.pep517"

 [project]
 name = "xgboost"
-version = "2.1.0"
+description = "XGBoost Python Package"
+readme = { file = "README.rst", content-type = "text/x-rst" }
 authors = [
     { name = "Hyunsu Cho", email = "[email protected]" },
     { name = "Jiaming Yuan", email = "[email protected]" }
 ]
-description = "XGBoost Python Package"
-readme = { file = "README.rst", content-type = "text/x-rst" }
+version = "2.1.0"
 requires-python = ">=3.8"
 license = { text = "Apache-2.0" }
 classifiers = [

python-package/xgboost/testing/__init__.py

Lines changed: 11 additions & 29 deletions
@@ -45,6 +45,7 @@
     get_cancer,
     get_digits,
     get_sparse,
+    make_batches,
     memory,
 )

@@ -161,7 +162,16 @@ def no_cudf() -> PytestSkip:


 def no_cupy() -> PytestSkip:
-    return no_mod("cupy")
+    skip_cupy = no_mod("cupy")
+    if not skip_cupy["condition"] and system() == "Windows":
+        import cupy as cp
+
+        # Cupy might run into issue on Windows due to missing compiler
+        try:
+            cp.array([1, 2, 3]).sum()
+        except Exception:  # pylint: disable=broad-except
+            skip_cupy["condition"] = True
+    return skip_cupy


 def no_dask_cudf() -> PytestSkip:
@@ -247,34 +257,6 @@ def as_arrays(
     return X, y, w


-def make_batches(
-    n_samples_per_batch: int,
-    n_features: int,
-    n_batches: int,
-    use_cupy: bool = False,
-    *,
-    vary_size: bool = False,
-) -> Tuple[List[np.ndarray], List[np.ndarray], List[np.ndarray]]:
-    X = []
-    y = []
-    w = []
-    if use_cupy:
-        import cupy
-
-        rng = cupy.random.RandomState(1994)
-    else:
-        rng = np.random.RandomState(1994)
-    for i in range(n_batches):
-        n_samples = n_samples_per_batch + i * 10 if vary_size else n_samples_per_batch
-        _X = rng.randn(n_samples, n_features)
-        _y = rng.randn(n_samples)
-        _w = rng.uniform(low=0, high=1, size=n_samples)
-        X.append(_X)
-        y.append(_y)
-        w.append(_w)
-    return X, y, w
-
-
 def make_regression(
     n_samples: int, n_features: int, use_cupy: bool
 ) -> Tuple[ArrayLike, ArrayLike, ArrayLike]:
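
Since the reworked no_cupy() still returns a PytestSkip mapping (condition plus reason), it can keep being unpacked into a skip marker. A small sketch of that usage follows; the test name is hypothetical.

# Sketch: consuming the PytestSkip result; on Windows the marker now also skips
# when cupy imports but cannot actually execute a kernel.
import pytest
from xgboost import testing as tm

@pytest.mark.skipif(**tm.no_cupy())
def test_sum_on_gpu():  # hypothetical test
    import cupy as cp

    assert int(cp.array([1, 2, 3]).sum()) == 6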

python-package/xgboost/testing/data.py

Lines changed: 31 additions & 0 deletions
@@ -9,6 +9,7 @@
     Callable,
     Dict,
     Generator,
+    List,
     NamedTuple,
     Optional,
     Tuple,
@@ -501,6 +502,36 @@ def get_mq2008(
     )


+def make_batches(  # pylint: disable=too-many-arguments,too-many-locals
+    n_samples_per_batch: int,
+    n_features: int,
+    n_batches: int,
+    use_cupy: bool = False,
+    *,
+    vary_size: bool = False,
+    random_state: int = 1994,
+) -> Tuple[List[np.ndarray], List[np.ndarray], List[np.ndarray]]:
+    """Make batches of dense data."""
+    X = []
+    y = []
+    w = []
+    if use_cupy:
+        import cupy  # pylint: disable=import-error
+
+        rng = cupy.random.RandomState(random_state)
+    else:
+        rng = np.random.RandomState(random_state)
+    for i in range(n_batches):
+        n_samples = n_samples_per_batch + i * 10 if vary_size else n_samples_per_batch
+        _X = rng.randn(n_samples, n_features)
+        _y = rng.randn(n_samples)
+        _w = rng.uniform(low=0, high=1, size=n_samples)
+        X.append(_X)
+        y.append(_y)
+        w.append(_w)
+    return X, y, w
+
+
 RelData = Tuple[sparse.csr_matrix, npt.NDArray[np.int32], npt.NDArray[np.int32]]
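
A brief usage sketch of the relocated helper, matching the signature added above; the batch sizes and feature count are arbitrary.

# Usage sketch for make_batches after the move to xgboost.testing.data.
from xgboost.testing.data import make_batches

X, y, w = make_batches(n_samples_per_batch=32, n_features=4, n_batches=3, vary_size=True)
assert len(X) == len(y) == len(w) == 3
# With vary_size=True, batch i has n_samples_per_batch + 10 * i rows.
assert [batch.shape[0] for batch in X] == [32, 42, 52]
assert X[0].shape[1] == 4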

tests/buildkite/build-manylinux2014-aarch64.sh

Lines changed: 0 additions & 33 deletions
This file was deleted.

tests/buildkite/build-manylinux2014-x86_64.sh

Lines changed: 0 additions & 33 deletions
This file was deleted.
tests/buildkite/build-manylinux2014.sh

Lines changed: 63 additions & 0 deletions
@@ -0,0 +1,63 @@
#!/bin/bash

set -euo pipefail

if [ $# -ne 1 ]; then
  echo "Usage: $0 {x86_64,aarch64}"
  exit 1
fi

arch=$1

source tests/buildkite/conftest.sh

WHEEL_TAG="manylinux2014_${arch}"
command_wrapper="tests/ci_build/ci_build.sh ${WHEEL_TAG}"
python_bin="/opt/python/cp310-cp310/bin/python"

echo "--- Build binary wheel for ${WHEEL_TAG}"
# Patch to add warning about manylinux2014 variant
patch -p0 < tests/buildkite/remove_nccl_dep.patch
patch -p0 < tests/buildkite/manylinux2014_warning.patch
$command_wrapper bash -c \
  "cd python-package && ${python_bin} -m pip wheel --no-deps -v . --wheel-dir dist/"
git checkout python-package/pyproject.toml python-package/xgboost/core.py  # discard the patch

$command_wrapper auditwheel repair --plat ${WHEEL_TAG} python-package/dist/*.whl
$command_wrapper ${python_bin} tests/ci_build/rename_whl.py \
  --wheel-path wheelhouse/*.whl \
  --commit-hash ${BUILDKITE_COMMIT} \
  --platform-tag ${WHEEL_TAG}
rm -rf python-package/dist/
mkdir python-package/dist/
mv -v wheelhouse/*.whl python-package/dist/

echo "--- Build binary wheel for ${WHEEL_TAG} (CPU only)"
# Patch to rename pkg to xgboost-cpu
patch -p0 < tests/buildkite/remove_nccl_dep.patch
patch -p0 < tests/buildkite/cpu_only_pypkg.patch
$command_wrapper bash -c \
  "cd python-package && ${python_bin} -m pip wheel --no-deps -v . --wheel-dir dist/"
git checkout python-package/pyproject.toml  # discard the patch

$command_wrapper auditwheel repair --plat ${WHEEL_TAG} python-package/dist/xgboost_cpu-*.whl
$command_wrapper ${python_bin} tests/ci_build/rename_whl.py \
  --wheel-path wheelhouse/xgboost_cpu-*.whl \
  --commit-hash ${BUILDKITE_COMMIT} \
  --platform-tag ${WHEEL_TAG}
rm -v python-package/dist/xgboost_cpu-*.whl
mv -v wheelhouse/xgboost_cpu-*.whl python-package/dist/

echo "--- Upload Python wheel"
for wheel in python-package/dist/*.whl
do
  buildkite-agent artifact upload "${wheel}"
done
if [[ ($is_pull_request == 0) && ($is_release_branch == 1) ]]
then
  for wheel in python-package/dist/*.whl
  do
    aws s3 cp "${wheel}" s3://xgboost-nightly-builds/${BRANCH_NAME}/ \
      --acl public-read --no-progress
  done
fi
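
One detail in the script above: the CPU-only wheel is globbed as xgboost_cpu-*.whl even though the project is named xgboost-cpu, because wheel filenames use a normalized, underscored form of the distribution name. The sketch below illustrates that normalization idea; it is an approximation, not the exact algorithm from the wheel specification.

# Rough sketch of why the glob is "xgboost_cpu-*.whl": wheel filenames replace
# runs of '-', '_' and '.' in the distribution name with a single underscore.
import re

def wheel_name_component(dist_name: str) -> str:
    return re.sub(r"[-_.]+", "_", dist_name)

assert wheel_name_component("xgboost-cpu") == "xgboost_cpu"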

tests/buildkite/cpu_only_pypkg.patch

Lines changed: 55 additions & 0 deletions
@@ -0,0 +1,55 @@
diff --git python-package/README.rst python-package/README.rst
index 1fc0bb5a0..f1c68470b 100644
--- python-package/README.rst
+++ python-package/README.rst
@@ -1,20 +1,15 @@
-======================
-XGBoost Python Package
-======================
+=================================
+XGBoost Python Package (CPU only)
+=================================

 |PyPI version|

-Installation
-============
+The ``xgboost-cpu`` package provides a minimal installation, with no support for the GPU algorithms
+or federated learning. It is provided to allow XGBoost to be installed in space-constrained
+environments.

-From `PyPI <https://pypi.python.org/pypi/xgboost>`_
----------------------------------------------------
+Note: the ``xgboost-cpu`` package is only provided for x86_64 (amd64) Linux and Windows platforms.
+For other platforms, please install ``xgboost`` from https://pypi.org/project/xgboost/.

-For a stable version, install using ``pip``::
-
-   pip install xgboost
-
-.. |PyPI version| image:: https://badge.fury.io/py/xgboost.svg
-   :target: http://badge.fury.io/py/xgboost
-
-For building from source, see `build <https://xgboost.readthedocs.io/en/latest/build.html>`_.
+Note: ``xgboost-cpu`` does not provide an sdist (source distribution). You may install the sdist
+from https://pypi.org/project/xgboost/.
diff --git python-package/pyproject.toml python-package/pyproject.toml
index 46c1451c2..c5dc908d9 100644
--- python-package/pyproject.toml
+++ python-package/pyproject.toml
@@ -6,7 +6,7 @@ backend-path = ["."]
 build-backend = "packager.pep517"

 [project]
-name = "xgboost"
+name = "xgboost-cpu"
 description = "XGBoost Python Package"
 readme = { file = "README.rst", content-type = "text/x-rst" }
 authors = [
@@ -82,3 +82,6 @@ class-attribute-naming-style = "snake_case"

 # Allow single-letter variables
 variable-rgx = "[a-zA-Z_][a-z0-9_]{0,30}$"
+
+[tool.hatch.build.targets.wheel]
+packages = ["xgboost/"]
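
The patch renames only the distribution ([project] name), while the wheel still ships the xgboost/ package directory, so the import name presumably stays xgboost. A small sketch of how one could confirm that after installing the CPU wheel; the printed versions are whatever happens to be installed.

# Sketch: distribution name vs. import name after `pip install xgboost-cpu`.
from importlib.metadata import version

import xgboost  # the import package is still named "xgboost"

print(version("xgboost-cpu"))  # metadata registered under the new distribution name
print(xgboost.__version__)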
