Skip to content
This repository was archived by the owner on Sep 4, 2025. It is now read-only.

Commit d976df3

Browse files
njhill authored and dtrifiro committed
[Core] Make Ray an optional "extras" requirement
Still included in built docker images
1 parent 7431143 commit d976df3

File tree

4 files changed

+18
-8
lines changed

4 files changed

+18
-8
lines changed

Dockerfile

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -104,7 +104,7 @@ RUN ldconfig /usr/local/cuda-12.4/compat/
104104
# install vllm wheel first, so that torch etc will be installed
105105
RUN --mount=type=bind,from=build,src=/workspace/dist,target=/vllm-workspace/dist \
106106
--mount=type=cache,target=/root/.cache/pip \
107-
pip install dist/*.whl --verbose
107+
pip install "$(echo dist/*.whl)[ray]" --verbose
108108
#################### vLLM installation IMAGE ####################
109109

110110

requirements-cuda.txt

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,6 @@
22
-r requirements-common.txt
33

44
# Dependencies for NVIDIA GPUs
5-
ray >= 2.9
65
nvidia-ml-py # for pynvml package
76
vllm-nccl-cu12>=2.18,<2.19 # for downloading nccl library
87
torch == 2.3.0

requirements-rocm.txt

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,4 @@
11
# Common dependencies
22
-r requirements-common.txt
33

4-
# Dependencies for AMD GPUs
5-
ray == 2.9.3
4+
# No specific dependencies currently for AMD GPUs

setup.py

Lines changed: 16 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -6,7 +6,7 @@
66
import subprocess
77
import sys
88
from shutil import which
9-
from typing import Dict, List
9+
from typing import Dict, List, Optional
1010

1111
import torch
1212
from packaging.version import Version, parse
@@ -380,6 +380,20 @@ def _read_requirements(filename: str) -> List[str]:
380380
return requirements
381381

382382

383+
def get_extra_requirements() -> Optional[Dict[str, List[str]]]:
    """Build the setuptools ``extras_require`` mapping for this platform.

    The "tensorizer" extra is always offered. A "ray" extra is added for
    CUDA builds (``ray>=2.9``) and ROCm builds (pinned ``ray==2.9.3``);
    Neuron and CPU builds get no "ray" extra.

    Returns:
        Mapping of extra name to its requirement list.

    Raises:
        ValueError: if no supported platform is detected.
    """
    extras = {"tensorizer": ["tensorizer>=2.9.0"]}
    if _is_cuda():
        extras["ray"] = ["ray>=2.9"]
    elif _is_hip():
        # ROCm is pinned to a specific Ray release.
        extras["ray"] = ["ray==2.9.3"]
    elif _is_neuron() or _is_cpu():
        # No Ray extra for Neuron/CPU builds.
        pass
    else:
        # Fixed message: the original omitted CPU even though the branch
        # above explicitly accepts it, and misspelled "ROCm".
        raise ValueError(
            "Unsupported platform, please use CUDA, ROCm, Neuron or CPU.")
    return extras
395+
396+
383397
ext_modules = []
384398

385399
if _is_cuda():
@@ -425,9 +439,7 @@ def _read_requirements(filename: str) -> List[str]:
425439
python_requires=">=3.8",
426440
install_requires=get_requirements(),
427441
ext_modules=ext_modules,
428-
extras_require={
429-
"tensorizer": ["tensorizer>=2.9.0"],
430-
},
442+
extras_require=get_extra_requirements(),
431443
cmdclass={"build_ext": cmake_build_ext} if not _is_neuron() else {},
432444
package_data=package_data,
433445
)

0 commit comments

Comments (0)