
## Using uv

```bash
# 1. Remove the existing venv
rm -rf ~/.venv-vllm-nightly

# 2. Create a new venv (conda must be deactivated first!)
conda deactivate
export PATH="$HOME/.local/bin:$PATH"
uv venv --python 3.12 ~/.venv-vllm-nightly
source ~/.venv-vllm-nightly/bin/activate
export PATH="$HOME/.venv-vllm-nightly/bin:$PATH"

# 3. Install CUDA-enabled PyTorch first
uv pip install torch torchvision torchaudio \
    --index-url https://download.pytorch.org/whl/cu124
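# (Optional sanity check, not in the original steps: the cu124 wheels assume
# an NVIDIA driver that supports CUDA 12.4 — the "CUDA Version" field in the
# nvidia-smi header shows the newest CUDA the installed driver can serve.)
nvidia-smi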

# 4. Install the vLLM nightly build
uv pip install vllm \
    --extra-index-url https://wheels.vllm.ai/nightly \
    --prerelease=allow \
    --index-strategy unsafe-best-match
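# (Optional, added: confirm which vLLM build was installed —
# nightly wheels typically carry a .dev version suffix.)
uv pip show vllm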

# 5. Install the latest transformers plus its dependencies
uv pip install "transformers>=5.0" "huggingface_hub>=1.5.0"

# 6. Verify
python -c "import transformers; print('transformers:', transformers.__version__)"
python -c "import torch; print('torch:', torch.__version__)"
python -c "import vllm; print('vllm:', vllm.__version__)"

deactivate
```
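
As an optional end-to-end smoke test, the sketch below runs a tiny offline generation through vLLM. It assumes a CUDA-capable GPU; `facebook/opt-125m` is just a small example model, not something this setup requires.

```bash
source ~/.venv-vllm-nightly/bin/activate
python - <<'EOF'
# Minimal vLLM offline-inference check (example model, swap in your own).
from vllm import LLM, SamplingParams

llm = LLM(model="facebook/opt-125m")
outputs = llm.generate(["Hello, my name is"], SamplingParams(max_tokens=16))
print(outputs[0].outputs[0].text)
EOF
deactivate
```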

## Using mamba

```bash
# 1. Create the conda environment first (python + pip only)
mamba create -n vllm-nightly python=3.12 pip -y

# 2. Install packages with pip, pointing at the correct indexes
mamba run -n vllm-nightly pip install \
    torch torchvision torchaudio \
    --index-url https://download.pytorch.org/whl/cu124

mamba run -n vllm-nightly pip install vllm \
    --extra-index-url https://wheels.vllm.ai/nightly \
    --pre
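# (Optional, added: check the installed vLLM version — nightly builds
# typically carry a .dev version suffix.)
mamba run -n vllm-nightly pip show vllm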

mamba run -n vllm-nightly pip install "transformers>=5.0" "huggingface_hub>=1.5.0"

# 3. Verify
mamba run -n vllm-nightly python -c "import transformers, vllm, torch; print(f'transformers={transformers.__version__}, vllm={vllm.__version__}, torch={torch.__version__}')"
```
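
Optionally, confirm the GPU is actually visible from inside the environment (a quick added check, assuming an NVIDIA GPU with a working driver):

```bash
mamba run -n vllm-nightly python -c "import torch; print('cuda available:', torch.cuda.is_available())"
```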