Skip to content

Commit a39caf8

Browse files
authored
bump vllm==0.8.5 for qwen3 support (axolotl-ai-cloud#2583) [skip ci]
1 parent 07e4f2e commit a39caf8

File tree

1 file changed

+2
-2
lines changed

1 file changed

+2
-2
lines changed

setup.py

Lines changed: 2 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -67,13 +67,13 @@ def parse_requirements(extras_require_map):
6767
if (major, minor) >= (2, 7):
6868
_install_requires.pop(_install_requires.index(xformers_version))
6969
# _install_requires.append("xformers==0.0.29.post3") # xformers seems to be hard pinned to 2.6.0
70-
extras_require_map["vllm"] = ["vllm==0.8.4"]
70+
extras_require_map["vllm"] = ["vllm==0.8.5"]
7171
elif (major, minor) >= (2, 6):
7272
_install_requires.pop(_install_requires.index(xformers_version))
7373
_install_requires.append(
7474
"xformers==0.0.29.post2"
7575
) # vllm needs post2 w torch 2.6
76-
extras_require_map["vllm"] = ["vllm==0.8.4"]
76+
extras_require_map["vllm"] = ["vllm==0.8.5"]
7777
elif (major, minor) >= (2, 5):
7878
_install_requires.pop(_install_requires.index(xformers_version))
7979
if patch == 0:

0 commit comments

Comments (0)