use vllm 0.19.0 for torch 2.10.0 (#3582)

This commit is contained in:
Wing Lian
2026-04-07 08:09:49 -07:00
committed by GitHub
parent 149178ddb7
commit 7c56809c7f

View File

@@ -89,7 +89,7 @@ def parse_requirements(extras_require_map):
     ]
     if not install_xformers:
         _install_requires.pop(_install_requires.index(xformers_version))
-    extras_require_map["vllm"] = ["vllm>=0.17.1"]
+    extras_require_map["vllm"] = ["vllm>=0.19.0"]
 elif (major, minor) >= (2, 9):
     extras_require_map.pop("fbgemm-gpu")
     extras_require_map["fbgemm-gpu"] = [