Last active
September 4, 2025 05:25
-
-
Save vadimkantorov/fe63f8628ff6cad460e934e1d7ed650b to your computer and use it in GitHub Desktop.
Install and pin nightly vLLM using pyproject.toml and uv
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
# Usage:
# git clone https://gist.github.com/vadimkantorov/fe63f8628ff6cad460e934e1d7ed650b
# cd fe63f8628ff6cad460e934e1d7ed650b
# uv venv
# uv sync

# Background on pinning vLLM nightly / per-commit wheels with uv:
# https://github.com/vllm-project/vllm/pull/20358#issuecomment-3247178818
# https://github.com/vllm-project/vllm/issues/9244
# https://github.com/astral-sh/uv/issues/8082
# https://github.com/vllm-project/vllm/issues/24126

[build-system]
requires = ["setuptools>=65", "wheel"]
build-backend = "setuptools.build_meta"

[project]
name = "test_vllm"
version = "0.0.0.1"
requires-python = ">=3.12,<3.13"
dependencies = [
    "torch==2.8.0",
    "flash-attn==2.8.3",
    # Exact dev build from the per-commit index below; use plain "vllm"
    # instead to let the resolver float to the latest available wheel.
    "vllm==0.10.2rc2.dev39+g930a24144",
]

[tool.uv.sources]
# vllm resolves only against the per-commit wheel index defined below.
vllm = { index = "vllm_commit" }
# flash-attn is taken from a prebuilt wheel (CUDA 12 / torch 2.8 / cp312).
flash-attn = { url = "https://github.com/Dao-AILab/flash-attention/releases/download/v2.8.3/flash_attn-2.8.3+cu12torch2.8cxx11abiTRUE-cp312-cp312-linux_x86_64.whl" }

# Per-commit wheel index for the pinned vLLM build.
[[tool.uv.index]]
name = "vllm_commit"
url = "https://wheels.vllm.ai/930a24144c073a08cfecabd75a242e713bc4f57e"

# Alternative: track the rolling nightly index instead of a fixed commit.
#[[tool.uv.index]]
#name = "vllm_nightly"
#url = "https://wheels.vllm.ai/nightly"

[tool.uv]
# NOTE(review): presumably pins outlines-core to resolve a dependency
# conflict (see vllm issue #24126 linked above) — confirm before removing.
override-dependencies = ["outlines-core==0.2.10"]
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment