Skip to content

Commit

Permalink
[CI/Build] migrate static project metadata from setup.py to pyproject.toml
Browse files Browse the repository at this point in the history
  • Loading branch information
dtrifiro authored Feb 18, 2025
1 parent 3809458 commit a02c86b
Show file tree
Hide file tree
Showing 2 changed files with 39 additions and 48 deletions.
36 changes: 35 additions & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -12,8 +12,42 @@ requires = [
]
build-backend = "setuptools.build_meta"

[project]
name = "vllm"
authors = [{name = "vLLM Team"}]
license = { "file"= "LICENSE" }
readme = "README.md"
description = "A high-throughput and memory-efficient inference and serving engine for LLMs"
classifiers = [
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
"Programming Language :: Python :: 3.12",
"License :: OSI Approved :: Apache Software License",
"Intended Audience :: Developers",
"Intended Audience :: Information Technology",
"Intended Audience :: Science/Research",
"Topic :: Scientific/Engineering :: Artificial Intelligence",
"Topic :: Scientific/Engineering :: Information Analysis",
]
requires-python = ">=3.9"
dynamic = [ "version", "dependencies", "optional-dependencies"]

[project.urls]
Homepage="https://github.com/vllm-project/vllm"
Documentation="https://vllm.readthedocs.io/en/latest/"
Slack="http://slack.vllm.ai/"

[project.scripts]
vllm = "vllm.entrypoints.cli.main:main"

[tool.setuptools_scm]
# version_file = "vllm/_version.py" # currently handled by `setup.py:get_version()`
version_file = "vllm/_version.py"

[tool.setuptools.packages.find]
where = ["."]
exclude = ["benchmarks", "csrc", "docs", "examples", "tests*"]
namespaces = false

[tool.yapfignore]
ignore_patterns = [
Expand Down
51 changes: 4 additions & 47 deletions setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@

import torch
from packaging.version import Version, parse
from setuptools import Extension, find_packages, setup
from setuptools import Extension, setup
from setuptools.command.build_ext import build_ext
from setuptools_scm import get_version
from torch.utils.cpp_extension import CUDA_HOME, ROCM_HOME
Expand Down Expand Up @@ -499,9 +499,7 @@ def get_gaudi_sw_version():


def get_vllm_version() -> str:
version = get_version(
write_to="vllm/_version.py", # TODO: move this to pyproject.toml
)
version = get_version()
sep = "+" if "+" not in version else "." # dev versions might contain +

if _no_device():
Expand Down Expand Up @@ -549,16 +547,6 @@ def get_vllm_version() -> str:
return version


def read_readme() -> str:
"""Read the README file if present."""
p = get_path("README.md")
if os.path.isfile(p):
with open(get_path("README.md"), encoding="utf-8") as f:
return f.read()
else:
return ""


def get_requirements() -> List[str]:
"""Get Python package dependencies from requirements.txt."""

Expand Down Expand Up @@ -649,36 +637,10 @@ def _read_requirements(filename: str) -> List[str]:
}

setup(
name="vllm",
# static metadata should rather go in pyproject.toml
version=get_vllm_version(),
author="vLLM Team",
license="Apache 2.0",
description=("A high-throughput and memory-efficient inference and "
"serving engine for LLMs"),
long_description=read_readme(),
long_description_content_type="text/markdown",
url="https://github.com/vllm-project/vllm",
project_urls={
"Homepage": "https://github.com/vllm-project/vllm",
"Documentation": "https://vllm.readthedocs.io/en/latest/",
},
classifiers=[
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
"Programming Language :: Python :: 3.12",
"License :: OSI Approved :: Apache Software License",
"Intended Audience :: Developers",
"Intended Audience :: Information Technology",
"Intended Audience :: Science/Research",
"Topic :: Scientific/Engineering :: Artificial Intelligence",
"Topic :: Scientific/Engineering :: Information Analysis",
],
packages=find_packages(exclude=("benchmarks", "csrc", "docs", "examples",
"tests*")),
python_requires=">=3.9",
install_requires=get_requirements(),
ext_modules=ext_modules,
install_requires=get_requirements(),
extras_require={
"tensorizer": ["tensorizer>=2.9.0"],
"runai": ["runai-model-streamer", "runai-model-streamer-s3", "boto3"],
Expand All @@ -687,9 +649,4 @@ def _read_requirements(filename: str) -> List[str]:
},
cmdclass=cmdclass,
package_data=package_data,
entry_points={
"console_scripts": [
"vllm=vllm.entrypoints.cli.main:main",
],
},
)

0 comments on commit a02c86b

Please sign in to comment.