Revert "[build] modernize build-backend: setuptools.build_meta:__legacy__ -> setuptools.build_meta (#155998)"

This reverts commit 404008e3efdabeaf5b140a3aff77131461c33a0a.

Reverted https://github.com/pytorch/pytorch/pull/155998 on behalf of https://github.com/malfet due to Broke inductor_cpp_wrapper, see e472daa809/1 ([comment](https://github.com/pytorch/pytorch/pull/155998#issuecomment-3032915058))
Author: PyTorch MergeBot
Date: 2025-07-03 16:47:07 +00:00
Parent: e472daa809
Commit: 2e64e45b0b
4 changed files with 144 additions and 169 deletions
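For context: the reverted change lives in pyproject.toml's `[build-system]` table, where `build-backend` was switched from `"setuptools.build_meta:__legacy__"` to `"setuptools.build_meta"`; this revert restores the legacy backend. The sketch below is illustrative only (it is not part of this commit) and shows how a PEP 517 frontend resolves the two backend specs. The practical difference is that the legacy backend prepends the project directory to `sys.path` while running the build, mimicking `python setup.py ...`, whereas the modern backend does not, which is why the setup.py changes below handle `sys.path`/`PYTHONPATH` by hand.

```python
# Illustrative sketch, not part of this commit: how a PEP 517 frontend turns
# the build-backend string from pyproject.toml into a backend object.
import importlib


def load_backend(spec: str):
    # "pkg.mod:obj" -> import pkg.mod, then fetch the named attribute.
    module_name, _, attr = spec.partition(":")
    module = importlib.import_module(module_name)
    return getattr(module, attr) if attr else module


modern = load_backend("setuptools.build_meta")             # what #155998 switched to
legacy = load_backend("setuptools.build_meta:__legacy__")  # what this revert restores

# Both objects expose the same PEP 517 hooks; they differ in whether the
# project directory is put on sys.path while setup.py runs.
for hook in ("build_wheel", "build_sdist", "get_requires_for_build_wheel"):
    assert hasattr(modern, hook) and hasattr(legacy, hook)
```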

setup.py (152 changed lines)

@@ -244,7 +244,6 @@ if sys.platform == "win32" and sys.maxsize.bit_length() == 31:
import platform
# Also update `project.requires-python` in pyproject.toml when changing this
python_min_version = (3, 9, 0)
python_min_version_str = ".".join(map(str, python_min_version))
if sys.version_info < python_min_version:
@@ -273,28 +272,6 @@ import setuptools.command.sdist
import setuptools.errors
from setuptools import Command, Extension, find_packages, setup
from setuptools.dist import Distribution
CWD = Path(__file__).absolute().parent
# Add the current directory to the Python path so that we can import `tools`.
# This is required when running this script with a PEP-517-enabled build backend.
#
# From the PEP-517 documentation: https://peps.python.org/pep-0517
#
# > When importing the module path, we do *not* look in the directory containing
# > the source tree, unless that would be on `sys.path` anyway (e.g. because it
# > is specified in `PYTHONPATH`).
#
sys.path.insert(0, str(CWD)) # this only affects the current process
# Add the current directory to PYTHONPATH so that we can import `tools` in subprocesses
os.environ["PYTHONPATH"] = os.pathsep.join(
[
str(CWD),
os.getenv("PYTHONPATH", ""),
]
).rstrip(os.pathsep)
from tools.build_pytorch_libs import build_pytorch
from tools.generate_torch_version import get_torch_version
from tools.setup_helpers.cmake import CMake, CMakeValue
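The comment block in this hunk is the heart of the backend switch: under the non-legacy backend the source tree is no longer on `sys.path` automatically, so the in-tree `tools` package would not be importable. A minimal sketch of the two-step pattern (illustrative only, assuming a `tools` package sits next to the script):

```python
# Minimal sketch of the pattern above (illustrative, not part of the diff).
import os
import subprocess
import sys
from pathlib import Path

CWD = Path(__file__).absolute().parent

# Step 1: makes `import tools` work in *this* interpreter only.
sys.path.insert(0, str(CWD))

# Step 2: exports the path so child interpreters (e.g. code-generation steps
# launched via subprocess) can import the package as well.
os.environ["PYTHONPATH"] = os.pathsep.join(
    [str(CWD), os.getenv("PYTHONPATH", "")]
).rstrip(os.pathsep)

# The child process inherits os.environ, so the import can succeed there too.
subprocess.run([sys.executable, "-c", "import tools"], check=False)
```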
@@ -387,8 +364,8 @@ RUN_BUILD_DEPS = True
# see if the user passed a quiet flag to setup.py arguments and respect
# that in our parts of the build
EMIT_BUILD_WARNING = False
RERUN_CMAKE = str2bool(os.environ.pop("CMAKE_FRESH", None))
CMAKE_ONLY = str2bool(os.environ.pop("CMAKE_ONLY", None))
RERUN_CMAKE = str2bool(os.getenv("CMAKE_FRESH"))
CMAKE_ONLY = str2bool(os.getenv("CMAKE_ONLY"))
filtered_args = []
for i, arg in enumerate(sys.argv):
if arg == "--cmake":
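A note on the two variants in this hunk (illustrative, not part of the diff): `os.environ.pop` consumes the flag so it is read exactly once and never leaks into later subprocesses, while `os.getenv` reads it and leaves it set.

```python
# Illustrative sketch of the difference between the two variants above.
import os

os.environ["CMAKE_FRESH"] = "1"

# os.getenv: read the value and leave the variable in place, so a
# re-invocation of setup.py (or any child process) still sees it.
assert os.getenv("CMAKE_FRESH") == "1"
assert "CMAKE_FRESH" in os.environ

# os.environ.pop: read the value *and* remove it, so the flag is consumed
# once and does not propagate further.
assert os.environ.pop("CMAKE_FRESH", None) == "1"
assert "CMAKE_FRESH" not in os.environ
```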
@@ -430,6 +407,7 @@ else:
setuptools.distutils.log.warn = report # type: ignore[attr-defined]
# Constant known variables used throughout this file
CWD = Path(__file__).absolute().parent
TORCH_DIR = CWD / "torch"
TORCH_LIB_DIR = TORCH_DIR / "lib"
THIRD_PARTY_DIR = CWD / "third_party"
@@ -1106,12 +1084,14 @@ def configure_extension_build() -> tuple[
# pypi cuda package that requires installation of cuda runtime, cudnn and cublas
# should be included in all wheels uploaded to pypi
pytorch_extra_install_requires = os.getenv("PYTORCH_EXTRA_INSTALL_REQUIREMENTS")
if pytorch_extra_install_requires:
report(f"pytorch_extra_install_requirements: {pytorch_extra_install_requires}")
extra_install_requires.extend(
map(str.strip, pytorch_extra_install_requires.split("|"))
pytorch_extra_install_requirements = os.getenv(
"PYTORCH_EXTRA_INSTALL_REQUIREMENTS", ""
)
if pytorch_extra_install_requirements:
report(
f"pytorch_extra_install_requirements: {pytorch_extra_install_requirements}"
)
extra_install_requires += pytorch_extra_install_requirements.split("|")
# Cross-compile for M1
if IS_DARWIN:
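For reference, both variants in this hunk parse a `|`-separated list out of `PYTORCH_EXTRA_INSTALL_REQUIREMENTS`; they differ only in whether whitespace around each item is stripped. A small sketch with a hypothetical value:

```python
# Illustrative sketch (hypothetical example value, not part of the diff).
raw = "nvidia-cudnn-cu12 | nvidia-cublas-cu12"

# Variant that strips whitespace around each requirement:
print(list(map(str.strip, raw.split("|"))))  # ['nvidia-cudnn-cu12', 'nvidia-cublas-cu12']

# Variant that only splits, keeping surrounding spaces in each item:
print(raw.split("|"))  # ['nvidia-cudnn-cu12 ', ' nvidia-cublas-cu12']
```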
@@ -1147,15 +1127,10 @@ def configure_extension_build() -> tuple[
################################################################################
ext_modules: list[Extension] = []
# packages that we want to install into site-packages and include them in wheels
includes = ["torch", "torch.*", "torchgen", "torchgen.*"]
# exclude folders that they look like Python packages but are not wanted in wheels
excludes = ["tools", "tools.*", "caffe2", "caffe2.*"]
if cmake_cache_vars["BUILD_FUNCTORCH"]:
includes.extend(["functorch", "functorch.*"])
else:
if not cmake_cache_vars["BUILD_FUNCTORCH"]:
excludes.extend(["functorch", "functorch.*"])
packages = find_packages(include=includes, exclude=excludes)
packages = find_packages(exclude=excludes)
C = Extension(
"torch._C",
libraries=main_libraries,
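The two `find_packages` calls in this hunk differ in strategy: one is an allow-list (`include=`) that only picks up the `torch`/`torchgen` (and optionally `functorch`) trees, the other is a deny-list that packages everything found except the excluded patterns. A sketch of both (illustrative, not part of the diff):

```python
# Illustrative sketch (not part of the diff); patterns are fnmatch-style
# package names as accepted by setuptools.find_packages.
from setuptools import find_packages

excludes = ["tools", "tools.*", "caffe2", "caffe2.*"]

# Allow-list: only packages matching `include` are considered at all.
allow_listed = find_packages(
    include=["torch", "torch.*", "torchgen", "torchgen.*"],
    exclude=excludes,
)

# Deny-list: every package under the source tree is taken unless it matches
# one of the exclude patterns.
deny_listed = find_packages(exclude=excludes)
```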
@@ -1233,7 +1208,6 @@ def main() -> None:
"Conflict: 'BUILD_LIBTORCH_WHL' and 'BUILD_PYTHON_ONLY' can't both be 1. "
"Set one to 0 and rerun."
)
install_requires = [
"filelock",
"typing-extensions>=4.10.0",
@@ -1243,8 +1217,9 @@ def main() -> None:
"jinja2",
"fsspec",
]
if BUILD_PYTHON_ONLY:
install_requires += [f"{LIBTORCH_PKG_NAME}=={TORCH_VERSION}"]
install_requires.append(f"{LIBTORCH_PKG_NAME}=={get_torch_version()}")
if str2bool(os.getenv("USE_PRIORITIZED_TEXT_FOR_LD")):
gen_linker_script(
@@ -1274,7 +1249,7 @@ def main() -> None:
try:
dist.parse_command_line()
except setuptools.errors.BaseError as e:
print(e, file=sys.stderr)
print(e)
sys.exit(1)
mirror_files_into_torchgen()
@@ -1290,6 +1265,16 @@ def main() -> None:
) = configure_extension_build()
install_requires += extra_install_requires
extras_require = {
"optree": ["optree>=0.13.0"],
"opt-einsum": ["opt-einsum>=3.3"],
"pyyaml": ["pyyaml"],
}
# Read in README.md for our long_description
long_description = (CWD / "README.md").read_text(encoding="utf-8")
version_range_max = max(sys.version_info[1], 13) + 1
torch_package_data = [
"py.typed",
"bin/*",
@@ -1332,18 +1317,22 @@ def main() -> None:
]
if not BUILD_LIBTORCH_WHL:
torch_package_data += [
"lib/libtorch_python.so",
"lib/libtorch_python.dylib",
"lib/libtorch_python.dll",
]
torch_package_data.extend(
[
"lib/libtorch_python.so",
"lib/libtorch_python.dylib",
"lib/libtorch_python.dll",
]
)
if not BUILD_PYTHON_ONLY:
torch_package_data += [
"lib/*.so*",
"lib/*.dylib*",
"lib/*.dll",
"lib/*.lib",
]
torch_package_data.extend(
[
"lib/*.so*",
"lib/*.dylib*",
"lib/*.dll",
"lib/*.lib",
]
)
# XXX: Why not use wildcards ["lib/aotriton.images/*", "lib/aotriton.images/**/*"] here?
aotriton_image_path = TORCH_DIR / "lib" / "aotriton.images"
aks2_files = [
@@ -1353,15 +1342,19 @@ def main() -> None:
]
torch_package_data += aks2_files
if get_cmake_cache_vars()["USE_TENSORPIPE"]:
torch_package_data += [
"include/tensorpipe/*.h",
"include/tensorpipe/**/*.h",
]
torch_package_data.extend(
[
"include/tensorpipe/*.h",
"include/tensorpipe/**/*.h",
]
)
if get_cmake_cache_vars()["USE_KINETO"]:
torch_package_data += [
"include/kineto/*.h",
"include/kineto/**/*.h",
]
torch_package_data.extend(
[
"include/kineto/*.h",
"include/kineto/**/*.h",
]
)
torchgen_package_data = [
"packaged/*",
"packaged/**/*",
@@ -1369,11 +1362,9 @@ def main() -> None:
package_data = {
"torch": torch_package_data,
}
exclude_package_data = {}
if not BUILD_LIBTORCH_WHL:
package_data["torchgen"] = torchgen_package_data
exclude_package_data["torchgen"] = ["*.py[co]"]
else:
# no extensions in BUILD_LIBTORCH_WHL mode
ext_modules = []
@@ -1381,16 +1372,47 @@ def main() -> None:
setup(
name=TORCH_PACKAGE_NAME,
version=TORCH_VERSION,
description=(
"Tensors and Dynamic neural networks in Python with strong GPU acceleration"
),
long_description=long_description,
long_description_content_type="text/markdown",
ext_modules=ext_modules,
cmdclass=cmdclass,
packages=packages,
entry_points=entry_points,
install_requires=install_requires,
extras_require=extras_require,
package_data=package_data,
exclude_package_data=exclude_package_data,
# Disable automatic inclusion of data files because we want to
# explicitly control with `package_data` above.
include_package_data=False,
# TODO fix later Manifest.IN file was previously ignored
include_package_data=False, # defaults to True with pyproject.toml file
url="https://pytorch.org/",
download_url="https://github.com/pytorch/pytorch/tags",
author="PyTorch Team",
author_email="packages@pytorch.org",
python_requires=f">={python_min_version_str}",
# PyPI package information.
classifiers=[
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"Intended Audience :: Education",
"Intended Audience :: Science/Research",
"License :: OSI Approved :: BSD License",
"Topic :: Scientific/Engineering",
"Topic :: Scientific/Engineering :: Mathematics",
"Topic :: Scientific/Engineering :: Artificial Intelligence",
"Topic :: Software Development",
"Topic :: Software Development :: Libraries",
"Topic :: Software Development :: Libraries :: Python Modules",
"Programming Language :: C++",
"Programming Language :: Python :: 3",
]
+ [
f"Programming Language :: Python :: 3.{i}"
for i in range(python_min_version[1], version_range_max)
],
license="BSD-3-Clause",
keywords="pytorch, machine learning",
)
if EMIT_BUILD_WARNING:
print_box(build_update_message)
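Worked example for the Python-version classifiers generated near the end of `setup()` (illustrative, not part of the diff): with `python_min_version = (3, 9, 0)` and `version_range_max = max(sys.version_info[1], 13) + 1`, the list comprehension expands to classifiers for 3.9 through 3.13, or higher when building with a newer interpreter.

```python
# Illustrative sketch: how the Python-version classifiers are generated.
import sys

python_min_version = (3, 9, 0)
version_range_max = max(sys.version_info[1], 13) + 1  # 14 when building on <= 3.13

classifiers = [
    f"Programming Language :: Python :: 3.{i}"
    for i in range(python_min_version[1], version_range_max)
]
print(classifiers)  # ['... :: 3.9', '... :: 3.10', ..., '... :: 3.13']
```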