mirror of
https://github.com/pytorch/pytorch.git
synced 2025-10-21 13:44:15 +08:00
Adds
- `torch::stable::accelerator::DeviceGuard`: `std::unique_ptr` to `DeviceGuardOpaque`, mostly copied from the below (but made generic)
50eac811a6/torch/csrc/inductor/aoti_runtime/utils_cuda.h (L30-L46)
- constructor `DeviceGuard(DeviceIndex)` (**this matches aoti but differs from the actual c10 DeviceGuard constructor, which takes in a device**)
- `set_index(DeviceIndex)`
- `torch::stable::accelerator::Stream`: `std::shared_ptr` to `StreamOpaque`
- constructor `Stream(StreamHandle stream)` (similar to torch::stable::Tensor)
- `id() -> StreamId`
- `getCurrentStream(DeviceIndex device_index) -> stable::accelerator::Stream`
Pull Request resolved: https://github.com/pytorch/pytorch/pull/159679
Approved by: https://github.com/guangyey, https://github.com/janeyx99
75 lines
2.0 KiB
Python
import distutils.command.clean
|
|
import shutil
|
|
from pathlib import Path
|
|
|
|
from setuptools import find_packages, setup
|
|
|
|
import torch
|
|
from torch.utils.cpp_extension import BuildExtension, CppExtension, CUDAExtension
|
|
|
|
|
|
# Directory containing this setup.py; all other paths are derived from it.
ROOT_DIR = Path(__file__).parent
# Location of the C++ sources that get compiled into the extension.
CSRC_DIR = ROOT_DIR.joinpath("libtorch_agnostic", "csrc")
class clean(distutils.command.clean.clean):
    """``clean`` command that also removes this package's build artifacts.

    On top of the stock distutils cleanup, this deletes compiled ``.so``
    extension files and the build/dist/egg-info directories.
    """

    def run(self):
        # Run the default distutils clean behavior first.
        super().run()

        # Remove any compiled extension shared objects.
        for shared_obj in (ROOT_DIR / "libtorch_agnostic").glob("**/*.so"):
            shared_obj.unlink()

        # Remove the build, dist, and egg-info directories, if present.
        for directory in (
            ROOT_DIR / "build",
            ROOT_DIR / "dist",
            ROOT_DIR / "libtorch_agnostic.egg-info",
        ):
            if directory.exists():
                shutil.rmtree(str(directory), ignore_errors=True)
def get_extension():
    """Return the list of extension modules to build.

    Compiles every ``.cpp`` file under CSRC_DIR into a single
    ``libtorch_agnostic._C`` extension. When CUDA is available, builds a
    CUDAExtension with ``-DLAE_USE_CUDA`` defined (which allows including
    <cuda_runtime.h>); otherwise builds a plain CppExtension.
    """
    compile_args = {"cxx": ["-fdiagnostics-color=always"]}

    if torch.cuda.is_available():
        compile_args["cxx"].append("-DLAE_USE_CUDA")
        ext_cls = CUDAExtension
    else:
        ext_cls = CppExtension

    # Sort for a deterministic build regardless of filesystem order.
    cpp_sources = sorted(str(path) for path in CSRC_DIR.glob("**/*.cpp"))

    return [
        ext_cls(
            "libtorch_agnostic._C",
            sources=cpp_sources,
            py_limited_api=True,
            extra_compile_args=compile_args,
            extra_link_args=[],
        )
    ]
setup(
    # Package metadata.
    name="libtorch_agnostic",
    version="0.0",
    author="PyTorch Core Team",
    description="Example of libtorch agnostic extension",
    # Package contents: ship the compiled extension binaries with the wheel.
    packages=find_packages(exclude=("test",)),
    package_data={"libtorch_agnostic": ["*.dll", "*.dylib", "*.so"]},
    install_requires=[
        "torch",
    ],
    # Build configuration: compile the C++ sources against the stable
    # limited API (cp39) so one wheel works across Python versions.
    ext_modules=get_extension(),
    cmdclass={
        "build_ext": BuildExtension.with_options(no_python_abi_suffix=True),
        "clean": clean,
    },
    options={"bdist_wheel": {"py_limited_api": "cp39"}},
)