Fix refcount handling for dtype, layout and memory format (#125271)

Finish fixing https://github.com/pytorch/pytorch/issues/124868: re-use our
wrap() utils as much as possible, and NewRef in other places.
Pull Request resolved: https://github.com/pytorch/pytorch/pull/125271
Approved by: https://github.com/colesbury
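
For context on the convention the message refers to, below is a minimal sketch of a wrap()-style helper that returns a new reference to a statically allocated dtype object. This is an assumption about the pattern, not the actual PyTorch implementation; get_dtype_singleton() is a hypothetical stand-in for torch::getTHPDtype.

#include <Python.h>

// Hypothetical sketch of the "wrap() returns a new reference" convention the
// commit message describes; not the actual PyTorch implementation.
// get_dtype_singleton() stands in for torch::getTHPDtype, which hands back a
// borrowed pointer to a statically allocated dtype object.
static PyObject* get_dtype_singleton() {
  static PyObject* singleton = PyUnicode_FromString("torch.float32");
  return singleton;  // borrowed: the static variable keeps the owning ref
}

PyObject* wrap_dtype() {
  PyObject* obj = get_dtype_singleton();
  Py_INCREF(obj);  // Py_NewRef(obj) on Python >= 3.10
  return obj;      // caller now owns one reference and must release it
}

Returning the borrowed pointer directly would let a caller's eventual Py_DECREF drive the static object's refcount below the number of live owners, which is the under-counting bug class the commit title describes.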
Author: albanD
Date: 2024-05-02 02:34:30 +00:00
Committed by: PyTorch MergeBot
Parent: 4731130ea8
Commit: b119e1bcc2

10 changed files with 28 additions and 24 deletions


@@ -440,8 +440,7 @@ void initPythonBindings(PyObject* module) {
           "dtype",
           [](const TensorMetadata& metadata) {
             return py::reinterpret_borrow<py::object>(
-                torch::autograd::utils::wrap(
-                    torch::getTHPDtype(metadata.dtype_)));
+                torch::autograd::utils::wrap(metadata.dtype_));
           })
       .def_readonly("dim", &TensorMetadata::dim_)
       .def_readonly("sizes", &TensorMetadata::sizes_)