mirror of
https://github.com/pytorch/pytorch.git
synced 2025-11-06 09:17:11 +08:00
Fix refcount handling for dtype, layout and memory format (#125271)
Finish fixing https://github.com/pytorch/pytorch/issues/124868 re-use our wrap() utils as much as possible and NewRef in other places. Pull Request resolved: https://github.com/pytorch/pytorch/pull/125271 Approved by: https://github.com/colesbury
This commit is contained in:
@@ -440,8 +440,7 @@ void initPythonBindings(PyObject* module) {
           "dtype",
           [](const TensorMetadata& metadata) {
-            return py::reinterpret_borrow<py::object>(
-                torch::autograd::utils::wrap(
-                    torch::getTHPDtype(metadata.dtype_)));
+            return torch::autograd::utils::wrap(metadata.dtype_);
           })
       .def_readonly("dim", &TensorMetadata::dim_)
       .def_readonly("sizes", &TensorMetadata::sizes_)
Reference in New Issue
Block a user