Mirror of https://github.com/pytorch/pytorch.git (synced 2025-10-21 05:34:18 +08:00)
Pointer to the nonzero limit ticket (#124244)
For the nonzero impl limits, the runtime error still asks users to file a new ticket, but more than one already exists, so the message now points to the current open ticket instead. Pull Request resolved: https://github.com/pytorch/pytorch/pull/124244 Approved by: https://github.com/ezyang
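For context, the check being touched rejects any input whose element count does not fit in a 32-bit int, because the CUDA and MPS nonzero kernels index with int. As a minimal sketch of what this means for a caller, here is a hypothetical caller-side guard (the helper name nonzero_checked is illustrative, not part of the PyTorch API; it assumes libtorch):

    #include <torch/torch.h>
    #include <limits>
    #include <stdexcept>

    // Hypothetical caller-side guard mirroring the kernel-side TORCH_CHECK:
    // nonzero on CUDA/MPS requires numel() < INT_MAX because those kernels
    // index with 32-bit ints.
    torch::Tensor nonzero_checked(const torch::Tensor& self) {
      if (self.numel() >= std::numeric_limits<int>::max()) {
        throw std::runtime_error(
            "tensor too large for nonzero on this backend; "
            "see https://github.com/pytorch/pytorch/issues/51871");
      }
      return self.nonzero();
    }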
@@ -112,7 +112,7 @@ void nonzero_cuda_out_impl(const Tensor& self, Tensor& out){
 
 Tensor& nonzero_out_cuda(const Tensor& self, Tensor& out){
   TORCH_CHECK(self.numel() < std::numeric_limits<int>::max(), "nonzero is not supported for tensors with more than INT_MAX elements, \
-  file a support request");
+  See https://github.com/pytorch/pytorch/issues/51871");
   TORCH_CHECK(out.dtype() == at::kLong, "Expected object of scalar type ", at::kLong, " as out, but got ", out.dtype());
   TORCH_CHECK(self.device() == out.device(), "expected self and out to be on the same device, but got out on ",
               out.device(), " and self on ", self.device());
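For readers unfamiliar with the macro: TORCH_CHECK throws a c10::Error when its first argument evaluates to false, streaming all remaining arguments into the error message, which is why the issue URL can live directly in the string literal. A minimal standalone sketch of that behavior (assumes the c10 headers shipped with libtorch; the function name is illustrative):

    #include <c10/util/Exception.h>
    #include <cstdint>
    #include <limits>

    void check_nonzero_numel(int64_t numel) {
      // Throws c10::Error with the concatenated message when the condition
      // fails; does nothing otherwise.
      TORCH_CHECK(numel < std::numeric_limits<int>::max(),
                  "nonzero is not supported for tensors with more than INT_MAX elements, ",
                  "See https://github.com/pytorch/pytorch/issues/51871");
    }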
@@ -266,7 +266,7 @@ Tensor& nonzero_out_mps(const Tensor& self, Tensor& out_) {
 
   TORCH_CHECK(self.numel() < std::numeric_limits<int>::max(),
               "nonzero is not supported for tensors with more than INT_MAX elements, \
-  file a support request");
+  See https://github.com/pytorch/pytorch/issues/51871");
   TORCH_CHECK(
       out_.dtype() == at::kLong, "Expected object of scalar type ", at::kLong, " as out, but got ", out_.dtype());
   TORCH_CHECK(self.device() == out_.device(),
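One stylistic note on both hunks: the message string is split with a backslash line continuation inside the literal, which splices the source lines together but keeps the continuation line's leading indentation inside the runtime message. A sketch of the more conventional alternative, adjacent string-literal concatenation, which the compiler joins without leaking stray whitespace (not what the patch does, just an illustration):

    // Adjacent string literals are concatenated by the compiler, so no
    // stray indentation ends up in the error message:
    TORCH_CHECK(self.numel() < std::numeric_limits<int>::max(),
                "nonzero is not supported for tensors with more than INT_MAX elements, "
                "See https://github.com/pytorch/pytorch/issues/51871");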