Mirror of https://github.com/pytorch/pytorch.git
[PyTorch] Make TensorImpl::sizes() customizable and disable it for NestedTensorImpl (#73817)
Summary:
Pull Request resolved: https://github.com/pytorch/pytorch/pull/73817

NestedTensorImpl doesn't have sizes(). Silently getting wrong results back from it is not conducive to efficient software development. Make it throw instead, while still allowing sizes() to be inlined in the common case, just like is_contiguous(). Thanks to ezyang for the reminder that we could do this.

ghstack-source-id: 151302903

Test Plan: Updated test_nestedtensor.py

Reviewed By: ezyang

Differential Revision: D34660829

fbshipit-source-id: 1289f21127d6a8359893f9174f3c430a290f2c7f

(cherry picked from commit 7098b9fcfbd25a03bac19e1148426ff073810edd)
Committed by: PyTorch MergeBot
Parent: 4646caede9
Commit: 90be8fa279
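The approach described in the summary, keeping the hot accessor inlined while giving opted-in subclasses a throwing slow path, can be illustrated with a small standalone sketch. The C++ below is not the actual c10::TensorImpl code: the names TensorImplSketch, NestedTensorImplSketch, sizes_custom and custom_sizes_ are invented for this example, and the plain bool flag stands in for whatever per-instance policy the real TensorImpl consults. It only shows why sizes() can stay a cheap inline call for ordinary tensors while a nested-tensor-like subclass fails loudly instead of silently returning a meaningless shape, mirroring how is_contiguous() is handled.

// Simplified, hypothetical sketch of "customizable sizes() with an inlined fast path".
// Names (TensorImplSketch, NestedTensorImplSketch, sizes_custom, custom_sizes_) are
// illustrative only and are not the real c10::TensorImpl API.
#include <cstdint>
#include <iostream>
#include <stdexcept>
#include <vector>

using IntArrayRef = const std::vector<int64_t>&;

class TensorImplSketch {
 public:
  explicit TensorImplSketch(std::vector<int64_t> sizes)
      : sizes_(std::move(sizes)) {}
  virtual ~TensorImplSketch() = default;

  // Hot accessor: a small non-virtual inline function. Only tensors that
  // opt in (custom_sizes_ == true) pay for a virtual call.
  IntArrayRef sizes() const {
    if (custom_sizes_) {       // unlikely branch in the common case
      return sizes_custom();   // virtual slow path, may throw
    }
    return sizes_;             // common case: plain field access
  }

 protected:
  // Customization hook; the default throws a generic error so that opting in
  // without overriding still fails loudly.
  virtual IntArrayRef sizes_custom() const {
    throw std::runtime_error("Tensors of this type do not have sizes");
  }

  void set_custom_sizes(bool value) { custom_sizes_ = value; }

 private:
  std::vector<int64_t> sizes_;
  bool custom_sizes_ = false;
};

// A nested-tensor-like subclass opts into the slow path and makes it throw,
// so callers get an error instead of silently reading a wrong shape.
class NestedTensorImplSketch : public TensorImplSketch {
 public:
  NestedTensorImplSketch() : TensorImplSketch({}) { set_custom_sizes(true); }

 protected:
  IntArrayRef sizes_custom() const override {
    throw std::runtime_error("NestedTensorImpl doesn't support sizes");
  }
};

int main() {
  TensorImplSketch dense({2, 3});
  std::cout << "dense rank: " << dense.sizes().size() << "\n";  // fast path

  NestedTensorImplSketch nested;
  try {
    nested.sizes();  // slow path: throws
  } catch (const std::runtime_error& e) {
    std::cout << "caught: " << e.what() << "\n";
  }
  return 0;
}

Built with any C++11 (or later) compiler, the sketch prints the dense tensor's rank via the fast path and then the caught "NestedTensorImpl doesn't support sizes" message, the same error string the updated test below expects in OSS builds.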
test/test_nestedtensor.py
@@ -133,14 +133,15 @@ class TestNestedTensor(TestCase):
                 RuntimeError, "numel is disabled", lambda: a1.numel(),
             )
 
-    @unittest.skipIf(IS_FBCODE, "size is not virtual in fbcode.")
     @torch.inference_mode()
     def test_size(self):
         for constructor in _iter_constructors():
             a1 = constructor([])
             self.assertRaisesRegex(
                 RuntimeError,
-                "NestedTensorImpl doesn't support sizes",
+                "Tensors of type NestedTensorImpl do not have sizes"
+                if IS_FBCODE
+                else "NestedTensorImpl doesn't support sizes",
                 lambda: a1.size(),
             )
 