Revert "Remove python key when setting functional tensor metadata (#81401)"
This reverts commit b0199c06f604dcfaf59bd59ecee9f638ef0e5c3f. Reverted https://github.com/pytorch/pytorch/pull/81401 on behalf of https://github.com/clee2000 because it broke the trunk win force_on_cpu tests: https://github.com/pytorch/pytorch/runs/7329017706?check_suite_focus=true
@@ -29,7 +29,7 @@ void FunctionalTensorWrapper::set_constructor_metadata() {
   // All of the keys corresponding to functorch transforms should not be copied over.
   // Functorch transforms all have their own wrapper tensors (e.g. BatchedTensorImpl) which expect
   // to participate in the functorch transforms.
-  key_set_ = key_set_ - c10::functorch_transforms_ks - c10::python_ks;
+  key_set_ = key_set_ - c10::functorch_transforms_ks;
 }
 
 FunctionalTensorWrapper::FunctionalTensorWrapper(const Tensor& value)
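
For context, this hunk restores the previous behavior of set_constructor_metadata(): only the functorch transform keys are subtracted from the wrapper's dispatch key set, so the Python key copied over from the wrapped tensor is kept. Below is a minimal sketch, not PyTorch source, of the key-set arithmetic the revert changes. It assumes the c10::DispatchKeySet operations (operator|, operator-, has()) and the constexpr key sets c10::python_ks / c10::functorch_transforms_ks referenced in the hunk above; the starting key set is a hypothetical stand-in chosen for illustration.

// Sketch only: illustrates the DispatchKeySet subtraction changed by this
// revert, under the assumptions named above.
#include <c10/core/DispatchKeySet.h>
#include <iostream>

int main() {
  // Hypothetical stand-in for the key set a FunctionalTensorWrapper copies
  // from the tensor it wraps.
  c10::DispatchKeySet ks = c10::DispatchKeySet(c10::DispatchKey::Python) |
                           c10::DispatchKeySet(c10::DispatchKey::FuncTorchBatched);

  // Pre-revert behavior (PR #81401): strip the functorch transform keys AND
  // the Python key.
  c10::DispatchKeySet pre = ks - c10::functorch_transforms_ks - c10::python_ks;

  // Post-revert behavior: strip only the functorch transform keys; the
  // Python key stays on the wrapper.
  c10::DispatchKeySet post = ks - c10::functorch_transforms_ks;

  std::cout << "Python key pre-revert:  " << pre.has(c10::DispatchKey::Python) << "\n";
  std::cout << "Python key post-revert: " << post.has(c10::DispatchKey::Python) << "\n";
}

With the revert applied, the second subtraction is gone, so has(DispatchKey::Python) remains true on the wrapper's key set.
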
@@ -110,10 +110,6 @@ class TestFunctionalization(TestCase):
         out_functional_unwrapped = torch._from_functional_tensor(out_functional_)
         self.assertEqual(out_ref_, out_functional_unwrapped)
 
-    def test_save_for_backwards_segfault(self):
-        inp = torch._to_functional_tensor(LoggingTensor(torch.randn(2, 2))).requires_grad_(True)
-        inp.exp()
-
     def test_multiple_views_of_same_base(self):
         def f(x):
             y = x.view(-1)