Fix double dispatch to Python for detach (#163671)

This fixes #71725.

Differential Revision: [D83857880](https://our.internmc.facebook.com/intern/diff/D83857880)
Pull Request resolved: https://github.com/pytorch/pytorch/pull/163671
Approved by: https://github.com/ezyang, https://github.com/albanD
This commit is contained in:
Scott Wolchok
2025-10-07 11:56:30 -07:00
committed by PyTorch MergeBot
parent e3ae80fc03
commit c32118dc3e
10 changed files with 49 additions and 90 deletions

View File

@@ -4926,7 +4926,6 @@ Running aten.expand.default from within SumBackward0
Running aten.div.Tensor from within DivBackward0
Running aten.mul.Tensor from within MulBackward0
Running aten.detach.default from within AccumulateGrad
Running aten.detach.default from within AccumulateGrad
Done""",
)
@@ -7199,9 +7198,7 @@ for shape in [(1,), ()]:
lambda x: x.exp(), x, use_reentrant=False, context_fn=context_fn
)
out.backward()
self.assertEqual(
verbose_mode.operators, ["exp.default", "detach.default", "detach.default"]
)
self.assertEqual(verbose_mode.operators, ["exp.default", "detach.default"])
with self.assertRaisesRegex(
Exception, "only supported when use_reentrant=False"