Mirror of https://github.com/pytorch/pytorch.git, synced 2025-10-21 05:34:18 +08:00.
[autograd] disable backward/grad for complex scalar output (#92753)
Fixes https://github.com/pytorch/pytorch/issues/92750 Pull Request resolved: https://github.com/pytorch/pytorch/pull/92753 Approved by: https://github.com/ezyang
This commit is contained in: commit 3b966a6ce3 (parent b5ff41a47a), committed by PyTorch MergeBot.
@ -1099,6 +1099,13 @@ TEST(TensorTest, BackwardNonScalarOutputs) {
|
||||
y.backward(), "grad can be implicitly created only for scalar outputs");
|
||||
}
|
||||
|
||||
TEST(TensorTest, BackwardComplexScalarOutput) {
  // Regression test for pytorch/pytorch#92750: calling backward() on a
  // complex-valued scalar output must throw, since a gradient can only be
  // implicitly created for real scalar outputs.
  auto x = torch::randn({5, 5}, torch::requires_grad());
  // Multiplying by a purely imaginary scalar makes the summed output a
  // complex scalar even though the input tensor x is real.
  auto y = (x * c10::Scalar(c10::complex<float>(0, 0.5))).sum();
  ASSERT_THROWS_WITH(
      y.backward(), "grad can be computed only for real scalar outputs");
}
|
||||
|
||||
TEST(TensorTest, IsLeaf) {
|
||||
auto x = torch::tensor({5}, torch::dtype(torch::kFloat).requires_grad(true));
|
||||
auto y = x * x;
|
||||
|
Reference in New Issue
Block a user