Fix autograd error message (#123154)

This PR updates the autograd error message raised when an input tensor does not have `requires_grad` set. The original message did not include the index of the offending input, making it hard for users to debug.
The new message style is consistent with the one on lines 105-109.
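
As an illustration, here is a minimal C++ sketch (hypothetical, not part of this PR) of how the index in the new message pinpoints the offending input:

#include <iostream>
#include <torch/torch.h>

int main() {
  auto a = torch::randn({2, 2}, torch::requires_grad());
  auto b = torch::randn({2, 2}); // requires_grad defaults to false
  auto out = (a * b).sum();
  try {
    torch::autograd::grad({out}, {a, b});
  } catch (const c10::Error& e) {
    // Before this PR: "One of the differentiated Tensors does not require grad"
    // After this PR:  "element 1 of the input tensors does not require grad"
    std::cerr << e.what_without_backtrace() << std::endl;
  }
  return 0;
}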
Co-authored-by: Jeffrey Wan <soulitzer@gmail.com>
Pull Request resolved: https://github.com/pytorch/pytorch/pull/123154
Approved by: https://github.com/soulitzer
Author: Chun Cai
Date: 2024-04-03 19:07:15 +00:00
Committed by: PyTorch MergeBot
Parent: 700917c361
Commit: 691054eeef
2 changed files with 7 additions and 3 deletions

@@ -1661,7 +1661,7 @@ TEST(TestAutogradNotImplementedFallback, TensorlistOp) {
   ASSERT_THROWS_WITH(
       torch::autograd::grad({out}, {vec[0]}),
-      "One of the differentiated Tensors does not require grad");
+      "element 0 of the input tensors does not require grad");
   ASSERT_THROWS_WITH(
       torch::autograd::grad({out}, {vec[1]}), "is not implemented");

@@ -126,7 +126,9 @@ static variable_list run_backward(
     }
     TORCH_CHECK(
         input.requires_grad(),
-        "One of the differentiated Tensors does not require grad");
+        "element ",
+        i,
+        " of the input tensors does not require grad");
     if (!grad_fn) {
       // See NOTE [ Autograd Unreachable Input ] for details
       output_edges.emplace_back(std::make_shared<Identity>(), 0);
@@ -149,7 +151,9 @@ static variable_list run_backward(
   for (const auto i : c10::irange(num_inputs)) {
     TORCH_CHECK(
         grad_inputs[i].defined(),
-        "One of the "
+        "element ",
+        i,
+        " of the "
         "differentiated Tensors appears to not have been used "
         "in the graph. Set allow_unused=True if this is the "
         "desired behavior.");