mirror of
https://github.com/pytorch/pytorch.git
synced 2025-10-20 21:14:14 +08:00
Fix error message of autograd (#123154)
This PR updates the error message in autograd when an input tensor does not have `requires_grad` set. The original message does not contain the index info, making it hard for users to debug. The error message style is consistent with that on lines 105-109. Co-authored-by: Jeffrey Wan <soulitzer@gmail.com> Pull Request resolved: https://github.com/pytorch/pytorch/pull/123154 Approved by: https://github.com/soulitzer
This commit is contained in:
committed by
PyTorch MergeBot
parent
700917c361
commit
691054eeef
@ -1661,7 +1661,7 @@ TEST(TestAutogradNotImplementedFallback, TensorlistOp) {
|
||||
|
||||
ASSERT_THROWS_WITH(
|
||||
torch::autograd::grad({out}, {vec[0]}),
|
||||
"One of the differentiated Tensors does not require grad");
|
||||
"element 0 of the input tensors does not require grad");
|
||||
ASSERT_THROWS_WITH(
|
||||
torch::autograd::grad({out}, {vec[1]}), "is not implemented");
|
||||
|
||||
|
@ -126,7 +126,9 @@ static variable_list run_backward(
|
||||
}
|
||||
TORCH_CHECK(
|
||||
input.requires_grad(),
|
||||
"One of the differentiated Tensors does not require grad");
|
||||
"element ",
|
||||
i,
|
||||
" of the input tensors does not require grad");
|
||||
if (!grad_fn) {
|
||||
// See NOTE [ Autograd Unreachable Input ] for details
|
||||
output_edges.emplace_back(std::make_shared<Identity>(), 0);
|
||||
@ -149,7 +151,9 @@ static variable_list run_backward(
|
||||
for (const auto i : c10::irange(num_inputs)) {
|
||||
TORCH_CHECK(
|
||||
grad_inputs[i].defined(),
|
||||
"One of the "
|
||||
"element ",
|
||||
i,
|
||||
"of the "
|
||||
"differentiated Tensors appears to not have been used "
|
||||
"in the graph. Set allow_unused=True if this is the "
|
||||
"desired behavior.");
|
||||
|
Reference in New Issue
Block a user