mirror of
https://github.com/pytorch/pytorch.git
synced 2025-10-21 05:34:18 +08:00
Make optimizer not complain about parameters with requires_grad=False (#7419)
This commit is contained in:
Committed by: Soumith Chintala
parent
6fd252ccae
commit
f43e067128
@@ -189,8 +189,6 @@ class Optimizer(object):
                 if not isinstance(param, torch.Tensor):
                     raise TypeError("optimizer can only optimize Tensors, "
                                     "but one of the params is " + torch.typename(param))
-                if not param.requires_grad:
-                    raise ValueError("optimizing a parameter that doesn't require gradients")
                 if not param.is_leaf:
                     raise ValueError("can't optimize a non-leaf Tensor")
Reference in New Issue
Block a user