Make optimizer not complain about parameters with requires_grad=False (#7419)

Domagoj Alagić
2018-05-09 17:34:52 +02:00
committed by Soumith Chintala
parent 6fd252ccae
commit f43e067128

@@ -189,8 +189,6 @@ class Optimizer(object):
             if not isinstance(param, torch.Tensor):
                 raise TypeError("optimizer can only optimize Tensors, "
                                 "but one of the params is " + torch.typename(param))
-            if not param.requires_grad:
-                raise ValueError("optimizing a parameter that doesn't require gradients")
             if not param.is_leaf:
                 raise ValueError("can't optimize a non-leaf Tensor")