Optim package docstring fix (#129086)

Fix docstrings in various files in the optim package. This is the last remaining fix for issue #112593.

The fix can be verified by running `pydocstyle path-to-file --count`.
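
For example, for the SGD module shown in the diff below (the torch/optim/sgd.py path is inferred from the diff, not stated in the commit message), the check would be `pydocstyle torch/optim/sgd.py --count`; after this fix the reported violation count for each changed file should be zero.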

Fixes #112593

Related #128248

Pull Request resolved: https://github.com/pytorch/pytorch/pull/129086
Approved by: https://github.com/janeyx99
Author: Sahdev Zala
Date: 2024-06-21 14:30:53 +00:00
Committed by: PyTorch MergeBot
Parent: b697808056
Commit: 9795dba1e0
8 changed files with 76 additions and 55 deletions
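
For context on the diff below, the suppressed pydocstyle codes are D101 (missing docstring in a public class), D105 (missing docstring in a magic method), and D107 (missing docstring in __init__); rewording "Performs ..." to "Perform ..." satisfies D401 (first line of a docstring should be in the imperative mood). A minimal sketch of the same pattern on a hypothetical class, not taken from this PR:

    # Hypothetical example (not from this PR) illustrating the pydocstyle
    # codes touched by this change.
    class Example:  # noqa: D101
        def __init__(self):  # noqa: D107
            self.value = 0

        def __repr__(self):  # noqa: D105
            return f"Example({self.value})"

        def step(self):
            """Perform a single step."""  # imperative mood, satisfies D401
            self.value += 1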


@@ -1,4 +1,5 @@
 # mypy: allow-untyped-defs
+r"""Implementation for Stochastic Gradient Descent optimizer."""
 from typing import List, Optional
 import torch
@@ -18,7 +19,7 @@ from .optimizer import (
 __all__ = ["SGD", "sgd"]
-class SGD(Optimizer):
+class SGD(Optimizer):  # noqa: D101
     def __init__(
         self,
         params,
@@ -32,7 +33,7 @@ class SGD(Optimizer):
         foreach: Optional[bool] = None,
         differentiable: bool = False,
         fused: Optional[bool] = None,
-    ):
+    ):  # noqa: D107
         if lr < 0.0:
             raise ValueError(f"Invalid learning rate: {lr}")
         if momentum < 0.0:
@@ -73,7 +74,7 @@ class SGD(Optimizer):
             if foreach:
                 raise RuntimeError("`fused` and `foreach` cannot be `True` together.")
-    def __setstate__(self, state):
+    def __setstate__(self, state):  # noqa: D105
         super().__setstate__(state)
         for group in self.param_groups:
             group.setdefault("nesterov", False)
@@ -100,7 +101,7 @@ class SGD(Optimizer):
     @_use_grad_for_differentiable
     def step(self, closure=None):
-        """Performs a single optimization step.
+        """Perform a single optimization step.
         Args:
             closure (Callable, optional): A closure that reevaluates the model
@@ -264,7 +265,6 @@ def sgd(
     See :class:`~torch.optim.SGD` for details.
     """
     # Respect when the user inputs False/True for foreach or fused. We only want to change
    # the default when neither have been user-specified. Note that we default to foreach
    # and pass False to use_fused. This is not a mistake--we want to give the fused impl