"""
|
|
:mod:`torch.optim` is a package implementing various optimization algorithms.
|
|
Most commonly used methods are already supported, and the interface is general
|
|
enough, so that more sophisticated ones can be also easily integrated in the
|
|
future.
|
|
"""

from .adadelta import Adadelta
from .adagrad import Adagrad
from .adam import Adam
from .adamw import AdamW
from .sparse_adam import SparseAdam
from .adamax import Adamax
from .asgd import ASGD
from .sgd import SGD
from .rprop import Rprop
from .rmsprop import RMSprop
from .optimizer import Optimizer
from .lbfgs import LBFGS
from . import lr_scheduler
from . import swa_utils

# The relative imports above also bind the submodule names (adadelta, adagrad,
# ...) in this namespace; remove them so the package exposes the optimizer
# classes rather than the implementation modules.
del adadelta
del adagrad
del adam
del adamw
del sparse_adam
del adamax
del asgd
del sgd
del rprop
del rmsprop
del optimizer
del lbfgs