[Opt Overlap] Clean up code in _OptimizerHookState (#71620)
Summary:
Pull Request resolved: https://github.com/pytorch/pytorch/pull/71620

Remove from_functional_optim and make it the default constructor, since that is now the only way _OptimizerHookState is built. This also removes the need to expose the create_functional_optim helper function.

ghstack-source-id: 147577174

Test Plan: CI

Reviewed By: cbalioglu

Differential Revision: D33700593

fbshipit-source-id: ba089ce3bf66ccf8f71cffdd0f4d4bddc03e8b14
(cherry picked from commit a50b2caf0e19f9793fbf18b371d30e3dd8c5c0cf)
Committed by: PyTorch MergeBot
Parent: 1c8fcc44cb
Commit: bdcdf94bdd
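A minimal sketch of the constructor cleanup described in the summary, using a simplified stand-in for _OptimizerHookState; the placeholder functional optimizer and field names are illustrative assumptions, not the actual PyTorch implementation:

# Illustrative sketch only: a simplified stand-in for _OptimizerHookState showing
# the shape of the change. The real class lives in PyTorch's DDP comm-hook code;
# the placeholder functional optimizer and field names here are assumptions.

class _FunctionalOptimizerStub:
    """Hypothetical placeholder for a functional optimizer such as a functional SGD."""
    def __init__(self, lr=0.01):
        self.lr = lr


class _OptimizerHookStateBefore:
    # Before: construction went through a classmethod factory.
    def __init__(self, functional_optimizer):
        self.functional_optimizer = functional_optimizer

    @classmethod
    def from_functional_optim(cls, functional_optim):
        return cls(functional_optim)


class _OptimizerHookStateAfter:
    # After: the factory was the only construction path, so it becomes __init__
    # and from_functional_optim (and the create_functional_optim helper) go away.
    def __init__(self, functional_optim):
        self.functional_optimizer = functional_optim


# Callers now build the hook state directly from a functional optimizer.
state = _OptimizerHookStateAfter(_FunctionalOptimizerStub(lr=0.01))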
@@ -5,7 +5,7 @@ import torch.nn as nn
 import torch.nn.functional as F
 from torch.optim import SGD, Adam, AdamW
 from torch.testing._internal.common_utils import TestCase, run_tests
-from torch.distributed.optim import functional_optim_map
+from torch.distributed.optim.utils import functional_optim_map
 
 
 class MyModule(torch.nn.Module):
     def __init__(self):