Revert D30117838: [WIP] Gate DistributedOptimizers on RPC availability

Test Plan: revert-hammer

Differential Revision: D30117838 (3f09485d7e)

Original commit changeset: e6365a910a3d

fbshipit-source-id: f276b2b2bdf5f7bd27df473fca0eebaee9f7aef2
Author: Natalia Gimelshein
Committed: 2021-08-06 22:09:23 -07:00 (by Facebook GitHub Bot)
Commit: b45cf9b81b
Parent: e6a3154519

4 changed files with 32 additions and 33 deletions


@@ -14,7 +14,7 @@ from typing import Any, Callable, Dict, List, NamedTuple, Optional, Type, Union
 import torch
 import torch.distributed as dist
 from torch.distributed.algorithms.join import Join, Joinable, JoinHook
-from torch.distributed.optim import functional_optim_map
+from torch.distributed.optim import DistributedOptimizer
 from torch.optim import Optimizer

 __all__ = ["ZeroRedundancyOptimizer"]
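Note: the line this hunk restores reaches the optimizer map through DistributedOptimizer, whose import is exactly what the reverted patch tried to gate on RPC availability. A minimal sketch of that gating idea follows, assuming torch.distributed.rpc.is_available() as the check; the actual mechanism inside torch.distributed.optim is not shown in this diff.

```python
# Sketch of the gating idea behind the reverted change, not the actual
# torch.distributed.optim implementation.
import torch.distributed.rpc as rpc

if rpc.is_available():
    # With RPC present, DistributedOptimizer is importable and its class-level
    # functional_optim_map can be reused -- the state this revert restores.
    from torch.distributed.optim import DistributedOptimizer
    functional_optim_map = DistributedOptimizer.functional_optim_map
else:
    # Without RPC, DistributedOptimizer cannot be used, so the map would have
    # to live somewhere importable on its own.
    functional_optim_map = {}
```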
@@ -309,6 +309,8 @@ class ZeroRedundancyOptimizer(Optimizer, Joinable):
     """
+    functional_optim_map = DistributedOptimizer.functional_optim_map
+
     def __init__(
         self,
         params,
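The restored class attribute is a plain dict mapping standard torch.optim classes to their functional counterparts; ZeroRedundancyOptimizer only reads from it. A hedged usage sketch (the .get fallback to the original class is an illustrative choice, not behavior taken from this diff):

```python
import torch
from torch.distributed.optim import DistributedOptimizer

# Look up the functional counterpart of a regular optimizer class.
# The map's exact contents vary by PyTorch version; falling back to the
# original class when there is no entry is an assumption for illustration.
optimizer_class = torch.optim.SGD
functional_cls = DistributedOptimizer.functional_optim_map.get(
    optimizer_class, optimizer_class
)
print(f"{optimizer_class.__name__} -> {functional_cls.__name__}")
```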
@@ -1335,6 +1337,7 @@ class ZeroRedundancyOptimizer(Optimizer, Joinable):
             - if ``overlap_with_ddp=False`` and ``optimizer_class`` is a
               functional optimizer.
         """
+        functional_optim_map = ZeroRedundancyOptimizer.functional_optim_map
         functional_optims = functional_optim_map.values()
        if not self._overlap_with_ddp:
             if optimizer_class in functional_optims:
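The local alias added above feeds the membership test that follows it: with overlap_with_ddp=False, a functional optimizer class is rejected. A minimal standalone sketch of that validation, with the helper name (_check_optimizer_class) and the ValueError wording as assumptions, since the full method body is not part of this hunk:

```python
from torch.distributed.optim import DistributedOptimizer


def _check_optimizer_class(optimizer_class, overlap_with_ddp):
    # Mirror of the membership test in the hunk above: functional optimizers
    # are only meaningful when the step is overlapped with DDP communication.
    functional_optims = DistributedOptimizer.functional_optim_map.values()
    if not overlap_with_ddp and optimizer_class in functional_optims:
        raise ValueError(
            f"{optimizer_class} is a functional optimizer, which is only "
            "supported with overlap_with_ddp=True"
        )
    return optimizer_class
```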