Revert "Allow get attributes on DDP similar to FSDP (#128620)"

This reverts commit 065c386990dce444db17eff7b254bf79e82450ef.

Reverted https://github.com/pytorch/pytorch/pull/128620 on behalf of https://github.com/jeanschmidt due to Reverting in order to see if the trunk error on inductor is fixed ([comment](https://github.com/pytorch/pytorch/pull/128620#issuecomment-2200717876))
This commit is contained in:
PyTorch MergeBot
2024-07-01 17:56:59 +00:00
parent bb0f3df562
commit b02186ffc1

View File

@ -1236,13 +1236,6 @@ class DistributedDataParallel(Module, Joinable):
# passing a handle to torch.nn.SyncBatchNorm layer
self._passing_sync_batchnorm_handle(self.module)
def __getattr__(self, name: str) -> Any:
    """Forward missing attributes to the wrapped module.

    ``nn.Module.__getattr__`` is consulted first (parameters, buffers,
    submodules); anything it cannot resolve is looked up on the wrapped
    ``self.module`` instead, so DDP is attribute-transparent like FSDP.
    """
    try:
        # Let nn.Module's own attribute machinery answer first.
        attr = super().__getattr__(name)
    except AttributeError:
        # Not known to nn.Module — delegate to the wrapped module.
        attr = getattr(self.module, name)
    return attr
def __getstate__(self):
self._check_default_group()
attrs = copy.copy(self.__dict__)