[BE]: Try TCH autofixes on torch/ (#125536)

Tries the ruff TCH (flake8-type-checking) autofixes, which move imports used only in type annotations behind if TYPE_CHECKING: blocks, and sees what breaks.
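As a minimal sketch of what such an autofix does (a hypothetical example, not taken verbatim from this PR; it reuses the RemovableHandle import that the diff below touches): an import referenced only in annotations is moved behind a TYPE_CHECKING guard, and the annotation must then be a string unless the module uses postponed evaluation of annotations.

# Hypothetical sketch of a TCH autofix, not code from this PR.

# Before: imported at runtime, but only used in a type hint.
from torch.utils.hooks import RemovableHandle

def register_hook(hook) -> RemovableHandle: ...

# After: imported only for type checkers; the annotation becomes a string
# (or the module adds `from __future__ import annotations`).
from typing import TYPE_CHECKING

if TYPE_CHECKING:
    from torch.utils.hooks import RemovableHandle

def register_hook(hook) -> "RemovableHandle": ...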

Pull Request resolved: https://github.com/pytorch/pytorch/pull/125536
Approved by: https://github.com/ezyang
Author: Aaron Gokaslan
Date: 2024-05-05 23:13:54 +00:00
Committed by: PyTorch MergeBot
Parent: ccbac091d2
Commit: 1dd42e42c4
49 changed files with 255 additions and 110 deletions


@@ -11,14 +11,13 @@ from collections import defaultdict, deque
 from contextlib import contextmanager
 from dataclasses import dataclass, fields, is_dataclass
 from enum import auto, Enum
-from typing import Any, Callable, List, Optional, Tuple, Type
+from typing import Any, Callable, List, Optional, Tuple, Type, TYPE_CHECKING

 import torch
 import torch.distributed as dist
 from torch.autograd import Function, Variable
 from torch.distributed.algorithms.join import Join, Joinable, JoinHook
 from torch.utils._pytree import tree_flatten, tree_unflatten
-from torch.utils.hooks import RemovableHandle

 RPC_AVAILABLE = False
 if dist.is_available():
@@ -44,6 +43,9 @@ from torch._utils import _get_device_index
 from ..modules import Module
 from .scatter_gather import gather, scatter_kwargs  # noqa: F401

+if TYPE_CHECKING:
+    from torch.utils.hooks import RemovableHandle
+
 __all__ = ["DistributedDataParallel"]

 logger = logging.getLogger(__name__)
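As a follow-up illustration of what this hunk changes at runtime (a hypothetical class, not part of the PR): after the fix, RemovableHandle is bound only while a type checker is running, so it may appear in string or postponed annotations but any runtime reference to the name raises NameError.

from typing import TYPE_CHECKING, Dict

if TYPE_CHECKING:
    from torch.utils.hooks import RemovableHandle

class HookOwner:  # hypothetical class used for illustration only
    def __init__(self) -> None:
        # Fine: the quoted annotation is never evaluated at runtime.
        self._handles: Dict[int, "RemovableHandle"] = {}

    def is_handle(self, obj) -> bool:
        # Breaks: RemovableHandle is not bound at runtime, so this raises NameError.
        return isinstance(obj, RemovableHandle)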