Clean up usage of torch._six partially (#49785)

Summary:
See https://github.com/pytorch/pytorch/issues/42919

Pull Request resolved: https://github.com/pytorch/pytorch/pull/49785

Reviewed By: mruberry

Differential Revision: D25963833

Pulled By: bugra

fbshipit-source-id: 11c90d6b8d3f206c9d0a4d8621b773beb10c6ba2
Author: Chester Liu
Date: 2021-02-08 13:56:12 -08:00
Committed by: Facebook GitHub Bot
Parent: 97e35858ec
Commit: 58eb23378f
29 changed files with 72 additions and 100 deletions


@@ -14,11 +14,11 @@ import warnings
 from textwrap import dedent
 import torch
 import sys
+import builtins
 # This is needed. `torch._jit_internal` is imported before `torch.distributed.__init__`.
 # Explicitly ask to import `torch.distributed.__init__` first.
 # Otherwise, "AttributeError: module 'torch' has no attribute 'distributed'" is raised.
 import torch.distributed.rpc
-from torch._six import builtins
 from torch._utils_internal import get_source_lines_and_file
 from torch.futures import Future
 from typing import Tuple, List, Dict, Optional, Union, Any, TypeVar, Generic, Callable  # noqa: F401
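
For context: torch._six is PyTorch's old Python 2/3 compatibility layer, and issue #42919 tracks removing its usages now that Python 2 support is dropped. The hunk above shows the basic migration pattern for the builtins shim. Below is a minimal sketch of that same pattern outside the PR; the read_lines helper is hypothetical and only for illustration.

# Before: the compatibility shim re-exported the standard module.
#   from torch._six import builtins
# After: with Python 2 gone, the standard-library module is imported directly
# and behaves identically.
import builtins

def read_lines(path):
    # Hypothetical helper, not from the PR. builtins.open is the ordinary
    # built-in open(), spelled via the module so a local name cannot shadow it.
    with builtins.open(path) as f:
        return f.read().splitlines()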