mirror of
https://github.com/pytorch/pytorch.git
synced 2025-10-20 21:14:14 +08:00
[reland][dtensor] move DTensor to public namespace (#134203)
reland of https://github.com/pytorch/pytorch/pull/133113 I have to create a new PR because the previous reverted PR could neither be rebased nor imported successfully :( ---- Moving DTensor to be in the public namespace, to formally add the documentation page that includes all the public APIs. This includes: * many path renames and path import fixes * a dedicated doc page without too much content yet (adding in the next PRs) * To preserve BC for users still using torch.distributed._tensor, I added a shim script to redirect old path calls to the new module. The BC preservation is evidenced by the fact that all DTensor tests are still working without changing the public imports, so it's safe to land the changes. Pull Request resolved: https://github.com/pytorch/pytorch/pull/134203 Approved by: https://github.com/tianyu-l
This commit is contained in:
committed by
PyTorch MergeBot
parent
20cab91a12
commit
cfc227ad43
@ -14,7 +14,6 @@ from torch.distributed._tensor import (
|
||||
DTensor,
|
||||
init_device_mesh,
|
||||
)
|
||||
from torch.distributed._tensor.debug import CommDebugMode
|
||||
from torch.distributed._tensor.experimental import implicit_replication
|
||||
from torch.distributed._tensor.placement_types import (
|
||||
DTensorSpec,
|
||||
@ -23,6 +22,7 @@ from torch.distributed._tensor.placement_types import (
|
||||
Shard,
|
||||
TensorMeta,
|
||||
)
|
||||
from torch.distributed.tensor.debug import CommDebugMode
|
||||
from torch.distributed.tensor.parallel import (
|
||||
ColwiseParallel,
|
||||
parallelize_module,
|
||||
@ -943,7 +943,7 @@ class TestDTensorPlacementTypes(DTensorTestBase):
|
||||
]
|
||||
assert_array_equal(expected_pad_sizes, pad_sizes)
|
||||
|
||||
from torch.distributed._tensor._collective_utils import unpad_tensor
|
||||
from torch.distributed.tensor._collective_utils import unpad_tensor
|
||||
|
||||
unpadded_list = [
|
||||
unpad_tensor(tensor, shard_placement.dim, pad_sizes[i])
|
||||
|
Reference in New Issue
Block a user