Always build USE_DISTRIBUTED. (#160449)

Signed-off-by: Edward Yang <ezyang@meta.com>

Pull Request resolved: https://github.com/pytorch/pytorch/pull/160449
Approved by: https://github.com/wconstab, https://github.com/albanD, https://github.com/dcci
commit 90b08643c3
parent b0a3e58dd7
Author: Edward Yang
Committed by: PyTorch MergeBot
Date: 2025-09-02 23:34:48 -04:00

28 changed files with 122 additions and 213 deletions

@@ -2,10 +2,6 @@
 import torch
 import torch.distributed as dist
 from torch.autograd import Function
-# The two imports below are not always available depending on the
-# USE_DISTRIBUTED compile flag. Make sure they raise import error
-# if we're trying to use them.
 from torch.distributed import group, ReduceOp
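
For context, a minimal sketch of the pattern the removed comment alludes to; this code is illustrative and not taken from the PR. On builds compiled with USE_DISTRIBUTED=0, the names group and ReduceOp were absent from torch.distributed, so callers had to treat the import as optional. With the flag always on, that guard becomes unnecessary, while dist.is_available() remains the runtime check for whether collectives can actually be used.

# Illustrative sketch, not part of the commit: the optional-import pattern
# that the removed comment describes, and what it reduces to now.
import torch.distributed as dist

try:
    # On USE_DISTRIBUTED=0 builds these names did not exist, so the import
    # itself had to be guarded with try/except.
    from torch.distributed import group, ReduceOp
except ImportError:
    group, ReduceOp = None, None  # only reachable on old non-distributed builds

# With USE_DISTRIBUTED always built, the bare import always succeeds;
# whether a backend can actually run collectives is still a runtime check:
if dist.is_available():
    print(ReduceOp.SUM)  # reduce-op enum is usable without an initialized group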