Always build USE_DISTRIBUTED. (#160449)

Signed-off-by: Edward Yang <ezyang@meta.com>

Pull Request resolved: https://github.com/pytorch/pytorch/pull/160449
Approved by: https://github.com/wconstab, https://github.com/albanD, https://github.com/dcci
Authored by Edward Yang on 2025-09-04 12:58:51 -04:00, committed by PyTorch MergeBot
parent 6087ef41e5
commit de893e96c7
29 changed files with 124 additions and 213 deletions


@@ -2,10 +2,6 @@
 import torch
 import torch.distributed as dist
 from torch.autograd import Function
-# The two imports below are not always available depending on the
-# USE_DISTRIBUTED compile flag. Make sure they raise import error
-# if we're trying to use them.
 from torch.distributed import group, ReduceOp
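
With USE_DISTRIBUTED always built, imports such as group and ReduceOp no longer need to be guarded against ImportError; whether a process group is actually usable becomes a runtime question. The following is a minimal sketch (not part of this patch) of that pattern; the helper name all_reduce_if_initialized is hypothetical, while dist.is_available(), dist.is_initialized(), dist.all_reduce(), group.WORLD, and ReduceOp are existing public torch.distributed APIs.

import torch
import torch.distributed as dist
# With USE_DISTRIBUTED always on, these imports are present in every build
# and do not need an ImportError guard.
from torch.distributed import ReduceOp, group


def all_reduce_if_initialized(t: torch.Tensor) -> torch.Tensor:
    # is_available() reports whether the distributed package was compiled in
    # (now always True); is_initialized() reports whether a process group has
    # actually been set up for this run, e.g. via dist.init_process_group().
    if dist.is_available() and dist.is_initialized():
        dist.all_reduce(t, op=ReduceOp.SUM, group=group.WORLD)
    return t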