Revert "Always build USE_DISTRIBUTED. (#160449)"

This reverts commit b7034e9c924412bfbe8ee25a22d7e95239b5ca65.

Reverted https://github.com/pytorch/pytorch/pull/160449 on behalf of https://github.com/jeanschmidt due to breaking internal builds; it can't be landed with a forward fix due to internal tooling problems ([comment](https://github.com/pytorch/pytorch/pull/160449#issuecomment-3246689684))
This commit is contained in:
PyTorch MergeBot
2025-09-02 20:28:42 +00:00
parent 420c52ecf3
commit 4e42aa8ffc
28 changed files with 213 additions and 120 deletions

View File

@ -1225,7 +1225,7 @@ std::shared_ptr<SugaredValue> toSugaredValue(
} else if (obj.ptr() == py::module::import("torch").attr("_check").ptr()) {
return std::make_shared<TorchCheckValue>();
#ifdef USE_RPC
// This is not defined on WINDOWS
// RPC module is only available when build flag "USE_DISTRIBUTED" is on.
} else if (
isRpcAvailable &&
obj.ptr() ==
@ -1238,6 +1238,7 @@ std::shared_ptr<SugaredValue> toSugaredValue(
return SpecialFormValue::create(prim::rpc_sync);
} else if (
isRpcAvailable &&
// RPC module is only available when build flag "USE_DISTRIBUTED" is on.
obj.ptr() ==
py::module::import("torch.distributed.rpc").attr("remote").ptr()) {
return SpecialFormValue::create(prim::rpc_remote);