Mirror of https://github.com/pytorch/pytorch.git (synced 2025-10-21 05:34:18 +08:00)
Revert "Always build USE_DISTRIBUTED. (#160449)"
This reverts commit b7034e9c924412bfbe8ee25a22d7e95239b5ca65. Reverted https://github.com/pytorch/pytorch/pull/160449 on behalf of https://github.com/jeanschmidt due to Breaking internal builds, can't be landed with forward fix due to internal tooling problems ([comment](https://github.com/pytorch/pytorch/pull/160449#issuecomment-3246689684))
@@ -120,12 +120,14 @@
 #endif
 #endif
 
+#ifdef USE_DISTRIBUTED
 #ifdef USE_C10D
 #include <torch/csrc/distributed/autograd/python_autograd.h>
 #include <torch/csrc/distributed/c10d/c10d.h>
 #include <torch/csrc/distributed/rpc/rpc.h>
 #include <torch/csrc/distributed/rpc/testing/testing.h>
 #endif
+#endif
 
 #if defined(USE_VALGRIND)
 #include <callgrind.h>
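
For context, the nested guards this hunk restores gate the distributed headers on both build flags at once. A minimal sketch of the equivalence (plain preprocessor logic, no PyTorch headers needed):

// The nested form restored above:
#ifdef USE_DISTRIBUTED
#ifdef USE_C10D
// distributed headers are reached only when both flags are defined
#endif
#endif

// is equivalent, for inclusion purposes, to the combined condition that
// the final hunk of this diff writes explicitly:
#if defined(USE_DISTRIBUTED) && defined(USE_C10D)
// same headers
#endif
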
@@ -550,7 +552,11 @@ static PyObject* THPModule_getBackcompatKeepdimWarn(
 }
 
 static PyObject* THPModule_hasDistributed(PyObject* _unused, PyObject* noargs) {
+#ifdef USE_DISTRIBUTED
   Py_RETURN_TRUE;
+#else
+  Py_RETURN_FALSE;
+#endif
 }
 
 static PyObject* THPModule_showConfig(PyObject* module, PyObject* noargs) {
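
The hunk above restores the compile-time gate inside THPModule_hasDistributed, so builds without USE_DISTRIBUTED report the feature as absent instead of unconditionally returning true. For readers unfamiliar with the pattern, here is a minimal, self-contained sketch of a CPython extension doing the same thing; the module name demo and the build command are illustrative, not part of PyTorch:

// Compile as a CPython extension, e.g. (command is illustrative):
//   g++ -shared -fPIC $(python3-config --includes) demo.cpp -o demo.so
// Add -DUSE_DISTRIBUTED to flip the reported result.
#include <Python.h>

static PyObject* demo_hasDistributed(PyObject* /*self*/, PyObject* /*noargs*/) {
#ifdef USE_DISTRIBUTED
  Py_RETURN_TRUE;   // feature compiled in
#else
  Py_RETURN_FALSE;  // feature compiled out
#endif
}

static PyMethodDef demo_methods[] = {
    {"has_distributed", demo_hasDistributed, METH_NOARGS,
     "Return True if built with -DUSE_DISTRIBUTED."},
    {nullptr, nullptr, 0, nullptr}  // sentinel entry
};

static PyModuleDef demo_module = {
    PyModuleDef_HEAD_INIT, "demo", nullptr, -1, demo_methods,
    nullptr, nullptr, nullptr, nullptr};

PyMODINIT_FUNC PyInit_demo(void) {
  return PyModule_Create(&demo_module);
}

A check like this is what lets callers probe for the feature at runtime (PyTorch surfaces such a check as torch.distributed.is_available(), for example) rather than failing at import time.
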
@@ -1987,7 +1993,7 @@ PyObject* initModule() {
 #ifdef USE_XPU
   THPUtils_addPyMethodDefs(methods, THXPModule_methods());
 #endif
-#ifdef USE_C10D
+#if defined(USE_DISTRIBUTED) && defined(USE_C10D)
   THPUtils_addPyMethodDefs(
       methods, torch::distributed::c10d::python_functions());
 #ifndef _WIN32
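The final hunk narrows the registration of the c10d bindings in initModule, where methods accumulates PyMethodDef entries from each optional subsystem. Below is a hedged sketch of that accumulation pattern, assuming a helper that appends a null-terminated PyMethodDef array while keeping a single trailing sentinel; addMethodDefs and the stub tables are hypothetical stand-ins for THPUtils_addPyMethodDefs and the real subsystem tables:

#include <Python.h>
#include <vector>

// Hypothetical stand-in for THPUtils_addPyMethodDefs: append a
// null-terminated PyMethodDef array, keeping one trailing sentinel.
static void addMethodDefs(std::vector<PyMethodDef>& table,
                          const PyMethodDef* defs) {
  if (!table.empty() && table.back().ml_name == nullptr) {
    table.pop_back();  // drop the previous sentinel before appending
  }
  for (; defs->ml_name != nullptr; ++defs) {
    table.push_back(*defs);
  }
  table.push_back({nullptr, nullptr, 0, nullptr});  // re-add the sentinel
}

// Illustrative stub tables, not PyTorch's.
static PyObject* ping(PyObject*, PyObject*) { Py_RETURN_NONE; }
static PyMethodDef core_methods[] = {
    {"ping", ping, METH_NOARGS, nullptr},
    {nullptr, nullptr, 0, nullptr}};

#if defined(USE_DISTRIBUTED) && defined(USE_C10D)
static PyObject* dist_ping(PyObject*, PyObject*) { Py_RETURN_NONE; }
static PyMethodDef distributed_methods[] = {
    {"dist_ping", dist_ping, METH_NOARGS, nullptr},
    {nullptr, nullptr, 0, nullptr}};
#endif

static std::vector<PyMethodDef> collectMethods() {
  std::vector<PyMethodDef> methods;
  addMethodDefs(methods, core_methods);
#if defined(USE_DISTRIBUTED) && defined(USE_C10D)
  // Registered only when both features were compiled in, mirroring the
  // guard restored in the hunk above.
  addMethodDefs(methods, distributed_methods);
#endif
  return methods;
}

The effect of the revert is that the extension module only exposes the c10d functions when the whole distributed stack was enabled at build time, not merely when c10d was.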