Enable distributed package on windows, Gloo backend supported only (#42897)

Summary:
Fixes https://github.com/pytorch/pytorch/issues/42095

Test cases will be added to this PR in a later commit.

mrshenli, please help to review

Pull Request resolved: https://github.com/pytorch/pytorch/pull/42897

Reviewed By: osalpekar

Differential Revision: D23841786

Pulled By: mrshenli

fbshipit-source-id: 334ba1ed73eff2f668857390fc32d1bc7f08e5f3
This commit is contained in:
gunandrose4u
2020-09-24 21:12:16 -07:00
committed by Facebook GitHub Bot
parent c6500bcf14
commit 0122299f9b
39 changed files with 462 additions and 167 deletions

View File

@@ -23,7 +23,7 @@
 #include <torch/csrc/jit/runtime/profiling_record.h>
 #include <torch/csrc/jit/runtime/vararg_functions.h>
-#ifdef USE_DISTRIBUTED
+#ifdef USE_RPC
 #include <torch/csrc/distributed/autograd/context/container.h>
 using torch::distributed::autograd::DistAutogradContainer;
 #endif
@@ -267,7 +267,7 @@ void insertLastUses(Graph& g) {
 }
 inline int64_t getDistAutogradContextId() {
-#ifdef USE_DISTRIBUTED
+#ifdef USE_RPC
 return DistAutogradContainer::currentContextId();
 #else
 return 0;
@@ -1690,7 +1690,7 @@ InterpreterState::InterpreterState(
 : pImpl(std::move(pImpl_)) {}
 void InterpreterContinuation::operator()() {
-#ifdef USE_DISTRIBUTED
+#ifdef USE_RPC
 auto prev_dist_id = DistAutogradContainer::currentContextId();
 DistAutogradContainer::forceCurrentContextId(dist_autograd_context_id_);
 #endif
@@ -1700,7 +1700,7 @@ void InterpreterContinuation::operator()() {
 } else {
 state.runAsync(stack);
 }
-#ifdef USE_DISTRIBUTED
+#ifdef USE_RPC
 DistAutogradContainer::forceCurrentContextId(prev_dist_id);
 #endif
 }