Follows #125102

Pull Request resolved: https://github.com/pytorch/pytorch/pull/130109
Approved by: https://github.com/ezyang
22 lines · 832 B · C++
#include <torch/csrc/distributed/rpc/request_callback.h>

#include <torch/csrc/distributed/autograd/context/container.h>
#include <torch/csrc/distributed/autograd/utils.h>

namespace torch::distributed::rpc {

using namespace torch::distributed::autograd;

c10::intrusive_ptr<JitFuture> RequestCallback::operator()(
    Message& request,
    std::vector<c10::Stream> streams) const {
  // NB: cannot clear autograd context id here because the processMessage
  // method might pause waiting for all RRefs in the arguments to be
  // confirmed by their owners and resume processing in a different thread.
  // Hence, the thread_local context id needs to be set and cleared in the
  // thread that indeed carries out the processing logic.
  return processMessage(request, std::move(streams));
}

} // namespace torch::distributed::rpc
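The NB comment describes a general thread_local pitfall that is easy to reproduce outside the RPC stack: state stored in a thread_local variable by the dispatching thread is invisible to whichever thread eventually carries out the work. Below is a minimal, self-contained sketch (hypothetical names, not PyTorch code) of that pattern, with a plain thread_local int standing in for the autograd context id.

#include <future>
#include <iostream>

// Stand-in for the thread_local autograd context id (hypothetical example).
thread_local int context_id = -1;

// Reads the calling thread's own copy of the thread_local, roughly as the
// processing logic would read the autograd context id.
int process() {
  return context_id;
}

int main() {
  context_id = 42; // set in the dispatching thread only

  // Wrong: the worker thread never saw the assignment above, so it
  // observes the per-thread default value (-1).
  std::cout << std::async(std::launch::async, process).get() << '\n';

  // Right: set and clear the id inside the thread that does the work,
  // mirroring the advice in the NB comment.
  auto fut = std::async(std::launch::async, [] {
    context_id = 42;        // set in the worker thread
    int result = process();
    context_id = -1;        // clear in the worker thread
    return result;
  });
  std::cout << fut.get() << '\n'; // prints 42
}

This is why operator() does not touch the context id at the call site: since processMessage may suspend and resume on a different thread, only the thread that ultimately runs the processing logic can safely set and clear it.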