Guard TensorPipe agent by USE_TENSORPIPE (#42682)

Summary:
Pull Request resolved: https://github.com/pytorch/pytorch/pull/42682

ghstack-source-id: 109834351

Test Plan: CI

Reviewed By: malfet

Differential Revision: D22978717

fbshipit-source-id: 18b7cbdb532e78ff9259e82f0f92ad279124419d
This commit is contained in:
Luca Wehrstedt
2020-08-14 02:55:04 -07:00
committed by Facebook GitHub Bot
parent ccd9f3244b
commit ed242cbec5
7 changed files with 44 additions and 3 deletions

View File

@ -279,9 +279,18 @@ if(NOT INTERN_BUILD_MOBILE OR NOT BUILD_CAFFE2_MOBILE)
target_link_libraries(process_group_agent PRIVATE torch c10d fmt::fmt-header-only)
add_dependencies(process_group_agent torch c10d)
# Define this target even if we're building without TensorPipe, to make life
# easier for other targets that depend on this. However, in that case, by not
# setting the USE_TENSORPIPE compile definition, this target will just end
# up being empty. Downstream targets should also add an #ifdef guard.
add_library(tensorpipe_agent "${TORCH_SRC_DIR}/csrc/distributed/rpc/tensorpipe_agent.cpp" "${TORCH_SRC_DIR}/csrc/distributed/rpc/tensorpipe_agent.h")
target_link_libraries(tensorpipe_agent PRIVATE torch c10d tensorpipe fmt::fmt-header-only)
add_dependencies(tensorpipe_agent torch c10d tensorpipe)
add_dependencies(tensorpipe_agent torch c10d)
if(USE_TENSORPIPE)
target_compile_definitions(tensorpipe_agent PUBLIC USE_TENSORPIPE)
target_link_libraries(tensorpipe_agent PRIVATE tensorpipe)
add_dependencies(tensorpipe_agent tensorpipe)
endif()
endif()
set(CMAKE_POSITION_INDEPENDENT_CODE TRUE)

View File

@ -3,10 +3,12 @@ set(TORCH_RPC_TEST_SOURCES
${TORCH_ROOT}/test/cpp/common/main.cpp
${TORCH_RPC_TEST_DIR}/e2e_test_base.cpp
${TORCH_RPC_TEST_DIR}/test_wire_serialization.cpp
${TORCH_RPC_TEST_DIR}/test_tensorpipe_serialization.cpp
${TORCH_RPC_TEST_DIR}/test_e2e_process_group.cpp
${TORCH_RPC_TEST_DIR}/test_e2e_tensorpipe.cpp
)
set(TORCH_RPC_TEST_DEPENDENCY_LIBS
torch c10d gtest process_group_agent tensorpipe_agent
)
if(USE_GLOO)
list(APPEND TORCH_RPC_TEST_SOURCES
@ -14,11 +16,20 @@ if(USE_GLOO)
)
endif()
if(USE_TENSORPIPE)
list(APPEND TORCH_RPC_TEST_SOURCES
${TORCH_RPC_TEST_DIR}/test_tensorpipe_serialization.cpp
)
list(APPEND TORCH_RPC_TEST_DEPENDENCY_LIBS
tensorpipe
)
endif()
add_executable(test_cpp_rpc ${TORCH_RPC_TEST_SOURCES})
target_include_directories(
test_cpp_rpc PRIVATE
${ATen_CPU_INCLUDE})
target_link_libraries(test_cpp_rpc PRIVATE torch c10d tensorpipe gtest process_group_agent tensorpipe_agent)
target_link_libraries(test_cpp_rpc PRIVATE ${TORCH_RPC_TEST_DEPENDENCY_LIBS})
if(USE_CUDA)
target_link_libraries(test_cpp_rpc PRIVATE

View File

@ -13,6 +13,8 @@ namespace rpc {
using namespace torch::distributed::autograd;
#ifdef USE_TENSORPIPE
class TestE2ETensorPipe : public TestE2EBase {
protected:
void buildRpcAgent() override {
@ -50,6 +52,8 @@ TEST_F(TestE2ETensorPipe, TestTrainingLoop) {
runTrainingLoop();
}
#endif
} // namespace rpc
} // namespace distributed
} // namespace torch

View File

@ -87,6 +87,8 @@ set(TORCH_PYTHON_LINK_LIBRARIES
set(TORCH_PYTHON_COMPILE_DEFINITIONS)
set(TORCH_PYTHON_PUBLIC_COMPILE_DEFINITIONS)
set(TORCH_PYTHON_COMPILE_OPTIONS)
set(TORCH_PYTHON_LINK_FLAGS "")
@ -167,6 +169,7 @@ if(USE_DISTRIBUTED)
list(APPEND TORCH_PYTHON_COMPILE_DEFINITIONS USE_C10D)
if(USE_TENSORPIPE)
list(APPEND TORCH_PYTHON_LINK_LIBRARIES tensorpipe)
list(APPEND TORCH_PYTHON_PUBLIC_COMPILE_DEFINITIONS USE_TENSORPIPE)
endif()
endif()
endif()
@ -264,6 +267,8 @@ target_link_libraries(torch_python ${TORCH_PYTHON_LINK_LIBRARIES})
target_compile_definitions(torch_python PRIVATE ${TORCH_PYTHON_COMPILE_DEFINITIONS})
target_compile_definitions(torch_python PUBLIC ${TORCH_PYTHON_PUBLIC_COMPILE_DEFINITIONS})
target_compile_options(torch_python PRIVATE ${TORCH_PYTHON_COMPILE_OPTIONS})
target_include_directories(torch_python PUBLIC ${TORCH_PYTHON_INCLUDE_DIRECTORIES})

View File

@ -455,6 +455,8 @@ PyObject* rpc_init(PyObject* /* unused */) {
&ProcessGroupAgent::sync,
py::call_guard<py::gil_scoped_release>());
#ifdef USE_TENSORPIPE
// Base class: torch.distributed.rpc.RpcBackendOptions.
py::class_<TensorPipeRpcBackendOptions>(
module,
@ -552,6 +554,8 @@ PyObject* rpc_init(PyObject* /* unused */) {
TensorPipeAgent::getWorkerInfos,
py::call_guard<py::gil_scoped_release>());
#endif // USE_TENSORPIPE
module.def("_is_current_rpc_agent_set", &RpcAgent::isCurrentRpcAgentSet);
module.def("_get_current_rpc_agent", &RpcAgent::getCurrentRpcAgent);

View File

@ -1,5 +1,7 @@
#include <torch/csrc/distributed/rpc/tensorpipe_agent.h>
#ifdef USE_TENSORPIPE
#include <limits>
#include <fmt/format.h>
@ -1025,3 +1027,5 @@ void TensorPipeAgent::markFutureWithError(
} // namespace rpc
} // namespace distributed
} // namespace torch
#endif // USE_TENSORPIPE

View File

@ -1,5 +1,7 @@
#pragma once
#ifdef USE_TENSORPIPE
#include <atomic>
#include <thread>
@ -341,3 +343,5 @@ class TensorPipeAgent : public RpcAgent {
} // namespace rpc
} // namespace distributed
} // namespace torch
#endif // USE_TENSORPIPE