mirror of
https://github.com/pytorch/pytorch.git
synced 2025-10-20 21:14:14 +08:00
Refactor distributed to use absolute header path (#85780)
Headers under torch/csrc/distributed may be referenced with a relative path, e.g., "<c10d/...>". However, relative paths cannot be gracefully handled by the Meta internal build when the NCCL PG is hipified to support AMD/RCCL, because the "hipified" header files are generated in other directories. Moreover, using absolute paths for header inclusion is the state of the art in most components of PyTorch. Thus, this patch refactors all header paths in torch/csrc/distributed to be absolute. See D39835774 for more details about the Meta internal compilation. **How to test**: commit 9e5d199 removes -I./torch/csrc/distributed from the compile options. Thus, use it to verify that we don't miss any relative-path use of torch/csrc/distributed headers. Pull Request resolved: https://github.com/pytorch/pytorch/pull/85780 Approved by: https://github.com/kumpera, https://github.com/huydhn
This commit is contained in:
@ -10,12 +10,12 @@
|
||||
#include <ATen/core/ivalue_inl.h>
|
||||
#include <c10/macros/Macros.h>
|
||||
#include <c10/util/intrusive_ptr.h>
|
||||
#include <c10d/ProcessGroup.hpp>
|
||||
#include <c10d/Utils.hpp>
|
||||
#include <c10d/comm.hpp>
|
||||
#include <c10d/debug.h>
|
||||
#include <c10d/reducer_timer.hpp>
|
||||
#include <c10d/default_comm_hooks.hpp>
|
||||
#include <torch/csrc/distributed/c10d/ProcessGroup.hpp>
|
||||
#include <torch/csrc/distributed/c10d/Utils.hpp>
|
||||
#include <torch/csrc/distributed/c10d/comm.hpp>
|
||||
#include <torch/csrc/distributed/c10d/debug.h>
|
||||
#include <torch/csrc/distributed/c10d/reducer_timer.hpp>
|
||||
#include <torch/csrc/distributed/c10d/default_comm_hooks.hpp>
|
||||
#include <torch/csrc/autograd/function.h>
|
||||
#include <torch/csrc/autograd/profiler.h>
|
||||
#include <torch/csrc/autograd/variable.h>
|
||||
|
Reference in New Issue
Block a user