mirror of
https://github.com/pytorch/pytorch.git
synced 2025-10-20 12:54:11 +08:00
Revert "shrink_group implementation to expose ncclCommShrink API (#164518)"
This reverts commit fa0db212e717b6cb225159cb32ea3d83baa52381. Reverted https://github.com/pytorch/pytorch/pull/164518 on behalf of https://github.com/pytorch-auto-revert due to Reverted automatically by pytorch's autorevert, to avoid this behaviour add the tag autorevert: disable ([comment](https://github.com/pytorch/pytorch/pull/164518#issuecomment-3419893217))
This commit is contained in:
@@ -90,10 +90,6 @@ static_assert(
 #define NCCL_HAS_NVLS_CTAS
 #endif
 
-#if NCCL_VERSION_CODE >= NCCL_VERSION(2, 27, 0)
-#define NCCL_HAS_COMM_SHRINK
-#endif
-
 // Macro to throw on a non-successful NCCL return value.
 #define C10D_NCCL_CHECK(cmd, failureReason) \
   do { \
@@ -298,14 +294,6 @@ class NCCLComm {
       ncclConfig_t& config);
 #endif // NCCL_HAS_COMM_SPLIT
 
-#ifdef NCCL_HAS_COMM_SHRINK
-  static std::shared_ptr<NCCLComm> shrink(
-      NCCLComm* source,
-      std::vector<int>& ranks_to_exclude,
-      ncclConfig_t* config,
-      int shrinkFlags = 0);
-#endif // NCCL_HAS_COMM_SHRINK
-
 #if (defined(IS_NCCLX) || defined(USE_ROCM)) && defined(NCCL_COMM_DUMP)
   std::unordered_map<std::string, std::string> ncclCommDump();
 #endif
Reference in New Issue
Block a user