Mirror of https://github.com/pytorch/pytorch.git, synced 2025-10-20 21:14:14 +08:00
Revert "[Distributed] [2/N] Fix clang-tidy warnings in torch/csrc/distributed/c10d (#122892)"
This reverts commit 0ba16ffd35af3eb56da4892cc5387c5e8ac864bb. Reverted https://github.com/pytorch/pytorch/pull/122892 on behalf of https://github.com/atalman because it broke CUDA tests ([comment](https://github.com/pytorch/pytorch/pull/122892#issuecomment-2037207036))
@@ -51,7 +51,7 @@ class TORCH_API Reducer {
   explicit Reducer(
       std::vector<at::Tensor> params,
       std::vector<std::vector<size_t>> bucket_indices,
-      const std::vector<size_t>& per_bucket_size_limits,
+      std::vector<size_t> per_bucket_size_limits,
       c10::intrusive_ptr<c10d::ProcessGroup> process_group,
       std::vector<bool> expect_sparse_gradients,
       int64_t bucket_bytes_cap,
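The first hunk restores the by-value per_bucket_size_limits constructor parameter of the Reducer class (declared in torch/csrc/distributed/c10d/reducer.hpp); the reverted clang-tidy PR had changed it to a const reference. A minimal sketch of the trade-off between the two signatures, with hypothetical function names not taken from the PyTorch API:

#include <cstddef>
#include <utility>
#include <vector>

// By-value parameter: the callee gets its own copy and may move from it;
// a caller passing an rvalue pays only a move, not a copy.
void byValue(std::vector<std::size_t> limits) {
  std::vector<std::size_t> owned = std::move(limits);
  (void)owned;
}

// Const-reference parameter: no copy at the call site, but the callee must
// copy explicitly if it needs to keep the data beyond the call.
void byConstRef(const std::vector<std::size_t>& limits) {
  std::vector<std::size_t> owned = limits;  // copies here instead
  (void)owned;
}

int main() {
  byValue({64, 128});  // rvalue argument: moved into the parameter
  std::vector<std::size_t> caps{64, 128};
  byConstRef(caps);    // lvalue argument: no copy at the call site
}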
@@ -303,9 +303,11 @@ class TORCH_API Reducer {
   using GradCallback = std::function<bool(at::Tensor&)>;
 #ifndef _WIN32
   static_assert(
-      std::is_same_v<
+      std::is_same<
           GradCallback,
-          torch::distributed::autograd::DistAutogradContext::GradCallback>);
+          torch::distributed::autograd::DistAutogradContext::GradCallback>::
+          value,
+      "");
 #endif
   void runGradCallbackForVariable(at::Tensor& variable, GradCallback&& cb);
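The second hunk swaps the C++17 std::is_same_v variable template back to the pre-C++17 std::is_same<...>::value form and restores the (empty) message argument, since single-argument static_assert is itself a C++17 feature. A minimal standalone sketch of the equivalence, with alias names invented for illustration:

#include <functional>
#include <type_traits>

// Mirrors the pattern in the hunk: both spellings assert that two
// std::function aliases name the same type. std::is_same_v<A, B> (C++17)
// is defined as std::is_same<A, B>::value, so the checks are identical.
using GradCallback = std::function<bool(int&)>;
using OtherAlias = std::function<bool(int&)>;

static_assert(std::is_same_v<GradCallback, OtherAlias>);           // C++17 form
static_assert(std::is_same<GradCallback, OtherAlias>::value, "");  // pre-C++17 form

int main() {}

Both forms compile to the same compile-time check; the reverted spelling also builds under pre-C++17 toolchains.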