Files
pytorch/torch/csrc/autograd/cpp_hook.cpp
Amir Khojaste 748790588c Upgrading the loop to use irange (#70326)
Summary:
Pull Request resolved: https://github.com/pytorch/pytorch/pull/70326

See D24145988 for context: it allows loops such as for(int i=0;i<10;i++) to be expressed as for(const auto i : c10::irange(10)). This is nice because it auto-types the loops and adds const-safety to the iteration variable.

Test Plan: buck run //caffe2/torch/fb/sparsenn:test

Reviewed By: r-barnes

Differential Revision: D33243400

fbshipit-source-id: b1f1b4163f4bf662031baea9e5268459b40c69a3
2022-01-06 07:06:53 -08:00

46 lines
1.3 KiB
C++

#include <c10/util/irange.h>
#include <torch/csrc/autograd/cpp_hook.h>
#include <torch/csrc/autograd/variable.h>
#include <torch/csrc/autograd/custom_function.h>
namespace {

using torch::autograd::Variable;

// Validates that `result`, produced by the hook identified by `hook_name`,
// is an acceptable replacement for the incoming gradient `value`.
// Throws std::runtime_error if `value` is undefined (a hook must not turn
// an empty gradient into a non-empty one); shape/type compatibility checks
// are delegated to torch::autograd::check_variable_result().
// `hook_name` is used only for error reporting and is taken by const
// reference to avoid a per-call std::string copy.
void check_single_result (const at::TensorBase &value, const at::TensorBase &result, const std::string &hook_name) {
if (!value.defined()) {
throw std::runtime_error("can't replace an empty gradient with a non-empty value");
}
torch::autograd::check_variable_result(value, result, hook_name);
}
} // anonymous namespace
namespace torch { namespace autograd {

// Builds a pre-hook that will run every callable in `hooks` against the
// gradient stored at position `value_idx` of the incoming variable list.
// NOLINTNEXTLINE(modernize-pass-by-value)
CppFunctionPreHook::CppFunctionPreHook(const std::shared_ptr<hooks_list> &hooks, int value_idx)
    : hooks_(hooks), value_idx_(value_idx) {}

// Applies each registered hook, in order, to values[value_idx_]. A hook may
// return an undefined tensor to signal "leave the gradient unchanged"; a
// defined result is validated and becomes the input to the next hook. The
// final gradient is spliced into a copy of `values`, which is returned.
variable_list CppFunctionPreHook::operator()(const variable_list& values) {
  auto grad = values[value_idx_];
  for (const auto idx : c10::irange(hooks_->size())) {
    auto& fn = (*hooks_)[idx];
    if (fn) {  // a null entry means the hook was removed; skip its slot
      auto replacement = fn(grad);
      if (replacement.defined()) {
        // hook index doubles as its name in error messages
        check_single_result(grad, replacement, c10::to_string(idx));
        grad = std::move(replacement);
      }
      // undefined replacement: keep the current gradient as-is
    }
  }
  variable_list out(values);
  out[value_idx_] = grad;
  return out;
}

}} // namespace torch::autograd