Mirror of https://github.com/pytorch/pytorch.git (synced 2025-10-20 21:14:14 +08:00)
Summary:
Pull Request resolved: https://github.com/pytorch/pytorch/pull/14269

Removes the reference to Context proper and instead adds a bool argument for async copy (the same as `copy_`).

For CopyFrom I haven't tweaked all call sites yet. Instead I rely on a terrible hack that a pointer to context is implicitly converted to bool when passed, haha :) This is not good code and I propose fixing it in a follow-up diff (maybe using clangr tooling).

Reviewed By: ezyang

Differential Revision: D13117981

fbshipit-source-id: 7cb1dc2ba6a4c50ac26614f45ab8318ea96e3138
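The implicit conversion mentioned in the summary is easy to reproduce in isolation. The snippet below is a minimal standalone sketch, not the Caffe2 sources: the `Tensor` and `Context` types are simplified stand-ins, but they show why a call site that still passes a `Context*` keeps compiling once the second parameter of `CopyFrom` becomes a plain `bool` async flag.

#include <iostream>

struct Context {};  // stand-in for a Caffe2 device context

struct Tensor {
  // New-style signature: the second parameter is a plain bool async flag.
  void CopyFrom(const Tensor& /*src*/, bool async = false) {
    std::cout << "CopyFrom, async = " << std::boolalpha << async << "\n";
  }
};

int main() {
  Tensor src, dst;
  Context ctx;

  // Intended call after the change: pass the flag explicitly.
  dst.CopyFrom(src, true /*async*/);

  // Legacy call site that still passes a Context*: the non-null pointer
  // converts implicitly to bool (true), so old code keeps compiling.
  // This is the "terrible hack" the summary proposes to clean up later.
  dst.CopyFrom(src, &ctx);  // behaves as async = true
  return 0;
}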
26 lines
548 B
C++
#ifndef CAFFE2_OPERATORS_STOP_GRADIENT_H_
#define CAFFE2_OPERATORS_STOP_GRADIENT_H_

#include "caffe2/core/operator.h"

namespace caffe2 {

template <class Context>
class StopGradientOp : public Operator<Context> {
 public:
  USE_SIMPLE_CTOR_DTOR(StopGradientOp)
  USE_OPERATOR_CONTEXT_FUNCTIONS;
  bool RunOnDevice() override {
    const auto& in = Input(0);
    auto* out = Output(0);
    if (out != &in) {
      out->CopyFrom(in, true /*async*/);
    }
    return true;
  }
};

} // namespace caffe2

#endif // CAFFE2_OPERATORS_STOP_GRADIENT_H_
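StopGradientOp simply copies its input into its output (or leaves it aliased), and its purpose is to prevent any gradient from flowing back through that output. As a rough orientation only, and assuming the usual Caffe2 layout rather than quoting the actual source tree, the class template above would be instantiated for a concrete device in a companion .cc file along these lines:

#include "caffe2/core/context.h"
#include "caffe2/operators/stop_gradient.h"

namespace caffe2 {

// Assumed registration sketch: bind the template to the CPU context so the
// operator can be created by name ("StopGradient") at runtime.
REGISTER_CPU_OPERATOR(StopGradient, StopGradientOp<CPUContext>);

} // namespace caffe2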