mirror of
https://github.com/pytorch/pytorch.git
synced 2025-10-20 21:14:14 +08:00
Fix some issues for lite interpreter internal build. (#29620)
Summary: Pull Request resolved: https://github.com/pytorch/pytorch/pull/29620 Modify buck for lite interpreter to build successfully on internal integration. ghstack-source-id: 93733618 Test Plan: buck build xplat/caffe2:torch_mobile_coreAndroid Reviewed By: iseeyuan Differential Revision: D18438105 fbshipit-source-id: d6f6615623a385383105763733607c3872c89c42
This commit is contained in:
committed by
Facebook Github Bot
parent
bd0394d473
commit
422fbfb108
@ -21,21 +21,6 @@
|
||||
|
||||
namespace torch {
|
||||
namespace autograd {
|
||||
// Constructs the autograd metadata attached to a tensor.
//
// self_impl:     the TensorImpl this metadata belongs to; must be non-null
//                when requires_grad is true (asserted below).
// requires_grad: whether gradient tracking is requested for this tensor.
// gradient_edge: its `function` is moved into grad_fn_ and its `input_nr`
//                becomes output_nr_.
//
// NOTE(review): requires_grad_ is deliberately initialized to false and only
// flipped via set_requires_grad(), so the final TORCH_CHECK can verify the
// "leaf xor grad_fn" invariant regardless of the path taken.
AutogradMeta::AutogradMeta(at::TensorImpl* self_impl, bool requires_grad, Edge gradient_edge) {
  grad_fn_ = std::move(gradient_edge.function);
  requires_grad_ = false;
  is_view_ = false;
  output_nr_ = gradient_edge.input_nr;

  // set_requires_grad also checks error conditions.
  if (requires_grad) {
    TORCH_INTERNAL_ASSERT(self_impl);
    set_requires_grad(requires_grad, self_impl);
  }
  // A tensor with a grad_fn is an interior node of the graph and must not
  // itself require grad (only leaves do).
  TORCH_CHECK(
      !grad_fn_ || !requires_grad_,
      "requires_grad should be false if grad_fn is set");
}
|
||||
|
||||
std::shared_ptr<Node> Variable::grad_accumulator() const {
|
||||
auto autograd_meta = get_autograd_meta();
|
||||
|
@ -370,10 +370,21 @@ struct TORCH_API AutogradMeta : public c10::AutogradMetaInterface {
|
||||
return grad_;
|
||||
}
|
||||
|
||||
// Constructor declaration; see the corresponding definition for the
// initialization logic. Defaults allow default-constructing the metadata
// for a detached, non-grad-requiring tensor.
AutogradMeta(
    at::TensorImpl* self_impl = nullptr,
    bool requires_grad = false,
    Edge gradient_edge = Edge());
|
||||
/// Builds autograd metadata for a tensor.
///
/// \param self_impl     owning TensorImpl; must be non-null whenever
///                      \p requires_grad is true (asserted below).
/// \param requires_grad request gradient tracking for this tensor.
/// \param gradient_edge source of grad_fn_ (its `function`, moved) and of
///                      output_nr_ (its `input_nr`).
AutogradMeta(
    at::TensorImpl* self_impl = nullptr,
    bool requires_grad = false,
    Edge gradient_edge = Edge())
    : grad_fn_(std::move(gradient_edge.function)),
      requires_grad_(false),
      is_view_(false),
      output_nr_(gradient_edge.input_nr) {
  // Route the flag through set_requires_grad, which also performs the
  // associated error checking.
  if (requires_grad) {
    TORCH_INTERNAL_ASSERT(self_impl);
    set_requires_grad(requires_grad, self_impl);
  }
  TORCH_CHECK(
      !grad_fn_ || !requires_grad_,
      "requires_grad should be false if grad_fn is set");
}
|
||||
};
|
||||
|
||||
//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
@ -21,7 +21,11 @@ class TORCH_API Module {
|
||||
/// Wraps a scripted object together with the CompilationUnit that owns its
/// methods.
///
/// Both parameters are sinks taken by value; they are moved (not copied)
/// into the members, avoiding an extra intrusive_ptr/shared_ptr refcount
/// bump. Also drops the stray `;` that followed the constructor body.
Module(c10::intrusive_ptr<c10::ivalue::Object> object,
       std::shared_ptr<CompilationUnit> cu)
    : object_(std::move(object)), cu_(std::move(cu)) {}
|
||||
// Creates an empty module with no object/compilation unit; it must be
// assigned from a loaded module before any method can be run.
Module() {}
// Runs the method named `method_name`, using `stack` as its argument list.
// NOTE(review): Stack is in/out by convention here — `stack` is taken by
// non-const reference, so it is presumably consumed/overwritten; confirm
// against the definition.
c10::IValue run_method(const std::string& method_name, Stack& stack);
// Convenience wrapper: invokes the module's "forward" method with `inputs`
// as the stack (and therefore may mutate `inputs` — see run_method).
c10::IValue forward(std::vector<c10::IValue>& inputs) {
  return run_method("forward", inputs);
}
|
||||
// Looks up a method of this module by name.
// NOTE(review): returns a non-owning pointer; presumably nullptr when the
// method does not exist — confirm against the definition.
Function* find_method(const std::string& basename) const;
 private:
  // The underlying scripted object holding this module's state.
  c10::intrusive_ptr<c10::ivalue::Object> object_;
|
||||
|
Reference in New Issue
Block a user