Mirror of https://github.com/pytorch/pytorch.git, synced 2025-10-20 21:14:14 +08:00.

Commit: Move TypeAndSize out of /generated/ (#105195) — this avoids a circular import in the next PR. Pull Request resolved: https://github.com/pytorch/pytorch/pull/105195. Approved by: https://github.com/albanD
This commit is contained in: (committed by PyTorch MergeBot). Parent: 28d018dafd. Commit: 3fdf365397.
@ -43,18 +43,7 @@ inline c10::List<c10::optional<Tensor>> unpack_opt_list(at::ArrayRef<SavedVariab
|
||||
return result;
|
||||
}
|
||||
|
||||
struct TypeAndSize {
|
||||
TypeAndSize() : options(at::TensorOptions()) {}
|
||||
/* implicit */
|
||||
TypeAndSize(const Tensor & t)
|
||||
: sym_sizes(t.sym_sizes().vec())
|
||||
, options(t.options()) {}
|
||||
|
||||
Tensor zeros() { return at::zeros_symint(sym_sizes, options); }
|
||||
|
||||
std::vector<c10::SymInt> sym_sizes;
|
||||
at::TensorOptions options;
|
||||
};
|
||||
using torch::autograd::TypeAndSize;
|
||||
|
||||
${autograd_function_declarations}
|
||||
|
||||
|
@ -730,6 +730,21 @@ edge_list collect_next_edges(Variables&&... variables) {
|
||||
make.apply(std::forward<Variables>(variables)...);
|
||||
return std::move(make.next_edges);
|
||||
}
|
||||
|
||||
struct TypeAndSize {
|
||||
TypeAndSize() : options(at::TensorOptions()) {}
|
||||
/* implicit */
|
||||
TypeAndSize(const at::Tensor& t)
|
||||
: sym_sizes(t.sym_sizes().vec()), options(t.options()) {}
|
||||
|
||||
at::Tensor zeros() {
|
||||
return at::zeros_symint(sym_sizes, options);
|
||||
}
|
||||
|
||||
std::vector<c10::SymInt> sym_sizes;
|
||||
at::TensorOptions options;
|
||||
};
|
||||
|
||||
} // namespace autograd
|
||||
} // namespace torch
|
||||
|
||||
|
Reference in New Issue
Block a user