Mirror of https://github.com/pytorch/pytorch.git (synced 2025-10-20 21:14:14 +08:00)
Revert "Record view stacks if running anomaly mode (#103185)"
This reverts commit a02c573a8996d5d47585410ceaf81c87104cfd43. Reverted https://github.com/pytorch/pytorch/pull/103185 on behalf of https://github.com/izaitsevfb due to Breaks internal builds, see D46629734 ([comment](https://github.com/pytorch/pytorch/pull/103185#issuecomment-1588258206))
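For context, the change being reverted (#103185) captured a stack trace at view creation while anomaly mode is enabled, so that errors involving that view could report where it was created (the creation_traceback_ member removed below). A minimal Python-side sketch of the kind of scenario it targeted; this example is illustrative only and is not taken from the commit, and the specific no_grad-view error path is an assumption:

import torch

# Illustrative sketch: anomaly mode must be enabled before the view is
# created for the reverted feature to have recorded the creation stack.
torch.autograd.set_detect_anomaly(True)

base = torch.ones(3, requires_grad=True)
with torch.no_grad():
    view = base[:2]  # view created in no_grad mode; its CreationMeta is recorded

# In-place mutation of such a view with grad mode enabled raises a RuntimeError
# about how the view was created; with #103185, anomaly mode additionally
# attached the view's creation traceback to that error.
view.add_(1.0)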
@@ -7,7 +7,6 @@
 #include <torch/csrc/autograd/edge.h>
 #include <torch/csrc/autograd/forward_grad.h>
 #include <torch/csrc/autograd/function_hook.h>
-#include <torch/csrc/profiler/combined_traceback.h>
 
 #include <ATen/NamedTensorUtils.h>
 #include <ATen/core/Tensor.h>
@@ -595,7 +594,6 @@ struct TORCH_API DifferentiableViewMeta : public AutogradMeta {
   /// version_counter.current_version().
   uint32_t attr_version_;
   CreationMeta creation_meta_;
-  std::shared_ptr<torch::CapturedTraceback> creation_traceback_;
 
  public:
   /// requires_grad is a backward AD field so we only use the view specific
@@ -637,13 +635,12 @@ struct TORCH_API DifferentiableViewMeta : public AutogradMeta {
     return creation_meta_;
   }
 
-  const std::shared_ptr<torch::CapturedTraceback>& get_creation_traceback()
-      const {
-    return creation_traceback_;
+  void set_creation_meta(CreationMeta new_creation_meta) {
+    TORCH_CHECK(
+        has_bw_view(), "creation_meta can only exist for backward views.");
+    creation_meta_ = new_creation_meta;
   }
 
-  void set_creation_meta(CreationMeta new_creation_meta);
-
   bool has_fw_view() const {
     return shared_view_info_ || forward_info_.has_value();
   }