Reland "Make debug_pkl smaller by only emitting unique traces." (#73368)
Summary:

## Original commit message

Pull Request resolved: https://github.com/pytorch/pytorch/pull/73368

The debug_pkl file inside PyTorch's .pt archive consists of a list of SourceRanges. Each SourceRange points to a Source, which is a stack trace, a filename, and start/end numbers. These were emitted into debug_pkl as strings. Since many SourceRanges share the same Source, the trace strings can be deduplicated. The newer format saves the set of unique traces in a tuple, and each SourceRange then stores the offset of its trace within that tuple (i.e., dictionary compression applied by hand). This reduces the file size.

On loading, if each trace were copied into a Source as a string, runtime memory would still blow up. To mitigate this, SourceView is used directly instead of Source: it takes a reference to the string held inside the Deserializer and exposes it as a string_view. This is safe because the Deserializer is held by the Unpickler via shared_ptr, and the Unpickler is in turn held via shared_ptr by another Source object, which stays alive for the duration of model construction.

Test Plan:

## Original test plan

Unit tests.

Took the original file (312271638_930.predictor.disagg.local), loaded it with `torch.jit.load`, and saved it again with `torch.jit.save`. Unzipped both archives and compared their contents:

```
[qihan@devvm5585.vll0 ~]$ du archive -h
4.0K    archive/xl_model_weights
3.7M    archive/extra
8.0K    archive/code/__torch__/caffe2/torch/fb/model_transform/splitting
8.0K    archive/code/__torch__/caffe2/torch/fb/model_transform
8.0K    archive/code/__torch__/caffe2/torch/fb
8.0K    archive/code/__torch__/caffe2/torch
8.0K    archive/code/__torch__/caffe2
20M     archive/code/__torch__/torch/fx/graph_module
20M     archive/code/__torch__/torch/fx
8.0K    archive/code/__torch__/torch/classes
20M     archive/code/__torch__/torch
20M     archive/code/__torch__
20M     archive/code
2.7M    archive/constants
35M     archive
[qihan@devvm5585.vll0 ~]$ du resaved -h
4.0K    resaved/extra
8.0K    resaved/code/__torch__/caffe2/torch/fb/model_transform/splitting
8.0K    resaved/code/__torch__/caffe2/torch/fb/model_transform
8.0K    resaved/code/__torch__/caffe2/torch/fb
8.0K    resaved/code/__torch__/caffe2/torch
8.0K    resaved/code/__torch__/caffe2
1.3M    resaved/code/__torch__/torch/fx/graph_module
1.3M    resaved/code/__torch__/torch/fx
8.0K    resaved/code/__torch__/torch/classes
1.4M    resaved/code/__torch__/torch
1.4M    resaved/code/__torch__
1.4M    resaved/code
2.7M    resaved/constants
13M     resaved
[qihan@devvm5585.vll0 ~]$
```

## Additional test

`buck test mode/dev-tsan //caffe2/benchmarks/static_runtime:static_runtime_cpptest -- --exact 'caffe2/benchmarks/static_runtime:static_runtime_cpptest - StaticRuntime.to'` passes.

test jest.fbios.startup_cold_start.local.simulator f333356873

Differential Revision: D35196883

Pull Request resolved: https://github.com/pytorch/pytorch/pull/74869
Approved by: https://github.com/gmagogsfm
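As a standalone illustration of the dictionary-compression step described above (not PyTorch code; the `TextTable` and `intern` names are made up for this sketch), the core of the new format is an interning table much like `store_text_and_get_index` in the diff below: each unique trace string is written once, and every record refers to it by index.

```cpp
#include <cstdint>
#include <iostream>
#include <string>
#include <unordered_map>
#include <vector>

// Illustrative stand-in for the string table kept by the serializer:
// each unique text is stored once; callers keep only its index.
class TextTable {
 public:
  int64_t intern(const std::string& text) {
    auto it = text_to_idx_.find(text);
    if (it != text_to_idx_.end()) {
      return it->second;  // already stored; reuse the existing slot
    }
    int64_t pos = static_cast<int64_t>(texts_.size());
    texts_.push_back(text);
    text_to_idx_[text] = pos;
    return pos;
  }

  const std::vector<std::string>& texts() const {
    return texts_;
  }

 private:
  std::vector<std::string> texts_;
  std::unordered_map<std::string, int64_t> text_to_idx_;
};

int main() {
  TextTable table;
  // Two ranges that point at the same trace intern to the same index,
  // so the trace text would be written to the archive only once.
  int64_t a = table.intern("def forward(self, x):\n    return x + 1\n");
  int64_t b = table.intern("def forward(self, x):\n    return x + 1\n");
  std::cout << a << " " << b << " table size=" << table.texts().size() << "\n";
}
```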
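The loading-side mitigation can be sketched in the same spirit: instead of copying every trace into each Source, a non-owning view points into strings owned elsewhere, and a shared_ptr pins that owner for as long as the view's holder is alive. This is only an analogy for the Source/SourceView/Unpickler ownership chain described above; `TraceStore` and `SourceLike` are invented names.

```cpp
#include <iostream>
#include <memory>
#include <string>
#include <string_view>
#include <vector>

// The "deserializer" owns the unique trace strings.
struct TraceStore {
  std::vector<std::string> traces;
};

// Each "source" holds a cheap view plus a shared_ptr that keeps the
// owning store alive, so no per-range string copies are made.
struct SourceLike {
  std::string_view trace;
  std::shared_ptr<TraceStore> owner;
};

int main() {
  auto store = std::make_shared<TraceStore>();
  store->traces.push_back("File \"model.py\", line 12, in forward");

  SourceLike s{store->traces[0], store};
  store.reset();  // SourceLike still keeps the store (and the view's target) alive

  std::cout << s.trace << "\n";
}
```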
@@ -1,54 +1,97 @@
#include <torch/csrc/jit/serialization/source_range_serialization.h>
#include <torch/csrc/jit/serialization/source_range_serialization_impl.h>

#include <c10/util/Exception.h>
#include <c10/util/Flags.h>
#include <torch/csrc/jit/mobile/type_parser.h>
#include <torch/csrc/jit/serialization/pickle.h>
#include <algorithm>

namespace torch {
namespace jit {

// "Whether to emit compact debug_pkl when saving a model to .pt file."
// "Compact file is smaller but cannot be loaded by old torch binaries."
// TODO(qihan) remove when all binaries are using string table.
thread_local bool should_use_format_with_string_table_ = false;

class SourceRangeSerializer {
 public:
  // Serialize SourceRange as Tuple[SourceType, int, int]
  // where SourceType = Tuple[str, Optional[str], int, List[int]],
  // where SourceType = Tuple[int, int, int, List[int]],
  // The first 2 ints are positions into the vector returned by textSaved
  // after all the Ranges are processed. textSaved() returns a vector of str
  // the serialized form of Source
  c10::IValue serialize(const SourceRange& sr);

  const std::vector<c10::IValue>& texts_saved() {
    return texts_;
  }

  SourceRangeSerializer() {
    texts_.emplace_back("");
    text_to_idx_[texts_.back().toStringRef()] = 0;
  }

 private:
  // Serialize Source as Tuple[str, Optional[str], int, List[int]]
  // This caches serialized sources, since many SourceRanges can
  // refer to the same one.
  c10::IValue serialize_source(const std::shared_ptr<SourceView>& s);
  c10::IValue serialize_source(const std::shared_ptr<Source>& s);
  std::unordered_map<std::shared_ptr<Source>, c10::IValue> serialized_sources;

  std::unordered_map<std::shared_ptr<SourceView>, c10::IValue>
      serialized_sources;
  int64_t store_text_and_get_index(const std::string& text_view);

  std::vector<c10::IValue> texts_;
  std::unordered_map<c10::string_view, int64_t> text_to_idx_;
};

SourceRange SourceRangeDeserializer::deserialize(const c10::IValue& iv) {
  const auto& tup_elems = iv.toTupleRef().elements();
  TORCH_INTERNAL_ASSERT(tup_elems.size() == 3);
  std::shared_ptr<SourceView> source_ = deserialize_source(tup_elems[0]);
  std::shared_ptr<Source> source_ = deserialize_source(tup_elems[0]);
  int64_t start_ = tup_elems[1].toInt();
  int64_t end_ = tup_elems[2].toInt();
  return SourceRange(source_, start_, end_);
}

std::shared_ptr<SourceView> SourceRangeDeserializer::deserialize_source(
std::shared_ptr<Source> SourceRangeDeserializer::deserialize_source(
    const c10::IValue& iv) {
  auto tup = iv.toTuple();
  auto it = cached_sources.find(tup);
  if (it != cached_sources.end()) {
    return it->second;
  }

  std::shared_ptr<Source> source;
  const auto& tup_elems = tup->elements();
  TORCH_INTERNAL_ASSERT(tup_elems.size() == 3);
  std::string text_ = tup_elems[0].toString()->string();
  c10::optional<std::string> filename_ = tup_elems[1].toOptional<std::string>();
  int64_t starting_line_no_ = tup_elems[2].toInt();
  if (!text_table_.empty()) {
    const auto& textIndex = tup_elems[0].toIntList();
    int64_t fnameIndex = tup_elems[1].toInt();
    int64_t starting_line_no_ = tup_elems[2].toInt();
    c10::optional<std::string> filename = c10::nullopt;

  auto source = std::make_shared<Source>(
      std::move(text_), std::move(filename_), starting_line_no_);
    filename = *text_table_[fnameIndex];

    std::vector<c10::string_view> pieces;
    std::vector<std::shared_ptr<std::string>> strs;

    for (int64_t i : textIndex) {
      pieces.emplace_back(*text_table_[i]);
      strs.emplace_back(text_table_[i]);
    }

    StringCordView str_cord(std::move(pieces), std::move(strs));

    source = std::make_shared<Source>(str_cord, filename, starting_line_no_);
  } else {
    std::string text_ = tup_elems[0].toString()->string();
    c10::optional<std::string> filename_ =
        tup_elems[1].toOptional<std::string>();
    int64_t starting_line_no_ = tup_elems[2].toInt();
    source = std::make_shared<Source>(
        std::move(text_), std::move(filename_), starting_line_no_);
  }
  cached_sources[tup] = source;
  return source;
}
@@ -58,17 +101,50 @@ c10::IValue SourceRangeSerializer::serialize(const SourceRange& sr) {
      serialize_source(sr.source()), (int64_t)sr.start(), (int64_t)sr.end());
}

int64_t SourceRangeSerializer::store_text_and_get_index(
    const std::string& text_view) {
  auto text_iter = text_to_idx_.find(text_view);
  if (text_iter == text_to_idx_.end()) {
    int64_t text_pos = static_cast<int64_t>(texts_.size());
    texts_.emplace_back(text_view);
    text_to_idx_[texts_.back().toStringView()] = text_pos;
    return text_pos;
  } else {
    return text_iter->second;
  }
}

c10::IValue SourceRangeSerializer::serialize_source(
    const std::shared_ptr<SourceView>& s) {
    const std::shared_ptr<Source>& s) {
  if (serialized_sources.count(s)) {
    return serialized_sources.at(s);
  }
  c10::intrusive_ptr<c10::ivalue::Tuple> serialized;
  if (s == nullptr) {
    serialized = c10::ivalue::Tuple::create({"", "", 0});
  c10::List<int64_t> lines;
  if (should_use_format_with_string_table_) {
    if (s == nullptr) {
      serialized = c10::ivalue::Tuple::create({lines, 0, 0});
    } else {
      for (size_t lineno = 0; lineno < s->num_lines(); lineno++) {
        std::string line_content = s->get_line(lineno).str();
        int64_t text_pos = store_text_and_get_index(line_content);
        lines.push_back(text_pos);
      }

      int64_t fname_pos = 0;
      if (s->filename().has_value()) {
        fname_pos = store_text_and_get_index(*s->filename());
      }
      serialized = c10::ivalue::Tuple::create(
          {lines, fname_pos, (int64_t)s->starting_line_no()});
    }
  } else {
    serialized = c10::ivalue::Tuple::create(
        {s->text(), s->filename(), (int64_t)s->starting_line_no()});
    if (s == nullptr) {
      serialized = c10::ivalue::Tuple::create({"", "", 0});
    } else {
      serialized = c10::ivalue::Tuple::create(
          {s->text_str().str(), s->filename(), (int64_t)s->starting_line_no()});
    }
  }
  serialized_sources[s] = serialized;
  return serialized;
@@ -86,14 +162,24 @@ std::vector<char> SourceRangePickler::pickle(
    if (it != source_range_tags.end()) {
      source_range_tag = it->second;
    }

    ivalues.emplace_back(c10::ivalue::Tuple::create(
        {(int64_t)range.bytes,
         srs->serialize(range.range),
         static_cast<int64_t>(source_range_tag)}));
  }

  std::vector<at::Tensor> table;
  auto textTable = c10::ivalue::Tuple::create(srs->texts_saved());
  auto ivalue = c10::ivalue::Tuple::create(std::move(ivalues));
  auto result = jit::pickle(ivalue, &table);
  std::vector<char> result;
  if (should_use_format_with_string_table_) {
    result = jit::pickle(
        c10::ivalue::Tuple::create({kFormatWithStringTable, textTable, ivalue}),
        &table);
  } else {
    result = jit::pickle(ivalue, &table);
  }
  TORCH_CHECK(table.size() == 0, "Expected 0 tensors to be written");
  return result;
}
@@ -103,7 +189,7 @@ ConcreteSourceRangeUnpickler::ConcreteSourceRangeUnpickler(
    size_t size)
    : data(std::move(data)),
      size(size),
      deserializer(new SourceRangeDeserializer()),
      deserializer(nullptr),
      unpickled_records(nullptr) {}

void ConcreteSourceRangeUnpickler::unpickle() {
@@ -119,10 +205,19 @@ void ConcreteSourceRangeUnpickler::unpickle() {
          {},
          c10::parseType)
          .toTuple();
  const auto& ivalues = ivaluesTuple->elements();

  const auto& ivalues = ivaluesTuple->elements();
  unpickled_records = std::make_shared<SourceRangeRecords>();
  for (auto& val : ivalues) {
  IValue lines;
  if (ivalues[0].isString() &&
      kFormatWithStringTable == ivalues[0].toStringRef()) {
    deserializer.reset(new SourceRangeDeserializer(ivalues[1]));
    lines = ivalues[2];
  } else {
    deserializer.reset(new SourceRangeDeserializer());
    lines = ivaluesTuple;
  }
  for (auto& val : lines.toTuple()->elements()) {
    const auto& tup_elems = val.toTupleRef().elements();
    int64_t offset = tup_elems[kByteOffsetIndex].toInt();
    auto source_range = deserializer->deserialize(tup_elems[kSourceRangeIndex]);
@@ -152,5 +247,10 @@ c10::optional<SourceRange> ConcreteSourceRangeUnpickler::
  return c10::nullopt;
}

TORCH_API void setShouldUseFormatWithStringTable(
    bool should_use_format_with_string_table) {
  should_use_format_with_string_table_ = should_use_format_with_string_table;
}

} // namespace jit
} // namespace torch
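For reference, the compact format is opt-in via the thread-local flag that this diff exposes through `setShouldUseFormatWithStringTable`. A minimal usage sketch follows; it assumes the declaration is reachable through `torch/csrc/jit/serialization/source_range_serialization.h` (the header included at the top of this file), which may differ in your build, and it accepts that files written this way cannot be read by older torch binaries.

```cpp
#include <torch/script.h>
// Assumed include path for the TORCH_API declaration added in this diff.
#include <torch/csrc/jit/serialization/source_range_serialization.h>

int main() {
  torch::jit::Module m = torch::jit::load("model.pt");

  // Opt in to the compact debug_pkl (string-table) format for this thread.
  torch::jit::setShouldUseFormatWithStringTable(true);
  m.save("model_compact.pt");

  // Switch back to the legacy format if later saves must stay compatible
  // with older torch binaries.
  torch::jit::setShouldUseFormatWithStringTable(false);
  return 0;
}
```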