Make msg() and msg_with_backtrace() private (#37094)

Summary:
Pull Request resolved: https://github.com/pytorch/pytorch/pull/37094

Signed-off-by: Edward Z. Yang <ezyang@fb.com>

Test Plan: Imported from OSS

Differential Revision: D21202892

Pulled By: ezyang

fbshipit-source-id: d59e6bffabd90cc734056bdce2cd1fe63262fab8
This commit is contained in:
Edward Yang
2020-05-04 11:51:15 -07:00
committed by Facebook GitHub Bot
parent 6dd1beaaa8
commit efd8f70cac
8 changed files with 11 additions and 19 deletions

View File

@@ -34,7 +34,7 @@ TEST(LoggingTest, TestEnforceEquals) {
// This should never be triggered.
ADD_FAILURE();
} catch (const ::c10::Error& err) {
EXPECT_NE(err.msg().find("5 vs 6"), string::npos);
EXPECT_NE(std::string(err.what()).find("5 vs 6"), string::npos);
}
// arguments are expanded only once

View File

@@ -58,11 +58,6 @@ class C10_API Error : public std::exception {
void AppendMessage(const std::string& msg);
// Compute the full message from msg_ and msg_without_backtrace_
// TODO: Maybe this should be private
std::string msg() const;
std::string msg_without_backtrace() const;
const std::vector<std::string>& msg_stack() const {
return msg_stack_;
}
@@ -80,6 +75,11 @@ class C10_API Error : public std::exception {
const char* what_without_backtrace() const noexcept {
return msg_without_backtrace_.c_str();
}
private:
// Compute the full message from msg_ and msg_without_backtrace_
std::string msg() const;
std::string msg_without_backtrace() const;
};
class C10_API WarningHandler {

View File

@@ -580,7 +580,7 @@ TensorShapes InferBlobShapesAndTypes(
}
} catch (::caffe2::EnforceNotMet& enf) {
LOG(ERROR) << "Shape inference error: " << enf.msg();
LOG(ERROR) << "Shape inference error: " << enf.what();
LOG(ERROR) << "Operator: " << ProtoDebugString(op) << std::endl;
LOG(ERROR) << "Returning empty results.";

View File

@@ -114,14 +114,14 @@ TEST(OperatorTest, ExceptionWorks) {
// This should not happen - exception should throw above.
LOG(FATAL) << "This should not happen.";
} catch (const EnforceNotMet& err) {
LOG(INFO) << err.msg();
LOG(INFO) << err.what();
}
try {
op->RunAsync();
// This should not happen - exception should throw above.
LOG(FATAL) << "This should not happen.";
} catch (const EnforceNotMet& err) {
LOG(INFO) << err.msg();
LOG(INFO) << err.what();
}
}

View File

@@ -17,10 +17,6 @@ Caffe2IOSPredictor* MakeCaffe2Predictor(const std::string& init_net_str,
try {
predictor = Caffe2IOSPredictor::NewCaffe2IOSPredictor(
init_net, predict_net, disableMultithreadProcessing, allowMetalOperators);
} catch (const caffe2::EnforceNotMet& e) {
std::string error = e.msg();
errorMessage.swap(error);
return NULL;
} catch (const std::exception& e) {
std::string error = e.what();
errorMessage.swap(error);

View File

@@ -57,10 +57,6 @@ void Caffe2IOSPredictor::run(const Tensor& inData, Tensor& outData, std::string&
caffe2::Predictor::TensorList output_vec;
try {
predictor_(input_vec, &output_vec);
} catch (const caffe2::EnforceNotMet& e) {
std::string error = e.msg();
errorMessage.swap(error);
return;
} catch (const std::exception& e) {
std::string error = e.what();
errorMessage.swap(error);

View File

@@ -820,7 +820,7 @@ void BoundShapeInferencer::InferCommonOp(const OperatorDef& op) {
}
} catch (const caffe2::EnforceNotMet& e) {
LOG(ERROR) << "Enforce not met while inferring shapes for " << op.type()
<< ": " << e.msg() << " first output: " << op.output(0);
<< ": " << e.what() << " first output: " << op.output(0);
} catch (const std::exception& e) {
LOG(WARNING) << "Caught exception while inferring shapes for " << op.type()
<< ": " << e.what() << " first output: " << op.output(0);

View File

@@ -20,7 +20,7 @@ inline std::ostream& operator<<(
const ExceptionMessage& msg) {
auto c10_error = dynamic_cast<const c10::Error*>(&msg.e_);
if (c10_error) {
out << c10_error->msg_without_backtrace();
out << c10_error->what_without_backtrace();
} else {
out << msg.e_.what();
}