Updating onnxtrt submodule to master branch

Summary: Pull Request resolved: https://github.com/pytorch/pytorch/pull/18441

Differential Revision: D14613517

Pulled By: bddppq

fbshipit-source-id: dd20d718db55942df9cce7acd1151d6902bc57ff
Author: Kevin Chen
Date: 2019-03-26 14:15:39 -07:00
Committed by: Facebook Github Bot
Parent: 654e59fcac
Commit: bbe110f4e1

4 changed files with 4 additions and 4 deletions

.gitmodules

@@ -57,7 +57,7 @@
 	url = https://github.com/onnx/onnx.git
 [submodule "third_party/onnx-tensorrt"]
 	path = third_party/onnx-tensorrt
-	url = https://github.com/bddppq/onnx-tensorrt
+	url = https://github.com/onnx/onnx-tensorrt
 [submodule "third_party/sleef"]
 	path = third_party/sleef
 	url = https://github.com/zdevito/sleef


@@ -486,7 +486,7 @@ void TensorRTTransformer::Transform(
   auto trt_builder = tensorrt::TrtObject(nvinfer1::createInferBuilder(logger));
   auto trt_network = tensorrt::TrtObject(trt_builder->createNetwork());
   auto importer =
-      tensorrt::TrtObject(nvonnxparser::createParser(trt_network.get(), logger));
+      tensorrt::TrtObject(nvonnxparser::createParser(*trt_network, logger));
   // function to tell whether TensorRT supports a given C2 op or not
   auto supports =


@@ -13,7 +13,7 @@ std::shared_ptr<nvinfer1::ICudaEngine> BuildTrtEngine(
   auto trt_builder = TrtObject(nvinfer1::createInferBuilder(*logger));
   auto trt_network = TrtObject(trt_builder->createNetwork());
   auto trt_parser =
-      TrtObject(nvonnxparser::createParser(trt_network.get(), *logger));
+      TrtObject(nvonnxparser::createParser(*trt_network, *logger));
   auto status = trt_parser->parse(onnx_model_str.data(), onnx_model_str.size());
   if (!status) {
    const auto num_errors = trt_parser->getNbErrors();
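
For context, the parser change above reflects the onnx-tensorrt API on master, where nvonnxparser::createParser takes the network definition by reference rather than by pointer. Below is a minimal standalone sketch of the same engine-building flow outside of Caffe2; the TrtLogger class, the model.onnx path, and the batch/workspace settings are illustrative assumptions, not part of this commit.

// Minimal sketch (not part of this commit): build a TensorRT engine from an
// ONNX model with the updated createParser(network-by-reference, logger) call.
#include <fstream>
#include <iostream>
#include <sstream>
#include <string>

#include <NvInfer.h>
#include <NvOnnxParser.h>

// Illustrative logger; any nvinfer1::ILogger implementation will do.
class TrtLogger : public nvinfer1::ILogger {
  void log(Severity severity, const char* msg) override {
    if (severity <= Severity::kWARNING) {
      std::cerr << msg << std::endl;
    }
  }
};

int main() {
  TrtLogger logger;

  // Load the serialized ONNX model (the path is a placeholder).
  std::ifstream in("model.onnx", std::ios::binary);
  std::stringstream buffer;
  buffer << in.rdbuf();
  const std::string onnx_model_str = buffer.str();

  auto* trt_builder = nvinfer1::createInferBuilder(logger);
  auto* trt_network = trt_builder->createNetwork();

  // Updated signature: the network is passed by reference, matching the
  // `*trt_network` call sites in the hunks above.
  auto* trt_parser = nvonnxparser::createParser(*trt_network, logger);

  if (!trt_parser->parse(onnx_model_str.data(), onnx_model_str.size())) {
    const auto num_errors = trt_parser->getNbErrors();
    for (int i = 0; i < num_errors; ++i) {
      std::cerr << trt_parser->getError(i)->desc() << std::endl;
    }
    return 1;
  }

  // Illustrative build settings, then engine creation.
  trt_builder->setMaxBatchSize(1);
  trt_builder->setMaxWorkspaceSize(1 << 28);
  auto* engine = trt_builder->buildCudaEngine(*trt_network);

  // ... run inference with `engine`, then release the TensorRT objects.
  engine->destroy();
  trt_parser->destroy();
  trt_network->destroy();
  trt_builder->destroy();
  return 0;
}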