Unverified commit 237c1fe6, authored by Leo Chen, committed by GitHub

[Paddle-TRT] Use TRT inspector to show the information inside an engine to verbose log (#38200)

* Inspect the information inside a TRT engine.

* Follow up the google code style.

* Fix code error.
Parent commit: 8bc27015
......@@ -54,6 +54,7 @@ void TensorRTEngine::Execute(int batch_size, std::vector<void *> *buffers,
} else {
#if IS_TRT_VERSION_GE(6000)
infer_context->enqueueV2(buffers->data(), stream, nullptr);
GetEngineInfo();
#endif
}
SetRuntimeBatch(batch_size);
......@@ -237,6 +238,11 @@ void TensorRTEngine::FreezeNetwork() {
#endif
}
#if IS_TRT_VERSION_GE(8200)
infer_builder_config_->setProfilingVerbosity(
nvinfer1::ProfilingVerbosity::kDETAILED);
#endif
#if IS_TRT_VERSION_LT(8000)
infer_engine_.reset(infer_builder_->buildEngineWithConfig(
*network(), *infer_builder_config_));
......@@ -253,6 +259,8 @@ void TensorRTEngine::FreezeNetwork() {
infer_engine_, platform::errors::Fatal(
"Build TensorRT cuda engine failed! Please recheck "
"you configurations related to paddle-TensorRT."));
GetEngineInfo();
}
nvinfer1::ITensor *TensorRTEngine::DeclareInput(const std::string &name,
......
......@@ -321,6 +321,8 @@ class TensorRTEngine {
"on the same GPU architecture;\n2. The Paddle Inference version of "
"generating serialization file and doing inference are "
"consistent."));
GetEngineInfo();
}
void SetRuntimeBatch(size_t batch_size);
......@@ -538,6 +540,18 @@ class TensorRTEngine {
}
}
// Dumps the built engine's per-layer information to the verbose log
// (VLOG level 3) as JSON, using TensorRT's IEngineInspector.
// The inspector API exists only in TensorRT >= 8.2; on older versions
// this logs a notice instead. NOTE(review): full per-layer detail
// presumably also requires the engine to have been built with
// ProfilingVerbosity::kDETAILED (set elsewhere in FreezeNetwork under
// the same 8200 version guard) — confirm against TRT docs.
void GetEngineInfo() {
#if IS_TRT_VERSION_GE(8200)
// The inspector is created from (and bound to) this engine; ownership
// is taken by the unique_ptr so it is destroyed on scope exit.
std::unique_ptr<nvinfer1::IEngineInspector> infer_inspector(
infer_engine_->createEngineInspector());
// Attach the current execution context so the reported information
// reflects this context's resolved state (e.g. dynamic shapes).
infer_inspector->setExecutionContext(context());
// Emit the whole engine's layer information in JSON format.
VLOG(3) << infer_inspector->getEngineInformation(
nvinfer1::LayerInformationFormat::kJSON);
#else
// Inspector API unavailable before TRT 8.2; just note it in the log.
VLOG(3) << "Inspector needs TensorRT version 8.2 and after.";
#endif
}
private:
// Each ICudaEngine object is bound to a specific GPU when it is instantiated,
// ensure that the thread is associated with the correct device by calling
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
To comment, please register or sign in.