Unverified · Commit f24eadd9 authored by zhoutianzi666, committed by GitHub

[Paddle-TRT] allow plugin fall back to fp16 when int8 (#50554)

* allow plugins to fall back to fp16 when running in int8 mode (see the configuration sketch below)

* refine code

* refine code

* refine code
Parent 9429936c
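
For context, the int8 build that this change affects is the one requested through Paddle Inference's TensorRT integration. A minimal usage sketch follows; the model path and sizing arguments are illustrative, not taken from this commit:

#include "paddle_inference_api.h"

int main() {
  paddle_infer::Config config;
  config.SetModel("model_dir");  // illustrative path
  config.EnableUseGpu(256 /* MiB */, 0 /* GPU id */);
  // Build the TensorRT subgraph engine in int8. With this commit, an op
  // whose plugin lacks an int8 kernel may fall back to fp16 (when the
  // platform supports fast fp16 and DLA is not in use) instead of fp32.
  config.EnableTensorRtEngine(1 << 30 /* workspace bytes */,
                              1 /* max batch size */,
                              3 /* min subgraph size */,
                              paddle_infer::PrecisionType::kInt8,
                              false /* use_static */,
                              true /* use_calib_mode */);
  auto predictor = paddle_infer::CreatePredictor(config);
  return 0;
}
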
@@ -358,7 +358,9 @@ class TensorRTEngine {
   bool WithFp16() {
     bool enable_fp16 = (precision_ == AnalysisConfig::Precision::kHalf);
     bool support_fp16 = infer_builder_->platformHasFastFp16();
-    return enable_fp16 && support_fp16;
+    // below is consistent with setFlag in engine.cc
+    bool fall_back_fp16 = WithInt8() && !use_dla_;
+    return (enable_fp16 || fall_back_fp16) && support_fp16;
   }
   bool WithInt8() {
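
The in-code comment above points at the flag-setting logic in engine.cc. A plausible sketch of that counterpart is below; it is an assumption for illustration, not the verbatim engine.cc code, and SetPrecisionFlags is a hypothetical helper name:

#include <NvInfer.h>

// Hypothetical helper mirroring the referenced setFlag logic: when an
// int8 engine is built without DLA, also set the FP16 builder flag so
// layers and plugins without int8 kernels can run in fp16 instead of
// falling all the way back to fp32.
void SetPrecisionFlags(nvinfer1::IBuilder* builder,
                       nvinfer1::IBuilderConfig* config,
                       bool enable_int8,
                       bool use_dla) {
  if (enable_int8) {
    config->setFlag(nvinfer1::BuilderFlag::kINT8);
    if (!use_dla && builder->platformHasFastFp16()) {
      // Same condition as WithFp16(): fall_back_fp16 = WithInt8() && !use_dla_
      config->setFlag(nvinfer1::BuilderFlag::kFP16);
    }
  }
}

With both kINT8 and kFP16 set, TensorRT chooses precision per layer, so WithFp16() must report true in this configuration to keep the rest of the engine code consistent; that is exactly what the new fall_back_fp16 branch does.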