From 4805da503a9db4c12f4361287f5f9b5594906798 Mon Sep 17 00:00:00 2001
From: Wangzheee <634486483@qq.com>
Date: Wed, 10 Aug 2022 19:13:30 +0800
Subject: [PATCH] [Paddle Inference]Disable skip layernorm half (#45047)

* disable_skip_layernorm_fp16
---
 paddle/fluid/inference/tensorrt/convert/skip_layernorm.cc | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/paddle/fluid/inference/tensorrt/convert/skip_layernorm.cc b/paddle/fluid/inference/tensorrt/convert/skip_layernorm.cc
index 25a6861eb67..26675fdddba 100644
--- a/paddle/fluid/inference/tensorrt/convert/skip_layernorm.cc
+++ b/paddle/fluid/inference/tensorrt/convert/skip_layernorm.cc
@@ -163,8 +163,10 @@ class SkipLayerNormOpConverter : public OpConverter {
     auto scale_weight = GetFp32Weight("Scale").get();
 
     float eps = PADDLE_GET_CONST(float, op_desc.GetAttr("epsilon"));
-    bool with_fp16 =
-        engine_->WithFp16() && !engine_->disable_trt_plugin_fp16();
+    // bool with_fp16 =
+    //     engine_->WithFp16() && !engine_->disable_trt_plugin_fp16();
+    bool with_fp16 = false;
+
     plugin::SkipLayerNormPluginDynamic* plugin =
         new plugin::SkipLayerNormPluginDynamic(
             static_cast<const float*>(bias_weight.values),
-- 
GitLab