未验证 提交 2480bdef 编写于 作者: P Pei Yang 提交者: GitHub

change hard_swish from plugin to layer (#29177)

* change hard_swish from plugin to layer

* add ut when threshold != scale
上级 b122d0bb
......@@ -65,13 +65,21 @@ class HardSwishOpConverter : public OpConverter {
const float offset = op_desc.HasAttr("offset")
? BOOST_GET_CONST(float, op_desc.GetAttr("offset"))
: 3.0f;
nvinfer1::ILayer* layer = nullptr;
plugin::HardSwishPlugin* plugin =
new plugin::HardSwishPlugin(threshold, scale, offset);
layer = engine_->AddPlugin(&input, input_num, plugin);
if (threshold == scale) {
auto* hsig_layer = TRT_ENGINE_ADD_LAYER(
engine_, Activation, *input, nvinfer1::ActivationType::kHARD_SIGMOID);
hsig_layer->setAlpha(1.0 / scale);
hsig_layer->setBeta(offset / scale);
nvinfer1::IElementWiseLayer* eltwise_layer = TRT_ENGINE_ADD_LAYER(
engine_, ElementWise, *input, *(hsig_layer->getOutput(0)),
nvinfer1::ElementWiseOperation::kPROD);
layer = eltwise_layer;
} else {
plugin::HardSwishPlugin* plugin =
new plugin::HardSwishPlugin(threshold, scale, offset);
layer = engine_->AddPlugin(&input, input_num, plugin);
}
auto output_name = op_desc.Output("Out")[0];
RreplenishLayerAndOutput(layer, "hard_swish", {output_name}, test_mode);
}
......
......@@ -346,6 +346,12 @@ class TensorRTSubgraphPassHardSigmoidTest(TensorRTSubgraphPassActivationTest):
return fluid.layers.hard_sigmoid(x)
class TensorRTSubgraphPassHardSwishPluginTest(
        TensorRTSubgraphPassActivationTest):
    """Exercises hard_swish with threshold != scale.

    With unequal threshold/scale the TensorRT converter cannot use the
    hard-sigmoid layer path, so this case covers the plugin fallback.
    """

    def append_act(self, x):
        # Deliberately pick threshold (4.0) != scale (8.0); the default
        # (6.0 == 6.0) would take the non-plugin code path.
        hs_threshold = 4.0
        hs_scale = 8.0
        return fluid.layers.hard_swish(x, threshold=hs_threshold, scale=hs_scale)
class TensorRTSubgraphPassClipTest(TensorRTSubgraphPassActivationTest):
    """Exercises the clip op through the TensorRT subgraph pass."""

    def append_act(self, x):
        # Clip bounds are passed positionally: min=0, max=1.
        clip_min, clip_max = 0, 1
        return fluid.layers.clip(x, clip_min, clip_max)
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册