diff --git a/x2paddle/decoder/caffe_decoder.py b/x2paddle/decoder/caffe_decoder.py
index cb9f59708fa0980b0a3bcb029319a33a3df18f92..d9e926c57d2b4b5a3ecaa67c256d9bebb2e6e745 100644
--- a/x2paddle/decoder/caffe_decoder.py
+++ b/x2paddle/decoder/caffe_decoder.py
@@ -88,6 +88,27 @@ class CaffeGraph(Graph):
             # filter them out here.
             if (not exclude) and (phase == 'test'):
                 exclude = (type_str == 'Dropout')
+            # Removing an excluded Dropout layer would leave its consumers
+            # pointing at a top blob that no longer exists.  In-place dropout
+            # (top == bottom) needs no fixup; otherwise every later layer that
+            # reads the dropout's top must be rewired to its bottom, e.g.
+            #
+            #   Pooling(top=pool_8x8_s1)
+            #     -> Dropout(bottom=pool_8x8_s1, top=pool_8x8_s1_drop)
+            #       -> InnerProduct(bottom=pool_8x8_s1_drop)
+            #
+            # becomes InnerProduct(bottom=pool_8x8_s1).  Guarded by 'exclude'
+            # so a Dropout that is kept (e.g. train phase) is not bypassed.
+            if exclude and layer.type == 'Dropout':
+                drop_layer_top = layer.top[0]
+                drop_layer_bottom = layer.bottom[0]
+                if drop_layer_top != drop_layer_bottom:
+                    for next_layer in layers:
+                        for idx, bottom_name in enumerate(next_layer.bottom):
+                            if bottom_name == drop_layer_top:
+                                # In-place update keeps bottom order intact.
+                                next_layer.bottom[idx] = drop_layer_bottom
+
             if not exclude:
                 filtered_layers.append(layer)
                 # Guard against dupes.
diff --git a/x2paddle/op_mapper/caffe_custom_layer/__init__.py b/x2paddle/op_mapper/caffe_custom_layer/__init__.py
index b75ca72c83aba24b0e384779836e986135134c0a..5dc3079cb6dba452d767f20cd2ed29912940c124 100644
--- a/x2paddle/op_mapper/caffe_custom_layer/__init__.py
+++ b/x2paddle/op_mapper/caffe_custom_layer/__init__.py
@@ -10,6 +10,8 @@
 from . import select
 from . import shufflechannel
 from . import convolutiondepthwise
 from . import axpy
+from . import upsample
+from . import relu6
 #custom layer import ends
 custom_layers = get_registered_layers()
diff --git a/x2paddle/op_mapper/caffe_custom_layer/axpy.py b/x2paddle/op_mapper/caffe_custom_layer/axpy.py
index c32de40f5c45cd345847c4bc031f969afb34e466..6e0f843398afc877e22b24a454f45c96e5369ceb 100644
--- a/x2paddle/op_mapper/caffe_custom_layer/axpy.py
+++ b/x2paddle/op_mapper/caffe_custom_layer/axpy.py
@@ -2,7 +2,7 @@ from .register import register
 from x2paddle.core.util import *
 
 
-def axpy_shape(input_shape):
+def axpy_shape(input_shapes):
     assert len(input_shapes) == 3, "not valid input shape for axpy layer"
     assert len(input_shapes[0]) == len(input_shapes[1]), 'should have same dims'
     output_shape = input_shapes[1]
@@ -18,7 +18,7 @@ def axpy_layer(inputs, input_shape=None, name=None):
     y = inputs[2]
     out = fluid.layers.elementwise_mul(x, alpha, axis=0)
     out = fluid.layers.elementwise_add(out, y, name=name)
-    print(out)
+    return out
 
 
 def axpy_weights(name, data=None):
diff --git a/x2paddle/op_mapper/caffe_custom_layer/relu6.py b/x2paddle/op_mapper/caffe_custom_layer/relu6.py
new file mode 100644
index 0000000000000000000000000000000000000000..878af0cf2408e7efb090b1785eaa9406bfc72a61
--- /dev/null
+++ b/x2paddle/op_mapper/caffe_custom_layer/relu6.py
@@ -0,0 +1,25 @@
+from .register import register
+from x2paddle.core.util import *
+
+
+def relu6_shape(input_shape):
+    # Element-wise op: output shape equals input shape.
+    return input_shape
+
+
+def relu6_layer(inputs, input_shape=None, name=None):
+    # ReLU6: min(max(0, x), 6); forward the layer name for a readable graph.
+    out = fluid.layers.relu6(x=inputs[0], name=name)
+    return out
+
+
+def relu6_weights(name, data=None):
+    # ReLU6 has no learnable weights.
+    return []
+
+
+register(
+    kind='ReLU6',
+    shape=relu6_shape,
+    layer=relu6_layer,
+    weights=relu6_weights)
diff --git a/x2paddle/op_mapper/caffe_custom_layer/upsample.py b/x2paddle/op_mapper/caffe_custom_layer/upsample.py
new file mode 100644
index 0000000000000000000000000000000000000000..6e9e13199b9c0197020bf7b4c687a3bf07b63e98
--- /dev/null
+++ b/x2paddle/op_mapper/caffe_custom_layer/upsample.py
@@ -0,0 +1,47 @@
+# -*- coding: utf-8 -*-
+################################################################################
+#
+# Copyright (c) 2020 Baidu.com, Inc. All Rights Reserved
+#
+################################################################################
+"""
+Author: Drift
+Email: wutuobang@baidu.com
+Date: 2020/04/22 18:45
+"""
+
+from .register import register
+from x2paddle.core.util import *
+
+
+def upsample_shape(input_shapes, scale):
+    """Infer the NCHW output shape: H and W are multiplied by `scale`."""
+    assert len(input_shapes) == 1, "not valid input shape for upsample layer"
+    assert isinstance(scale, int), "upsample scale must be an integer"
+
+    input_shape = input_shapes[0]
+    new_h = scale * input_shape[2]
+    new_w = scale * input_shape[3]
+
+    output_shape = [input_shape[0], input_shape[1], new_h, new_w]
+    return [output_shape]
+
+
+def upsample_layer(inputs, scale, input_shape=None, name=None):
+    """Emit nearest-neighbour upsampling via fluid.layers.resize_nearest."""
+    x = inputs[0]
+    out = fluid.layers.resize_nearest(
+        x, align_corners=False, scale=scale, name=name)
+    return out
+
+
+def upsample_weights(name, data=None):
+    """Upsample has no learnable weights."""
+    return []
+
+
+register(
+    kind='Upsample',
+    shape=upsample_shape,
+    layer=upsample_layer,
+    weights=upsample_weights)
diff --git a/x2paddle/op_mapper/caffe_op_mapper.py b/x2paddle/op_mapper/caffe_op_mapper.py
index f350c50ea707518e296c6e807e7d89cca686bbcf..26e08cc907e0764f58c27c289291ca1c8415405b 100644
--- a/x2paddle/op_mapper/caffe_op_mapper.py
+++ b/x2paddle/op_mapper/caffe_op_mapper.py
@@ -23,7 +23,6 @@ from x2paddle.op_mapper.caffe_custom_layer import *
 
 class CaffeOpMapper(OpMapper):
     directly_map_ops = {
-        'ReLU': 'relu',
         'AbsVal': 'abs',
         'Sigmoid': 'sigmoid',
         'TanH': 'tanh',
@@ -435,6 +434,28 @@ class CaffeOpMapper(OpMapper):
         node.fluid_code.add_layer(
             "concat", inputs=inputs, output=node, param_attr=attr)
 
+    def ReLU(self, node):
+        """Map a Caffe ReLU layer to Paddle.
+
+        A ReLU with a non-zero ``negative_slope`` is Caffe's leaky ReLU,
+        so it is emitted as ``leaky_relu``; otherwise plain ``relu``.
+        """
+        assert len(
+            node.inputs) == 1, 'The count of ReLU node\'s input is not 1.'
+        input = self.graph.get_bottom_node(node, idx=0, copy=True)
+
+        params = node.layer.relu_param
+        if params.HasField('negative_slope') and params.negative_slope != 0:
+            # Leaky ReLU: y = x for x > 0, y = negative_slope * x otherwise.
+            negative_slope = float(params.negative_slope)
+
+            attr = {'alpha': negative_slope}
+            node.fluid_code.add_layer(
+                'leaky_relu', inputs=input, output=node, param_attr=attr)
+        else:
+            node.fluid_code.add_layer(
+                'relu', inputs=input, output=node)
+
     def PReLU(self, node):
         assert len(
             node.inputs) == 1, 'The count of PReLU node\'s input is not 1.'