From ddb9aea6f2bd324b469f72c277123caa8a76fd6f Mon Sep 17 00:00:00 2001
From: SunAhong1993
Date: Fri, 29 Jan 2021 17:36:55 +0800
Subject: [PATCH] fix the caffe

---
 .../caffe_custom_layer/normalize.py           | 22 +++++++++-------
 .../dygraph/caffe2paddle/caffe_op_mapper.py   | 26 ++++++++++++-------
 .../caffe_custom_layer/normalize.py           | 19 ++++++++++----
 .../static/caffe2paddle/caffe_op_mapper.py    | 16 ++++++------
 4 files changed, 52 insertions(+), 31 deletions(-)

diff --git a/x2paddle/op_mapper/dygraph/caffe2paddle/caffe_custom_layer/normalize.py b/x2paddle/op_mapper/dygraph/caffe2paddle/caffe_custom_layer/normalize.py
index 0714744..647c7a6 100644
--- a/x2paddle/op_mapper/dygraph/caffe2paddle/caffe_custom_layer/normalize.py
+++ b/x2paddle/op_mapper/dygraph/caffe2paddle/caffe_custom_layer/normalize.py
@@ -16,17 +16,21 @@ import paddle
 import paddle.fluid as fluid
 
 
 class Normalize(object):
-    def __init__(self, axis, param_name, param_shape):
+    def __init__(self, axis):
         self.axis = axis
-        self.param_name = param_name
-        self.param_shape = param_shape
 
-    def __call__(self, x):
-        l2 = fluid.layers.prior_box(x=x, p=2, axis=1)
-        attr = fluid.ParamAttr(name=self.param_name, trainable=False)
-        param = paddle.nn.Layer.create_parameter(shape=self.param_shape,
-                                                 attr=atr)
-        out = paddle.multiply(x=l2, y=param, axis=self.axis)
+    def __call__(self, x, param):
+        l2_norm = fluid.layers.l2_normalize(x=x, axis=1)
+        param = paddle.reshape(param, [param.shape[-1]])
+        perm = list(range(len(l2_norm.shape)))
+        perm.pop(self.axis)
+        perm = perm + [self.axis]
+        l2_norm = paddle.transpose(l2_norm, perm=perm)
+        out = paddle.multiply(x=l2_norm, y=param)
+        perm = list(range(len(l2_norm.shape)))
+        dim = perm.pop(-1)
+        perm.insert(self.axis, dim)
+        out = paddle.transpose(out, perm=perm)
         return out
\ No newline at end of file
diff --git a/x2paddle/op_mapper/dygraph/caffe2paddle/caffe_op_mapper.py b/x2paddle/op_mapper/dygraph/caffe2paddle/caffe_op_mapper.py
index f2ae88e..888d809 100644
--- a/x2paddle/op_mapper/dygraph/caffe2paddle/caffe_op_mapper.py
+++ b/x2paddle/op_mapper/dygraph/caffe2paddle/caffe_op_mapper.py
@@ -782,7 +782,7 @@ class CaffeOpMapper(OpMapper):
         out_max_val = params.out_max_val if hasattr(params,
                                                     out_max_val) else False
         top_k = params.top_k if hasattr(params, top_k) else 1
-        axis = parmas.axis if hasattr(params, axis) else -1
+        axis = params.axis if hasattr(params, axis) else -1
         if axis < 0:
             axis += len(input_shape)
         if out_max_val is True:
@@ -1018,22 +1018,30 @@ class CaffeOpMapper(OpMapper):
             node.inputs) == 1, "The count of Normalize node\'s input is not 1."
         input = self.graph.get_input_node(node, idx=0, copy=True)
         params = node.layer.norm_param
+        param_name = node.layer_name + "_scale"
         if node.data is None or len(node.data) != 1:
             print(
                 "The parameter of {} (type is {}) is not set. So we set the parameters as 0"
                 .format(node.layer_name, node.layer_type))
-            self.parmas[node.layer_name + ".scale"] = \
-                np.zeros([1] if params.channel_shared else [1, 1, 1, node.in_shapes[0][1]]).astype("float32")
+            self.params[param_name] = \
+                np.zeros([1] if params.channel_shared else [node.in_shapes[0][1]]).astype("float32")
         else:
-            self.parmas[node.layer_name + ".scale"] = _adjust_parameters(node)[0]
+            self.params[param_name] = _adjust_parameters(node)[0]
+
+        self.paddle_graph.add_layer(
+            "self.create_parameter",
+            inputs={},
+            outputs=[param_name],
+            shape=self.params[param_name].shape,
+            attr=string(param_name))
+
+        inputs_dict = {}
         layer_attrs = {
-            "axis": -1 if params.channel_shared else 1,
-            "param_name": node.layer_name + ".scale",
-            "param_shape": self.parmas[node.layer_name + ".scale"].shape}
-        self.pd_pdgraph.add_layer(
+            "axis": -1 if params.channel_shared else 1}
+        self.paddle_graph.add_layer(
             "custom_layer:Normalize",
-            inputs={"x": input.name},
+            inputs={"x": input.name,
+                    "param": param_name},
             outputs=layer_outputs,
             **layer_attrs)
diff --git a/x2paddle/op_mapper/static/caffe2paddle/caffe_custom_layer/normalize.py b/x2paddle/op_mapper/static/caffe2paddle/caffe_custom_layer/normalize.py
index a3c884c..180560a 100644
--- a/x2paddle/op_mapper/static/caffe2paddle/caffe_custom_layer/normalize.py
+++ b/x2paddle/op_mapper/static/caffe2paddle/caffe_custom_layer/normalize.py
@@ -13,12 +13,21 @@
 # limitations under the License.
 
 import paddle
-import paddle.fluid as fluid
 
 
 def normalize(x, axis, param_name, param_shape, param_dtype):
-    l2 = fluid.layers.prior_box(x=x, p=2, axis=1)
+    l2_norm = paddle.fluid.layers.l2_normalize(x=x, axis=1)
     param = paddle.static.nn.create_parameter(shape=param_shape,
-                                              dtype=string(param_dtype),
-                                              name=string(param_name))
+                                              dtype=param_dtype,
+                                              name=param_name)
+    param = paddle.reshape(param, [param.shape[-1]])
+    perm = list(range(len(l2_norm.shape)))
+    perm.pop(axis)
+    perm = perm + [axis]
+    l2_norm = paddle.transpose(l2_norm, perm=perm)
+    out = paddle.multiply(x=l2_norm, y=param)
+    perm = list(range(len(l2_norm.shape)))
+    dim = perm.pop(-1)
+    perm.insert(axis, dim)
+    out = paddle.transpose(out, perm=perm)
+
     return out
\ No newline at end of file
diff --git a/x2paddle/op_mapper/static/caffe2paddle/caffe_op_mapper.py b/x2paddle/op_mapper/static/caffe2paddle/caffe_op_mapper.py
index d9ba3fd..68ce95c 100644
--- a/x2paddle/op_mapper/static/caffe2paddle/caffe_op_mapper.py
+++ b/x2paddle/op_mapper/static/caffe2paddle/caffe_op_mapper.py
@@ -855,7 +855,7 @@ class CaffeOpMapper(OpMapper):
         out_max_val = params.out_max_val if hasattr(params,
                                                     out_max_val) else False
         top_k = params.top_k if hasattr(params, top_k) else 1
-        axis = parmas.axis if hasattr(params, axis) else -1
+        axis = params.axis if hasattr(params, axis) else -1
         if axis < 0:
             axis += len(in_shapes)
         if out_max_val is True:
@@ -1090,17 +1090,17 @@ class CaffeOpMapper(OpMapper):
             print(
                 "The parameter of {} (type is {}) is not set. So we set the parameters as 0"
                 .format(scale_name, node.layer_type))
-            self.parmas[scale_name] = \
-                np.zeros([1] if params.channel_shared else [1, 1, 1, node.in_shapes[0][1]]).astype("float32")
+            self.params[scale_name] = \
+                np.zeros([1] if params.channel_shared else [node.in_shapes[0][1]]).astype("float32")
         else:
-            self.parmas[scale_name] = _adjust_parameters(node)[0]
+            self.params[scale_name] = _adjust_parameters(node)[0]
         layer_attrs = {
             "axis": -1 if params.channel_shared else 1,
-            "param_name": scale_name,
-            "param_shape": self.parmas[scale_name].shape,
-            "param_dtype": str(self.parmas[scale_name].dtype)}
-        self.pd_pdgraph.add_layer(
+            "param_name": string(scale_name),
+            "param_shape": self.params[scale_name].shape,
+            "param_dtype": string(self.params[scale_name].dtype)}
+        self.paddle_graph.add_layer(
             "custom_layer:normalize",
             inputs={"x": input.name},
             outputs=[node.name],
-- 
GitLab
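
The change repeated across all four files is the same: the old code scaled the L2-normalized tensor with paddle.multiply(x=l2, y=param, axis=...), while the new code reshapes the scale parameter to 1-D, transposes the target axis to the last position, multiplies, and transposes back. A rough NumPy sketch of that broadcast trick follows; the NCHW shapes, axis=1 (i.e. channel_shared=False) and the epsilon are illustrative assumptions, not values taken from the patch, and fluid.layers.l2_normalize is approximated by a plain per-axis L2 division.

    import numpy as np

    # Illustrative NCHW input and per-channel scale (axis=1, channel_shared=False).
    axis = 1
    x = np.random.rand(2, 3, 4, 5).astype("float32")
    scale = np.random.rand(3).astype("float32")

    # Channel-wise L2 normalization; the epsilon is an assumed stand-in for
    # whatever fluid.layers.l2_normalize uses internally.
    l2_norm = x / np.sqrt((x * x).sum(axis=axis, keepdims=True) + 1e-12)

    # Move the scaled axis to the last position so the 1-D scale broadcasts over it.
    perm = list(range(l2_norm.ndim))
    perm.pop(axis)
    perm = perm + [axis]
    out = np.transpose(l2_norm, perm) * scale

    # Move that axis back to its original position.
    perm = list(range(l2_norm.ndim))
    dim = perm.pop(-1)
    perm.insert(axis, dim)
    out = np.transpose(out, perm)

    # Same result as broadcasting the scale as a [1, C, 1, 1] tensor.
    assert np.allclose(out, l2_norm * scale.reshape(1, -1, 1, 1))
    print(out.shape)  # (2, 3, 4, 5)

Transposing the scaled axis to the end is presumably a workaround for paddle.multiply no longer accepting the axis argument the removed code relied on, since plain broadcasting only lines up trailing dimensions.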