diff --git a/x2paddle/op_mapper/dygraph/caffe2paddle/caffe_custom_layer/normalize.py b/x2paddle/op_mapper/dygraph/caffe2paddle/caffe_custom_layer/normalize.py index 07147449b93641f0018aec695ea841e25a656957..647c7a6e556c98b68427c4db91ff9c6a6565c4b6 100644 --- a/x2paddle/op_mapper/dygraph/caffe2paddle/caffe_custom_layer/normalize.py +++ b/x2paddle/op_mapper/dygraph/caffe2paddle/caffe_custom_layer/normalize.py @@ -16,17 +16,21 @@ import paddle import paddle.fluid as fluid class Normalize(object): - def __init__(self, axis, param_name, param_shape): + def __init__(self, axis): self.axis = axis - self.param_name = param_name - self.param_shape = param_shape - def __call__(self, x): - l2 = fluid.layers.prior_box(x=x, p=2, axis=1) - attr = fluid.ParamAttr(name=self.param_name, trainable=False) - param = paddle.nn.Layer.create_parameter(shape=self.param_shape, - attr=atr) - out = paddle.multiply(x=l2, y=param, axis=self.axis) + def __call__(self, x, param): + l2_norm = fluid.layers.l2_normalize(x=x, axis=1) + param = paddle.reshape(param, [param.shape[-1]]) + perm = list(range(len(l2_norm.shape))) + perm.pop(self.axis) + perm = perm + [self.axis] + l2_norm = paddle.transpose(l2_norm, perm=perm) + out = paddle.multiply(x=l2_norm, y=param) + perm = list(range(len(l2_norm.shape))) + dim = perm.pop(-1) + perm.insert(self.axis, dim) + out = paddle.transpose(out, perm=perm) return out \ No newline at end of file diff --git a/x2paddle/op_mapper/dygraph/caffe2paddle/caffe_op_mapper.py b/x2paddle/op_mapper/dygraph/caffe2paddle/caffe_op_mapper.py index f2ae88e88982d0de952c00c31914c4256b05f21b..888d8095cc9c3ea9f6ef4a046520de563a370727 100644 --- a/x2paddle/op_mapper/dygraph/caffe2paddle/caffe_op_mapper.py +++ b/x2paddle/op_mapper/dygraph/caffe2paddle/caffe_op_mapper.py @@ -782,7 +782,7 @@ class CaffeOpMapper(OpMapper): out_max_val = params.out_max_val if hasattr(params, out_max_val) else False top_k = params.top_k if hasattr(params, top_k) else 1 - axis = parmas.axis if 
hasattr(params, axis) else -1 + axis = params.axis if hasattr(params, 'axis') else -1 if axis < 0: axis += len(input_shape) if out_max_val is True: @@ -1018,22 +1018,30 @@ class CaffeOpMapper(OpMapper): node.inputs) == 1, "The count of Normalize node\'s input is not 1." input = self.graph.get_input_node(node, idx=0, copy=True) params = node.layer.norm_param + param_name = node.layer_name + "_scale" if node.data is None or len(node.data) != 1: print( "The parameter of {} (type is {}) is not set. So we set the parameters as 0" .format(node.layer_name, node.layer_type)) - self.parmas[node.layer_name + ".scale"] = \ - np.zeros([1] if params.channel_shared else [1, 1, 1, node.in_shapes[0][1]]).astype("float32") + self.params[param_name] = \ + np.zeros([1] if params.channel_shared else [node.in_shapes[0][1]]).astype("float32") else: - self.parmas[node.layer_name + ".scale"] = _adjust_parameters(node)[0] + self.params[param_name] = _adjust_parameters(node)[0] + + self.paddle_graph.add_layer( + "self.create_parameter", + inputs={}, + outputs=[param_name], + shape=self.params[param_name].shape, + attr=string(param_name)) + inputs_dict = {} layer_attrs = { - "axis": -1 if params.channel_shared else 1, - "param_name": node.layer_name + ".scale", - "param_shape": self.parmas[node.layer_name + ".scale"].shape} - self.pd_pdgraph.add_layer( + "axis": -1 if params.channel_shared else 1} + self.paddle_graph.add_layer( "custom_layer:Normalize", - inputs={"x": input.name}, + inputs={"x": input.name, + "param": param_name}, outputs=layer_outputs, **layer_attrs) diff --git a/x2paddle/op_mapper/static/caffe2paddle/caffe_custom_layer/normalize.py b/x2paddle/op_mapper/static/caffe2paddle/caffe_custom_layer/normalize.py index a3c884c661093b007158053c52cad3e9f1e1fb2d..180560a31479f3e529c63945419cf1563b258b1b 100644 --- a/x2paddle/op_mapper/static/caffe2paddle/caffe_custom_layer/normalize.py +++ b/x2paddle/op_mapper/static/caffe2paddle/caffe_custom_layer/normalize.py @@ -13,12 +13,21 @@ #
limitations under the License. import paddle -import paddle.fluid as fluid def normalize(x, axis, param_name, param_shape, param_dtype): - l2 = fluid.layers.prior_box(x=x, p=2, axis=1) + l2_norm = paddle.fluid.layers.l2_normalize(x=x, axis=1) param = paddle.static.nn.create_parameter(shape=param_shape, - dtype=string(param_dtype), - name=string(param_name)) - out = paddle.multiply(x=l2, y=param, axis=axis) + dtype=param_dtype, + name=param_name) + param = paddle.reshape(param, [param.shape[-1]]) + perm = list(range(len(l2_norm.shape))) + perm.pop(axis) + perm = perm + [axis] + l2_norm = paddle.transpose(l2_norm, perm=perm) + out = paddle.multiply(x=l2_norm, y=param) + perm = list(range(len(l2_norm.shape))) + dim = perm.pop(-1) + perm.insert(axis, dim) + out = paddle.transpose(out, perm=perm) + return out \ No newline at end of file diff --git a/x2paddle/op_mapper/static/caffe2paddle/caffe_op_mapper.py b/x2paddle/op_mapper/static/caffe2paddle/caffe_op_mapper.py index d9ba3fd4774669f96f7276fd3eb1414f74c8965a..68ce95c71dc7124c5fce71d8338abd0730605f98 100644 --- a/x2paddle/op_mapper/static/caffe2paddle/caffe_op_mapper.py +++ b/x2paddle/op_mapper/static/caffe2paddle/caffe_op_mapper.py @@ -855,7 +855,7 @@ class CaffeOpMapper(OpMapper): out_max_val = params.out_max_val if hasattr(params, out_max_val) else False top_k = params.top_k if hasattr(params, top_k) else 1 - axis = parmas.axis if hasattr(params, axis) else -1 + axis = params.axis if hasattr(params, 'axis') else -1 if axis < 0: axis += len(in_shapes) if out_max_val is True: @@ -1090,17 +1090,17 @@ class CaffeOpMapper(OpMapper): print( "The parameter of {} (type is {}) is not set. 
So we set the parameters as 0" .format(scale_name, node.layer_type)) - self.parmas[scale_name] = \ - np.zeros([1] if params.channel_shared else [1, 1, 1, node.in_shapes[0][1]]).astype("float32") + self.params[scale_name] = \ + np.zeros([1] if params.channel_shared else [node.in_shapes[0][1]]).astype("float32") else: - self.parmas[scale_name] = _adjust_parameters(node)[0] + self.params[scale_name] = _adjust_parameters(node)[0] layer_attrs = { "axis": -1 if params.channel_shared else 1, - "param_name": scale_name, - "param_shape": self.parmas[scale_name].shape, - "param_dtype": str(self.parmas[scale_name].dtype)} - self.pd_pdgraph.add_layer( + "param_name": string(scale_name), + "param_shape": self.params[scale_name].shape, + "param_dtype": string(self.params[scale_name].dtype)} + self.paddle_graph.add_layer( "custom_layer:normalize", inputs={"x": input.name}, outputs=[node.name],