Commit ddb9aea6 authored by SunAhong1993

fix the Caffe converter

Parent 3d35ebd0
@@ -16,17 +16,21 @@ import paddle
 import paddle.fluid as fluid


 class Normalize(object):
-    def __init__(self, axis, param_name, param_shape):
+    def __init__(self, axis):
         self.axis = axis
-        self.param_name = param_name
-        self.param_shape = param_shape

-    def __call__(self, x):
-        l2 = fluid.layers.prior_box(x=x, p=2, axis=1)
-        attr = fluid.ParamAttr(name=self.param_name, trainable=False)
-        param = paddle.nn.Layer.create_parameter(shape=self.param_shape,
-                                                 attr=atr)
-        out = paddle.multiply(x=l2, y=param, axis=self.axis)
+    def __call__(self, x, param):
+        l2_norm = fluid.layers.l2_normalize(x=x, axis=1)
+        param = paddle.reshape(param, [param.shape[-1]])
+        perm = list(range(len(l2_norm.shape)))
+        perm.pop(self.axis)
+        perm = perm + [self.axis]
+        l2_norm = paddle.transpose(l2_norm, perm=perm)
+        out = paddle.multiply(x=l2_norm, y=param)
+        perm = list(range(len(l2_norm.shape)))
+        dim = perm.pop(-1)
+        perm.insert(self.axis, dim)
+        out = paddle.transpose(out, perm=perm)
         return out
\ No newline at end of file
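
The rewritten `__call__` works around `paddle.multiply` no longer taking an `axis` argument: it moves the scaling axis to the last position, lets the 1-D parameter broadcast against it, and then restores the original layout. A minimal NumPy sketch of the same trick (the function name and shapes are illustrative, not part of the commit):

```python
import numpy as np

def multiply_along_axis(x, scale, axis):
    # Move `axis` to the last position so the 1-D `scale`
    # broadcasts against it, mirroring the patched __call__.
    perm = list(range(x.ndim))
    perm.pop(axis)
    perm = perm + [axis]
    out = x.transpose(perm) * scale  # scale.shape == (x.shape[axis],)
    # Invert the permutation to restore the original axis order.
    inv = list(range(x.ndim))
    dim = inv.pop(-1)
    inv.insert(axis, dim)
    return out.transpose(inv)

x = np.random.rand(2, 3, 4, 4).astype("float32")
scale = np.random.rand(3).astype("float32")  # one scale per channel (axis=1)
assert np.allclose(multiply_along_axis(x, scale, 1),
                   x * scale.reshape(1, 3, 1, 1))
```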
@@ -782,7 +782,7 @@ class CaffeOpMapper(OpMapper):
         out_max_val = params.out_max_val if hasattr(params,
                                                     'out_max_val') else False
         top_k = params.top_k if hasattr(params, 'top_k') else 1
-        axis = parmas.axis if hasattr(params, 'axis') else -1
+        axis = params.axis if hasattr(params, 'axis') else -1
         if axis < 0:
             axis += len(input_shape)
         if out_max_val is True:
@@ -1018,22 +1018,30 @@ class CaffeOpMapper(OpMapper):
             node.inputs) == 1, "The count of Normalize node\'s input is not 1."
         input = self.graph.get_input_node(node, idx=0, copy=True)
         params = node.layer.norm_param
+        param_name = node.layer_name + "_scale"
         if node.data is None or len(node.data) != 1:
             print(
                 "The parameter of {} (type is {}) is not set. So we set the parameters as 0"
                 .format(node.layer_name, node.layer_type))
-            self.parmas[node.layer_name + ".scale"] = \
-                np.zeros([1] if params.channel_shared else [1, 1, 1, node.in_shapes[0][1]]).astype("float32")
+            self.params[param_name] = \
+                np.zeros([1] if params.channel_shared else [node.in_shapes[0][1]]).astype("float32")
         else:
-            self.parmas[node.layer_name + ".scale"] = _adjust_parameters(node)[0]
+            self.params[param_name] = _adjust_parameters(node)[0]
+        self.paddle_graph.add_layer(
+            "self.create_parameter",
+            inputs={},
+            outputs=[param_name],
+            shape=self.params[param_name].shape,
+            attr=string(param_name))
+        inputs_dict = {}
         layer_attrs = {
-            "axis": -1 if params.channel_shared else 1,
-            "param_name": node.layer_name + ".scale",
-            "param_shape": self.parmas[node.layer_name + ".scale"].shape}
-        self.pd_pdgraph.add_layer(
+            "axis": -1 if params.channel_shared else 1}
+        self.paddle_graph.add_layer(
             "custom_layer:Normalize",
-            inputs={"x": input.name},
+            inputs={"x": input.name,
+                    "param": param_name},
             outputs=layer_outputs,
             **layer_attrs)
...
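
In the converted dygraph model, the emitted `self.create_parameter` layer plus the `custom_layer:Normalize` call roughly correspond to the following (a hedged sketch; `ConvertedModel`, the name `conv4_3_norm_scale`, and the 512-channel shape are hypothetical, and `Normalize` is the custom layer class from the first hunk):

```python
import paddle

class ConvertedModel(paddle.nn.Layer):
    def __init__(self):
        super(ConvertedModel, self).__init__()
        # Emitted parameter: one learnable scale per channel,
        # registered on the Layer instead of inside Normalize.
        self.conv4_3_norm_scale = self.create_parameter(
            shape=[512], attr="conv4_3_norm_scale")

    def forward(self, x):
        # Normalize (the custom layer above) now receives the scale
        # as an input rather than creating the parameter itself.
        return Normalize(axis=1)(x, self.conv4_3_norm_scale)
```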
@@ -13,12 +13,21 @@
 # limitations under the License.

 import paddle
-import paddle.fluid as fluid


 def normalize(x, axis, param_name, param_shape, param_dtype):
-    l2 = fluid.layers.prior_box(x=x, p=2, axis=1)
+    l2_norm = paddle.fluid.layers.l2_normalize(x=x, axis=1)
     param = paddle.static.nn.create_parameter(shape=param_shape,
-                                              dtype=string(param_dtype),
-                                              name=string(param_name))
-    out = paddle.multiply(x=l2, y=param, axis=axis)
+                                              dtype=param_dtype,
+                                              name=param_name)
+    param = paddle.reshape(param, [param.shape[-1]])
+    perm = list(range(len(l2_norm.shape)))
+    perm.pop(axis)
+    perm = perm + [axis]
+    l2_norm = paddle.transpose(l2_norm, perm=perm)
+    out = paddle.multiply(x=l2_norm, y=param)
+    perm = list(range(len(l2_norm.shape)))
+    dim = perm.pop(-1)
+    perm.insert(axis, dim)
+    out = paddle.transpose(out, perm=perm)
     return out
\ No newline at end of file
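
A hedged usage sketch of the fixed static-graph `normalize` helper, assuming Paddle 2.x with static mode enabled; the shapes and names are illustrative, not part of the commit:

```python
import numpy as np
import paddle
import paddle.static as static

paddle.enable_static()
main_prog, startup_prog = static.Program(), static.Program()
with static.program_guard(main_prog, startup_prog):
    x = static.data(name="x", shape=[-1, 3, 8, 8], dtype="float32")
    # normalize() is the helper patched above; one scale value per channel.
    y = normalize(x, axis=1, param_name="scale",
                  param_shape=[3], param_dtype="float32")

exe = static.Executor(paddle.CPUPlace())
exe.run(startup_prog)
out, = exe.run(main_prog,
               feed={"x": np.ones([2, 3, 8, 8], "float32")},
               fetch_list=[y])
print(out.shape)  # (2, 3, 8, 8): L2-normalized, then scaled per channel
```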
@@ -855,7 +855,7 @@ class CaffeOpMapper(OpMapper):
         out_max_val = params.out_max_val if hasattr(params,
                                                     'out_max_val') else False
         top_k = params.top_k if hasattr(params, 'top_k') else 1
-        axis = parmas.axis if hasattr(params, 'axis') else -1
+        axis = params.axis if hasattr(params, 'axis') else -1
         if axis < 0:
             axis += len(in_shapes)
         if out_max_val is True:
@@ -1090,17 +1090,17 @@ class CaffeOpMapper(OpMapper):
             print(
                 "The parameter of {} (type is {}) is not set. So we set the parameters as 0"
                 .format(scale_name, node.layer_type))
-            self.parmas[scale_name] = \
-                np.zeros([1] if params.channel_shared else [1, 1, 1, node.in_shapes[0][1]]).astype("float32")
+            self.params[scale_name] = \
+                np.zeros([1] if params.channel_shared else [node.in_shapes[0][1]]).astype("float32")
         else:
-            self.parmas[scale_name] = _adjust_parameters(node)[0]
+            self.params[scale_name] = _adjust_parameters(node)[0]
         layer_attrs = {
             "axis": -1 if params.channel_shared else 1,
-            "param_name": scale_name,
-            "param_shape": self.parmas[scale_name].shape,
-            "param_dtype": str(self.parmas[scale_name].dtype)}
-        self.pd_pdgraph.add_layer(
+            "param_name": string(scale_name),
+            "param_shape": self.params[scale_name].shape,
+            "param_dtype": string(self.params[scale_name].dtype)}
+        self.paddle_graph.add_layer(
             "custom_layer:normalize",
             inputs={"x": input.name},
             outputs=[node.name],
...
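
For the static branch, the emitted `custom_layer:normalize` call roughly lowers to a line like the one below in the generated program (a hedged sketch; the node name `conv4_3_norm` and the 512-element shape are hypothetical, `normalize` is the helper patched above, and `string(...)` appears to wrap values in quotes so they land in the generated code as Python string literals):

```python
# Hypothetical line in the generated static-graph code:
conv4_3_norm = normalize(x=conv4_3, axis=1,
                         param_name='conv4_3_norm_scale',
                         param_shape=[512],
                         param_dtype='float32')
```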