提交 8085ee04 编写于 作者: S SunAhong1993

fix the BatchNorm (bn) conversion: unsqueeze/squeeze 2-D inputs around batch_norm, and default Scale bias to zeros when bias_term is absent

上级 cb705751
......@@ -771,6 +771,12 @@ class CaffeOpMapper(OpMapper):
'epsilon': eps,
'momentum': momentum
}
if len(node.in_shapes[0]) == 2:
self.paddle_graph.add_layer(
"paddle.unsqueeze",
inputs={"x": input.name},
outputs=[input.name],
axis=[2,3])
self.paddle_graph.add_layer(
kernel="paddle.nn.functional.batch_norm",
inputs={"x": input.name,
......@@ -780,6 +786,12 @@ class CaffeOpMapper(OpMapper):
"running_var": variance_name,},
outputs=[node.name],
**layer_attrs)
if len(node.in_shapes[0]) == 2:
self.paddle_graph.add_layer(
"paddle.squeeze",
inputs={"x": node.layer_name},
outputs=[node.layer_name],
axis=[2,3])
def Scale(self, node):
if node.data is None:
......@@ -795,7 +807,12 @@ class CaffeOpMapper(OpMapper):
else:
self.params[node.name + "_cparam1"] = np.squeeze(node.data[
0]).astype("float32")
self.params[node.name + "_cparam2"] = np.squeeze(node.data[
if not node.layer.scale_param.bias_term:
self.params[node.layer_name + "_cparam2"] = np.zeros([
node.in_shapes[0][1],
]).astype("float32")
else:
self.params[node.layer_name + "_cparam2"] = np.squeeze(node.data[
1]).astype("float32")
params = node.layer.scale_param
axis = params.axis
......@@ -826,10 +843,16 @@ class CaffeOpMapper(OpMapper):
inputs_dict = {}
inputs_dict['x'] = input0_name
inputs_dict['y'] = node.name + "_cparam1"
if len(node.in_shapes[0]) == 2:
self.paddle_graph.add_layer(
"paddle.multiply",
inputs=inputs_dict,
outputs=[node.name + "_mul"],
outputs=[node.layer_name + "_mul"])
else:
self.paddle_graph.add_layer(
"paddle.multiply",
inputs=inputs_dict,
outputs=[node.layer_name + "_mul"],
axis=axis)
self.paddle_graph.add_layer(
"paddle.static.create_parameter",
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册