Unverified commit d823b87f · Author: WJJ1995 · Committer: GitHub

[Bug]Support pnc model (#962)

* support pnc model

* fixed code style
Parent: 3844a5a5
@@ -213,6 +213,11 @@ class OpSet():
             attrs_name_map_dict = op_info[1]
             for onnx_attr_name, pd_attr_name in attrs_name_map_dict.items():
                 if onnx_attr_name in onnx_attrs:
+                    # convert for dynamic code: map 0 to False, 1 to True
+                    if pd_attr_name == "keepdim":
+                        keepdims = False if onnx_attrs[
+                            onnx_attr_name] == 0 else True
+                        onnx_attrs[onnx_attr_name] = keepdims
                     layer_attrs[pd_attr_name] = onnx_attrs[onnx_attr_name]
                 else:
                     layer_attrs[pd_attr_name] = op_info[2][onnx_attr_name]
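ONNX stores keepdims as an integer (0/1), while the keepdim argument in generated Paddle dynamic-graph code expects a Python bool, so the new branch normalizes the value before it is emitted. A minimal sketch of that normalization (the attrs dict here is hypothetical):

    onnx_attrs = {"keepdims": 0}          # as parsed from the ONNX attribute
    keepdim = False if onnx_attrs["keepdims"] == 0 else True
    onnx_attrs["keepdims"] = keepdim      # rendered as keepdim=False in generated code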
@@ -340,7 +345,7 @@ class OpSet():
         elif len(node.layer.input) == 3:
             # opset 11
             try:
-                #to avoid the error causeed by NULL value of resize inputs.
+                # to avoid the error caused by NULL value of resize inputs.
                 val_scales = self.graph.get_input_node(
                     node, idx=2, copy=True)
             except:
@@ -912,7 +917,7 @@ class OpSet():
         axis = node.get_attr('axis', 0)
         if len(indices_shape) == 1 or \
             (indices_values is not None and isinstance(indices_values, int)) or \
-            (indices_values is not None and len(indices_values) == 1):
+                (indices_values is not None and len(indices_values) == 1):
             self.paddle_graph.add_layer(
                 'paddle.gather',
                 inputs={'x': val_x.name,
@@ -920,7 +925,7 @@ class OpSet():
                 outputs=[node.name],
                 axis=axis)
             # deal with an index that is a scalar (0-D) Tensor
-            if isinstance(indices_values, int) and len(val_x_shape) > 1:
+            if isinstance(indices_values, int) and len(val_x_shape) != 1:
                 self.paddle_graph.add_layer(
                     'paddle.squeeze',
                     inputs={'x': node.name},
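With a 0-D (scalar) index, ONNX Gather drops the gathered axis, but paddle.gather keeps it, so the converter appends a squeeze; relaxing the rank check from > 1 to != 1 covers every input rank other than 1-D. A minimal sketch of the gather-then-squeeze pattern, assuming a 2-D input:

    import paddle

    x = paddle.arange(6, dtype="float32").reshape([2, 3])
    idx = paddle.to_tensor([1])         # scalar index materialized as a 1-element tensor
    y = paddle.gather(x, idx, axis=0)   # shape [1, 3]: the gathered axis survives
    y = paddle.squeeze(y, axis=0)       # shape [3], matching ONNX's 0-D-index semantics
    print(y.shape)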
@@ -1150,7 +1155,18 @@ class OpSet():
                     ends_value[idx] = val_x.out_shapes[0][axes[idx]]
                 elif ends_value[idx] > 2**31 - 1:
                     ends_value[idx] = 2**31 - 1
+                elif ends_value[idx] < -2**31:
+                    ends_value[idx] = -2**31
+            # If stride is -1 and starts and ends meet the conditions, just reverse it directly
+            if steps == [-1] and len(starts_value) == 1 and len(
+                    ends_value) == 1 and starts_value[
+                        0] == -1 and ends_value[0] == -2**31:
+                self.paddle_graph.add_layer(
+                    "paddle.flip",
+                    inputs={"x": val_x.name},
+                    outputs=[node.name],
+                    axis=axes)
+                return
             layer_attrs = {
                 "axes": axes,
                 "starts": starts_value,
@@ -1186,6 +1202,8 @@ class OpSet():
             for idx in range(len(ends)):
                 if ends[idx] > 2**31 - 1:
                     ends[idx] = 2**31 - 1
+                elif ends[idx] < -2**31:
+                    ends[idx] = 0
             layer_attrs = {"axes": axes, "starts": starts, "ends": ends}
             if steps is not None:
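This second Slice path also clamps very negative ends, since they are later carried in int32 attributes where values below -2**31 would overflow. Clamping to 0 is presumably aimed at the positive-step case, where such an end already selects an empty range, as this sketch shows:

    import numpy as np

    x = np.arange(5)
    # With a positive step, an end far below the start is an empty slice,
    # and so is an end of 0 with a non-negative start:
    assert x[2:-2**31].size == 0 and x[2:0].size == 0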
@@ -1396,13 +1414,12 @@ class OpSet():
     @print_mapping_info
     def Split(self, node):
         val_x = self.graph.get_input_node(node, idx=0, copy=True)
-        paddle_op = 'split'
         split = node.get_attr('split')
         axis = node.get_attr('axis', 0)
         if split is None:
             split_num = len(node.layer.output)
             try:
-                #split is an input of this node
+                # split is an input of this node
                 split_node = self.graph.get_input_node(node, idx=1, copy=True)
                 split_value = _const_weight_or_none(split_node)
                 layer_attrs = {
@@ -1419,7 +1436,7 @@ class OpSet():
                 if hasattr(node, 'index'):
                     outputs_list.append("{}_p{}".format(node.layer_name, i))
                 else:
-                    outputs_list.append("{}".format(node.layer_name))
+                    outputs_list.append("{}".format(node.layer.output[i]))
             if split_num > 1:
                 self.paddle_graph.add_layer(
                     'paddle.split',
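Previously every output in this branch reused node.layer_name, so a multi-output Split produced colliding tensor names; using node.layer.output[i] gives each Paddle tensor its matching ONNX output name. A sketch of the renaming, with hypothetical names:

    # Two ONNX outputs for one Split node (names are illustrative):
    layer_outputs = ["split_out_a", "split_out_b"]
    outputs_list = ["{}".format(name) for name in layer_outputs]
    assert outputs_list == ["split_out_a", "split_out_b"]   # no duplicated node name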
@@ -2233,7 +2250,6 @@ class OpSet():
         kernel_shape = node.get_attr('kernel_shape')
         convnd = len(kernel_shape)
-        assert 2 <= convnd <= 3, 'only Conv2D and Conv3D is supported'
         num_out_channels = val_w.out_shapes[0][0]
         num_in_channels = val_w.out_shapes[0][1]
         paddle_op = 'paddle.nn.Conv{}D'.format(convnd)
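Dropping the assert lets convnd be 1, so a 1-D kernel_shape now flows through the same format string and maps to paddle.nn.Conv1D (presumably what the pnc model requires). For example:

    # The format string covers 1-D, 2-D, and 3-D kernels alike:
    for convnd in (1, 2, 3):
        print('paddle.nn.Conv{}D'.format(convnd))   # Conv1D, Conv2D, Conv3D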
@@ -2379,7 +2395,8 @@ class OpSet():
                                  output_size[1])
         if auto_pad == "SAME_UPPER":
             for i in range(len(total_paddings)):
-                paddings[2 * i] = total_paddings[0] - total_paddings[0] // 2
+                paddings[2 * i] = total_paddings[0] - \
+                    total_paddings[0] // 2
                 paddings[2 * i + 1] = total_paddings[0] // 2
         else:
             for i in range(len(total_paddings)):
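The change here is only a line split for code style; the arithmetic still divides each total padding into a begin part of total - total // 2 and an end part of total // 2, so an odd remainder lands on the begin side. A worked example:

    total = 5
    begin, end = total - total // 2, total // 2
    assert (begin, end) == (3, 2)   # the extra unit goes to the begin side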
@@ -2540,20 +2557,26 @@ class OpSet():
         if input_nums > 5 and node.layer.input[5] != '':
             init_h = self.graph.get_input_node(
                 node, idx=exist_input_nums, copy=True)
-            self.paddle_graph.add_layer(
-                'paddle.reshape',
-                inputs={"x": init_h.name},
-                outputs=[init_h.name],
-                shape=init_h.out_shapes[0])
+            init_h_shape = init_h.out_shapes[0]
+            if len(init_h_shape) != 0 and reduce(lambda x, y: x * y,
+                                                 init_h_shape) not in [1, -1]:
+                self.paddle_graph.add_layer(
+                    'paddle.reshape',
+                    inputs={"x": init_h.name},
+                    outputs=[init_h.name],
+                    shape=init_h.out_shapes[0])
             exist_input_nums += 1
         if input_nums > 6 and node.layer.input[6] != '':
             init_c = self.graph.get_input_node(
                 node, idx=exist_input_nums, copy=True)
-            self.paddle_graph.add_layer(
-                'paddle.reshape',
-                inputs={"x": init_c.name},
-                outputs=[init_c.name],
-                shape=init_c.out_shapes[0])
+            init_c_shape = init_c.out_shapes[0]
+            if len(init_c_shape) != 0 and reduce(lambda x, y: x * y,
+                                                 init_c_shape) not in [1, -1]:
+                self.paddle_graph.add_layer(
+                    'paddle.reshape',
+                    inputs={"x": init_c.name},
+                    outputs=[init_c.name],
+                    shape=init_c.out_shapes[0])
         input_weight_np = _const_weight_or_none(input_weight)
         _rename_or_remove_weight(self.weights, input_weight.name)
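The reshape of the LSTM initial hidden/cell state is now skipped when the recorded shape is empty or its element count multiplies out to 1 or -1, where -1 marks a dynamic dimension that cannot be reshaped to at conversion time; functools.reduce computes the product. A sketch of the guard as a hypothetical helper:

    from functools import reduce

    def needs_reshape(shape):   # hypothetical helper mirroring the new condition
        return len(shape) != 0 and reduce(lambda x, y: x * y, shape) not in [1, -1]

    assert needs_reshape([1, 4, 8])        # product 32: reshape as before
    assert not needs_reshape([-1, 1, 1])   # dynamic dim, product -1: skip
    assert not needs_reshape([])           # unknown shape: skip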
@@ -2592,8 +2615,10 @@ class OpSet():
         def generate_paddle_param_names(op_name, suffix=''):
             param_names = []
             param_names.extend(['{}.weight_ih_l0{}', '{}.weight_hh_l0{}'])
-            if have_bias != False: param_names.append('{}.bias_ih_l0{}')
-            if have_bias != False: param_names.append('{}.bias_hh_l0{}')
+            if have_bias != False:
+                param_names.append('{}.bias_ih_l0{}')
+            if have_bias != False:
+                param_names.append('{}.bias_hh_l0{}')
             param_names = [x.format(op_name, suffix) for x in param_names]
             return param_names
@@ -2639,6 +2664,14 @@ class OpSet():
     def TopK(self, node):
         val_x = self.graph.get_input_node(node, idx=0, copy=True)
         val_k = self.graph.get_input_node(node, idx=1, copy=True)
+        # If the TopK result is the entire graph output, modify the graph outputs
+        graph_output_new = list()
+        if node.layer_name in self.graph.output_nodes:
+            graph_output_new = [
+                "{}_p{}".format(node.layer_name, 0)
+                if x == node.layer_name else x for x in self.graph.output_nodes
+            ]
+            self.paddle_graph.outputs = graph_output_new
         layer_attrs = dict()
         layer_attrs["axis"] = node.get_attr('axis', -1)
         layer_attrs["largest"] = True if node.get_attr('largest',
......
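TopK lowers to a multi-output Paddle op whose tensors are named "{layer_name}_p{i}", so when the ONNX node name itself appears in the graph outputs it must be rewritten to the values tensor "..._p0". The rewrite, sketched with hypothetical names:

    output_nodes = ["conv_0", "topk_0"]     # hypothetical graph outputs
    node_name = "topk_0"
    graph_output_new = [
        "{}_p{}".format(node_name, 0) if x == node_name else x
        for x in output_nodes
    ]
    assert graph_output_new == ["conv_0", "topk_0_p0"]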