From 56a24ef89201223b740b987061fb4e64d0f8c4a6 Mon Sep 17 00:00:00 2001
From: Channingss
Date: Tue, 17 Sep 2019 13:05:00 +0800
Subject: [PATCH] merge elementwise op converters, support dynamic scale for
 resize op

---
 x2paddle/onnx_infer.py               |   2 -
 x2paddle/op_mapper/onnx_op_mapper.py | 206 +++++++++++----------------
 2 files changed, 86 insertions(+), 122 deletions(-)

diff --git a/x2paddle/onnx_infer.py b/x2paddle/onnx_infer.py
index 8d6b0f2..b883a25 100644
--- a/x2paddle/onnx_infer.py
+++ b/x2paddle/onnx_infer.py
@@ -44,11 +44,9 @@ def main():
     inputs_dict = {}
     for i, ipt in enumerate(inputs):
         inputs_dict[sess.get_inputs()[i].name] = ipt
-
     res = sess.run(None, input_feed=inputs_dict)
     for idx, value_info in enumerate(model.graph.output):
         np.save(os.path.join(save_dir, value_info.name), res[idx])
-
 if __name__ == "__main__":
     main()
diff --git a/x2paddle/op_mapper/onnx_op_mapper.py b/x2paddle/op_mapper/onnx_op_mapper.py
index f8382ab..161c444 100644
--- a/x2paddle/op_mapper/onnx_op_mapper.py
+++ b/x2paddle/op_mapper/onnx_op_mapper.py
@@ -51,7 +51,14 @@ def get_same_padding(in_size, kernel_size, stride):
     return [pad0, pad1]
 
 
-class ONNXOpMapper(OpMapper):
+class ONNXOpMapper(OpMapper):
+    elementwise_ops = {
+        'Add': 'elementwise_add',
+        'Div': 'elementwise_div',
+        'Sub': 'elementwise_sub',
+        'Mul': 'elementwise_mul',
+        'Pow': 'elementwise_pow',
+    }
+
     def __init__(self, decoder, save_dir):
         super(ONNXOpMapper, self).__init__()
         self.decoder = decoder
@@ -63,10 +70,10 @@ class ONNXOpMapper(OpMapper):
         self.is_inference = False
         self.tmp_data_dir = os.path.join(save_dir, 'tmp_data')
         self.get_output_shapes()
-
+
         if not self.op_checker():
             raise Exception("Model are not supported yet.")
-
+
         #mapping op
         print("Total nodes: {}".format(
             sum([
@@ -83,6 +90,8 @@ class ONNXOpMapper(OpMapper):
                 self.directly_map(node)
             elif op in custom_layers:
                 self.deal_custom_layer(node)
+            elif op in self.elementwise_ops:
+                self.elementwise_map(node)
 
         self.remove_tmp_data()
 
@@ -91,9 +100,10 @@ class ONNXOpMapper(OpMapper):
         for node_name in self.graph.topo_sort:
             node = self.graph.get_node(node_name)
             op = node.layer_type
-            if not hasattr(
-                    self, op
-            ) and op not in default_op_mapping and op not in custom_layers:
+            if not hasattr(self, op) and \
+               op not in default_op_mapping and \
+               op not in custom_layers and \
+               op not in self.elementwise_ops:
                 unsupported_ops.add(op)
         if len(unsupported_ops) == 0:
             return True
@@ -131,7 +141,10 @@ class ONNXOpMapper(OpMapper):
         """
         get dynamic shape from infer_result
         """
-        output = np.load(os.path.join(self.tmp_data_dir, layer + '.npy'))
+        path = os.path.join(self.tmp_data_dir, layer + '.npy')
+        if not os.path.exists(path):
+            return [None, None, None]
+        output = np.load(path)
         return output.tolist(), output.dtype, output.shape
 
     def get_output_shapes(self):
@@ -148,7 +161,7 @@ class ONNXOpMapper(OpMapper):
             if opt in value_infos:
                 value_info = value_infos[opt]
                 if len(value_info['shape']
-                       ) == 0 or value_info['dtype'] is None:
+                       ) == 0 or value_info['dtype'] is None or 0 in value_info['shape']:
                     if self.is_inference == False:
                         self.get_results_of_inference(
                             onnx_model, value_infos,
@@ -245,6 +258,48 @@ class ONNXOpMapper(OpMapper):
             if child_func_code is not None:
                 self.used_custom_layers[op + '_child_func'] = child_func_code
 
+    def elementwise_map(self, node):
+        assert node.layer_type in self.elementwise_ops
+        op_type = self.elementwise_ops[node.layer_type]
+        val_x = self.graph.get_input_node(node, idx=0, copy=True)
+        val_y = self.graph.get_input_node(node, idx=1, copy=True)
+
+        if len(val_x.out_shapes[0]) < len(val_y.out_shapes[0]):
+            val_x, val_y = val_y, val_x
+
+        val_y_shape = val_y.out_shapes[0]
+        slice_idx = 0
+        for dim in val_y_shape:
+            if dim == 1:
+                slice_idx += 1
+            else:
+                break
+        attr = {"name": string(node.layer_name)}
+        if slice_idx < len(val_y_shape) and slice_idx > 0:
+            val_y_reshaped = val_y_shape[slice_idx:]
+            var_y_reshaped = val_y.layer_name + '_reshaped'
+            attr_reshaped = {
+                'shape': val_y_reshaped,
+                'name': string(var_y_reshaped)
+            }
+            node.fluid_code.add_layer('reshape',
+                                      inputs=val_y,
+                                      output=var_y_reshaped,
+                                      param_attr=attr_reshaped)
+            inputs = {'x': val_x, 'y': var_y_reshaped}
+            node.fluid_code.add_layer(op_type,
+                                      inputs=inputs,
+                                      output=node,
+                                      param_attr=attr)
+        else:
+            inputs = {'x': val_x, 'y': val_y}
+            node.fluid_code.add_layer(op_type,
+                                      inputs=inputs,
+                                      output=node,
+                                      param_attr=attr)
 
     def place_holder(self, node):
         self.input_shapes.append(node.out_shapes[0])
@@ -296,7 +351,7 @@ class ONNXOpMapper(OpMapper):
         val_x = self.graph.get_input_node(node, idx=0, copy=True)
         val_scales = self.graph.get_input_node(node, idx=1, copy=True)
         val_y = self.graph.get_node(node.layer.output[0], copy=True)
-
+
         out_shape_ = val_y.out_shapes[0]
         if out_shape_ is not None:
             assert len(out_shape_) == 4, 'only 4-D Tensor as X and Y supported'
@@ -320,10 +375,17 @@ class ONNXOpMapper(OpMapper):
             assert len(
                 in_shape) == 4, 'only 4-D Tensor as X and Y supported'
             out_shape_ = [in_shape[2] * scale, in_shape[3] * scale]
-
+
         mode = node.get_attr('mode', 'nearest')
-
         fluid_op = 'resize_{}'.format(mode)
-
+        if 'linear' in mode:
+            print('Warning: Paddle does not support resize with mode linear, '
+                  'falling back to resize_bilinear')
+            fluid_op = 'resize_bilinear'
+
+        if isinstance(val_scales, ONNXGraphNode):
+            scale, _, _ = self.get_dynamic_shape(val_scales.layer_name)
+
         attr = {
             'scale': scale,
             'out_shape': out_shape,
@@ -384,11 +446,21 @@ class ONNXOpMapper(OpMapper):
     def Unsqueeze(self, node):
         val_x = self.graph.get_input_node(node, idx=0, copy=True)
         axes = node.get_attr('axes')
-        attr = {'axes': axes, 'name': string(node.layer_name)}
-        node.fluid_code.add_layer('unsqueeze',
+
+        if len(val_x.out_shapes[0]) == 0:
+            node.fluid_code.add_layer('assign',
                                       inputs=val_x,
                                       output=node,
-                                      param_attr=attr)
+                                      param_attr=None)
+        else:
+            attr = {'axes': axes, 'name': string(node.layer_name)}
+            node.fluid_code.add_layer('unsqueeze',
+                                      inputs=val_x,
+                                      output=node,
+                                      param_attr=attr)
+
+
 
     def Shrink(self, node):
         val_x = self.graph.get_input_node(node, idx=0, copy=True)
@@ -773,44 +845,6 @@ class ONNXOpMapper(OpMapper):
                                   output=node,
                                   param_attr=attr)
 
-    def Add(self, node):
-        val_x = self.graph.get_input_node(node, idx=0, copy=True)
-        val_y = self.graph.get_input_node(node, idx=1, copy=True)
-        inputs = {
-            "x": val_x,
-            "y": val_y,
-        }
-        attr = {"name": string(node.layer_name)}
-        node.fluid_code.add_layer("elementwise_add",
-                                  inputs=inputs,
-                                  output=node,
-                                  param_attr=attr)
-
-    def Sub(self, node):
-        val_x = self.graph.get_input_node(node, idx=0, copy=True)
-        val_y = self.graph.get_input_node(node, idx=1, copy=True)
-        inputs = {
-            "x": val_x,
-            "y": val_y,
-        }
-        attr = {"name": string(node.layer_name)}
-        node.fluid_code.add_layer("elementwise_sub",
-                                  inputs=inputs,
-                                  output=node,
-                                  param_attr=attr)
-
-    def Pow(self, node):
-        val_x = self.graph.get_input_node(node, idx=0, copy=True)
-        val_y = self.graph.get_input_node(node, idx=1, copy=True)
-        inputs = {
-            "x": val_x,
-            "y": val_y,
-        }
-        attr = {"name": string(node.layer_name)}
-        node.fluid_code.add_layer("elementwise_pow",
-                                  inputs=inputs,
-                                  output=node,
-                                  param_attr=attr)
-
     def Sum(self, node):
         val_inps = node.layer.input
@@ -883,74 +917,6 @@ class ONNXOpMapper(OpMapper):
                                   output=node,
                                   param_attr=attr)
 
-    def Mul(self, node):
-        val_x = self.graph.get_input_node(node, idx=0, copy=True)
-        val_y = self.graph.get_input_node(node, idx=1, copy=True)
-        val_y_shape = val_y.out_shapes[0]
-        slice_idx = 0
-        for dim in val_y_shape:
-            if dim == 1:
-                slice_idx += 1
-            else:
-                break
-        attr = {"name": string(node.layer_name)}
-        if slice_idx < len(val_y_shape) and slice_idx > 0:
-            val_y_reshaped = val_y_shape[slice_idx:]
-            var_y_reshaped = val_y.layer_name + '_reshaped'
-            attr_reshaped = {
-                'shape': val_y_reshaped,
-                'name': string(var_y_reshaped)
-            }
-            node.fluid_code.add_layer('reshape',
-                                      inputs=val_y,
-                                      output=var_y_reshaped,
-                                      param_attr=attr_reshaped)
-            inputs = {'x': val_x, 'y': var_y_reshaped}
-            node.fluid_code.add_layer("elementwise_mul",
-                                      inputs=inputs,
-                                      output=node,
-                                      param_attr=attr)
-        else:
-            inputs = {'x': val_x, 'y': val_y}
-            node.fluid_code.add_layer("elementwise_mul",
-                                      inputs=inputs,
-                                      output=node,
-                                      param_attr=attr)
-
-    def Div(self, node):
-        val_x = self.graph.get_input_node(node, idx=0, copy=True)
-        val_y = self.graph.get_input_node(node, idx=1, copy=True)
-        val_y_shape = val_y.out_shapes[0]
-        slice_idx = 0
-        for dim in val_y_shape:
-            if dim == 1:
-                slice_idx += 1
-            else:
-                break
-        attr = {"name": string(node.layer_name)}
-        if slice_idx < len(val_y_shape) and slice_idx > 0:
-            val_y_reshaped = val_y_shape[slice_idx:]
-            var_y_reshaped = val_y.layer_name + '_reshaped'
-            attr_reshaped = {
-                'shape': val_y_reshaped,
-                'name': string(var_y_reshaped)
-            }
-            node.fluid_code.add_layer('reshape',
-                                      inputs=val_y,
-                                      output=var_y_reshaped,
-                                      param_attr=attr_reshaped)
-            inputs = {'x': val_x, 'y': var_y_reshaped}
-            node.fluid_code.add_layer("elementwise_div",
-                                      inputs=inputs,
-                                      output=node,
-                                      param_attr=attr)
-        else:
-            inputs = {'x': val_x, 'y': val_y}
-            node.fluid_code.add_layer("elementwise_div",
-                                      inputs=inputs,
-                                      output=node,
-                                      param_attr=attr)
-
     def Relu(self, node):
         val_x = self.graph.get_input_node(node, idx=0, copy=True)
         attr = {"name": string(node.layer_name)}
-- 
GitLab
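
Note: the substance of this patch is table-driven dispatch. Five nearly
identical handlers (Add, Sub, Mul, Div, Pow) collapse into one elementwise_map
keyed by the elementwise_ops dict, reusing the leading-1 reshape trick that Mul
and Div already used for broadcasting. The standalone sketch below illustrates
that logic outside x2paddle; the helper names (broadcast_slice_idx,
map_elementwise) are illustrative, not x2paddle APIs.

    # Sketch of the dispatch + broadcast-reshape logic introduced above.
    ELEMENTWISE_OPS = {
        'Add': 'elementwise_add',
        'Div': 'elementwise_div',
        'Sub': 'elementwise_sub',
        'Mul': 'elementwise_mul',
        'Pow': 'elementwise_pow',
    }

    def broadcast_slice_idx(y_shape):
        """Count leading 1-dims of y. If there are any, y is reshaped to
        y_shape[idx:] so Paddle's elementwise broadcasting lines up with
        ONNX semantics."""
        idx = 0
        for dim in y_shape:
            if dim == 1:
                idx += 1
            else:
                break
        return idx

    def map_elementwise(onnx_op, y_shape):
        """Return the fluid op name and the reshaped y shape (or None if
        no reshape is needed)."""
        op_type = ELEMENTWISE_OPS[onnx_op]
        idx = broadcast_slice_idx(y_shape)
        if 0 < idx < len(y_shape):
            return op_type, y_shape[idx:]
        return op_type, None

    # Example: a (1, 1, 64) operand in an ONNX Mul maps to elementwise_mul
    # with y reshaped to [64] before the layer is emitted.
    print(map_elementwise('Mul', [1, 1, 64]))  # ('elementwise_mul', [64])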