Commit 56a24ef8 authored by Channingss

Merge the elementwise op converters; support dynamic scales for the resize op

Parent 065797b0
@@ -44,11 +44,9 @@ def main():
    inputs_dict = {}
    for i, ipt in enumerate(inputs):
        inputs_dict[sess.get_inputs()[i].name] = ipt
    res = sess.run(None, input_feed=inputs_dict)
    for idx, value_info in enumerate(model.graph.output):
        np.save(os.path.join(save_dir, value_info.name), res[idx])

if __name__ == "__main__":
    main()
@@ -52,6 +52,13 @@ def get_same_padding(in_size, kernel_size, stride):

class ONNXOpMapper(OpMapper):
    elementwise_ops = {
        'Add': 'elementwise_add',
        'Div': 'elementwise_div',
        'Sub': 'elementwise_sub',
        'Mul': 'elementwise_mul',
        'Pow': 'elementwise_pow',
    }
    def __init__(self, decoder, save_dir):
        super(ONNXOpMapper, self).__init__()
        self.decoder = decoder
@@ -83,6 +90,8 @@ class ONNXOpMapper(OpMapper):
                self.directly_map(node)
            elif op in custom_layers:
                self.deal_custom_layer(node)
            elif op in self.elementwise_ops:
                self.elementwise_map(node)

        self.remove_tmp_data()
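The five binary arithmetic ops now share one table-driven path instead of five near-identical handler methods (the old Add/Sub/Pow/Mul/Div bodies are removed further down in this commit). A minimal standalone sketch of the dispatch idea, using illustrative names rather than X2Paddle's real API:

    # Sketch of table-driven op dispatch: one handler serves every op in
    # the table; everything else falls through to the other mapping paths.
    ELEMENTWISE_OPS = {
        'Add': 'elementwise_add',
        'Div': 'elementwise_div',
        'Sub': 'elementwise_sub',
        'Mul': 'elementwise_mul',
        'Pow': 'elementwise_pow',
    }

    def dispatch(op_type):
        if op_type in ELEMENTWISE_OPS:
            return ELEMENTWISE_OPS[op_type]
        return None  # direct mapping or custom layer instead

    assert dispatch('Mul') == 'elementwise_mul'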
@@ -91,9 +100,10 @@ class ONNXOpMapper(OpMapper):
        for node_name in self.graph.topo_sort:
            node = self.graph.get_node(node_name)
            op = node.layer_type
            if not hasattr(self, op) and \
                    op not in default_op_mapping and \
                    op not in custom_layers and \
                    op not in self.elementwise_ops:
                unsupported_ops.add(op)
        if len(unsupported_ops) == 0:
            return True
@@ -131,7 +141,10 @@ class ONNXOpMapper(OpMapper):
""" """
get dynamic shape from infer_result get dynamic shape from infer_result
""" """
        path = os.path.join(self.tmp_data_dir, layer + '.npy')
        if not os.path.exists(path):
            # No inference dump exists for this layer; let the caller
            # fall back instead of crashing on np.load.
            return [None, None, None]
        output = np.load(path)
        return output.tolist(), output.dtype, output.shape
    def get_output_shapes(self):
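Because get_dynamic_shape now returns [None, None, None] rather than raising when the .npy dump is absent, callers are expected to guard on the unpacked values. A self-contained sketch of that pattern (the stub function and paths are illustrative, not X2Paddle's API):

    import os
    import numpy as np

    def get_dynamic_shape_like(tmp_data_dir, layer):
        # Stub mirroring the guarded load above.
        path = os.path.join(tmp_data_dir, layer + '.npy')
        if not os.path.exists(path):
            return [None, None, None]
        output = np.load(path)
        return output.tolist(), output.dtype, output.shape

    values, dtype, shape = get_dynamic_shape_like('/tmp/does_not_exist', 'conv_0')
    if values is None:
        # No inference dump for this layer; use static shape info instead.
        print('missing dump, falling back to static shapes')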
@@ -148,7 +161,7 @@ class ONNXOpMapper(OpMapper):
                if opt in value_infos:
                    value_info = value_infos[opt]
                    if len(value_info['shape']) == 0 or \
                            value_info['dtype'] is None or \
                            0 in value_info['shape']:
                        if not self.is_inference:
                            self.get_results_of_inference(
                                onnx_model, value_infos,
@@ -245,6 +258,48 @@ class ONNXOpMapper(OpMapper):
            if child_func_code is not None:
                self.used_custom_layers[op + '_child_func'] = child_func_code
    def elementwise_map(self, node):
        assert node.layer_type in self.elementwise_ops
        op_type = self.elementwise_ops[node.layer_type]

        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        val_y = self.graph.get_input_node(node, idx=1, copy=True)
        # Keep the higher-rank operand in x so that y broadcasts onto x.
        if len(val_x.out_shapes[0]) < len(val_y.out_shapes[0]):
            val_x, val_y = val_y, val_x

        val_y_shape = val_y.out_shapes[0]
        val_x_shape = val_x.out_shapes[0]

        # Count leading size-1 dims of y; they are squeezed away so that
        # fluid's elementwise broadcasting can align the remaining dims.
        slice_idx = 0
        for dim in val_y_shape:
            if dim == 1:
                slice_idx += 1
            else:
                break

        attr = {"name": string(node.layer_name)}
        if slice_idx < len(val_y_shape) and slice_idx > 0:
            val_y_reshaped = val_y_shape[slice_idx:]
            var_y_reshaped = val_y.layer_name + '_reshaped'
            attr_reshaped = {
                'shape': val_y_reshaped,
                'name': string(var_y_reshaped)
            }
            node.fluid_code.add_layer('reshape',
                                      inputs=val_y,
                                      output=var_y_reshaped,
                                      param_attr=attr_reshaped)
            inputs = {'x': val_x, 'y': var_y_reshaped}
            node.fluid_code.add_layer(op_type,
                                      inputs=inputs,
                                      output=node,
                                      param_attr=attr)
        else:
            inputs = {'x': val_x, 'y': val_y}
            node.fluid_code.add_layer(op_type,
                                      inputs=inputs,
                                      output=node,
                                      param_attr=attr)
    def place_holder(self, node):
        self.input_shapes.append(node.out_shapes[0])
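For intuition about the reshape in elementwise_map above: only *leading* size-1 dimensions of y are stripped, which is what lets a per-channel tensor broadcast against a full NCHW tensor. A standalone sketch of the same trimming logic in plain numpy (names illustrative):

    import numpy as np

    def trim_leading_ones(shape):
        # Mirrors the slice_idx loop in elementwise_map: count leading 1-dims.
        idx = 0
        for dim in shape:
            if dim != 1:
                break
            idx += 1
        # Guard the all-ones case, as the `slice_idx < len(shape)` check does.
        return shape[idx:] if 0 < idx < len(shape) else shape

    x = np.zeros((8, 3, 32, 32))
    y = np.zeros((1, 3, 1, 1))                # e.g. a per-channel scale
    print(trim_leading_ones(list(y.shape)))   # -> [3, 1, 1]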
@@ -322,7 +377,14 @@ class ONNXOpMapper(OpMapper):
            out_shape_ = [in_shape[2] * scale, in_shape[3] * scale]

        mode = node.get_attr('mode', 'nearest')
        fluid_op = 'resize_{}'.format(mode)
        if 'linear' in mode:
            print('Warning: Paddle does not support resize with mode '
                  '"linear"; using bilinear instead.')
            fluid_op = 'resize_bilinear'
        if isinstance(val_scales, ONNXGraphNode):
            # The scales arrive as a graph input rather than a constant
            # attribute, so recover their value from the inference dump.
            scale, _, _ = self.get_dynamic_shape(val_scales.layer_name)
        attr = {
            'scale': scale,
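For reference, the kind of value the dynamic-scales path above recovers: an ONNX Resize/Upsample scales tensor carries one multiplier per axis, and only the spatial entries feed the fluid resize op. A toy illustration (the numbers are made up):

    import numpy as np

    # A dumped scales tensor as ONNX provides it: one entry per axis (N, C, H, W).
    scales = np.array([1.0, 1.0, 2.0, 2.0], dtype=np.float32)
    in_shape = [1, 3, 224, 224]
    out_hw = [int(in_shape[2] * scales[2]), int(in_shape[3] * scales[3])]
    print(out_hw)  # [448, 448]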
@@ -384,12 +446,22 @@ class ONNXOpMapper(OpMapper):
    def Unsqueeze(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        axes = node.get_attr('axes')
        if len(val_x.out_shapes[0]) == 0:
            # No shape information for the input (e.g. a rank-0 tensor):
            # pass it through with assign instead of unsqueezing.
            node.fluid_code.add_layer('assign',
                                      inputs=val_x,
                                      output=node,
                                      param_attr=None)
        else:
            attr = {'axes': axes, 'name': string(node.layer_name)}
            node.fluid_code.add_layer('unsqueeze',
                                      inputs=val_x,
                                      output=node,
                                      param_attr=attr)
    def Shrink(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        bias = node.get_attr('bias')
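Back to the Unsqueeze fallback above: ONNX's Unsqueeze inserts new size-1 axes (numpy's expand_dims is the closest analogue), but with an empty shape list the mapper cannot place them, so it emits a plain assign instead. A toy numpy illustration of what the skipped op would do:

    import numpy as np

    x = np.float32(3.0)            # a rank-0 value, shape ()
    y = np.expand_dims(x, axis=0)  # what ONNX Unsqueeze(axes=[0]) would produce
    print(y.shape)                 # (1,)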
@@ -773,44 +845,6 @@ class ONNXOpMapper(OpMapper):
                                  output=node,
                                  param_attr=attr)
    def Add(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        val_y = self.graph.get_input_node(node, idx=1, copy=True)
        inputs = {
            "x": val_x,
            "y": val_y,
        }
        attr = {"name": string(node.layer_name)}
        node.fluid_code.add_layer("elementwise_add",
                                  inputs=inputs,
                                  output=node,
                                  param_attr=attr)

    def Sub(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        val_y = self.graph.get_input_node(node, idx=1, copy=True)
        inputs = {
            "x": val_x,
            "y": val_y,
        }
        attr = {"name": string(node.layer_name)}
        node.fluid_code.add_layer("elementwise_sub",
                                  inputs=inputs,
                                  output=node,
                                  param_attr=attr)

    def Pow(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        val_y = self.graph.get_input_node(node, idx=1, copy=True)
        inputs = {
            "x": val_x,
            "y": val_y,
        }
        attr = {"name": string(node.layer_name)}
        node.fluid_code.add_layer("elementwise_pow",
                                  inputs=inputs,
                                  output=node,
                                  param_attr=attr)
    def Sum(self, node):
        val_inps = node.layer.input
@@ -883,74 +917,6 @@ class ONNXOpMapper(OpMapper):
                                  output=node,
                                  param_attr=attr)
    def Mul(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        val_y = self.graph.get_input_node(node, idx=1, copy=True)
        val_y_shape = val_y.out_shapes[0]
        slice_idx = 0
        for dim in val_y_shape:
            if dim == 1:
                slice_idx += 1
            else:
                break
        attr = {"name": string(node.layer_name)}
        if slice_idx < len(val_y_shape) and slice_idx > 0:
            val_y_reshaped = val_y_shape[slice_idx:]
            var_y_reshaped = val_y.layer_name + '_reshaped'
            attr_reshaped = {
                'shape': val_y_reshaped,
                'name': string(var_y_reshaped)
            }
            node.fluid_code.add_layer('reshape',
                                      inputs=val_y,
                                      output=var_y_reshaped,
                                      param_attr=attr_reshaped)
            inputs = {'x': val_x, 'y': var_y_reshaped}
            node.fluid_code.add_layer("elementwise_mul",
                                      inputs=inputs,
                                      output=node,
                                      param_attr=attr)
        else:
            inputs = {'x': val_x, 'y': val_y}
            node.fluid_code.add_layer("elementwise_mul",
                                      inputs=inputs,
                                      output=node,
                                      param_attr=attr)

    def Div(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        val_y = self.graph.get_input_node(node, idx=1, copy=True)
        val_y_shape = val_y.out_shapes[0]
        slice_idx = 0
        for dim in val_y_shape:
            if dim == 1:
                slice_idx += 1
            else:
                break
        attr = {"name": string(node.layer_name)}
        if slice_idx < len(val_y_shape) and slice_idx > 0:
            val_y_reshaped = val_y_shape[slice_idx:]
            var_y_reshaped = val_y.layer_name + '_reshaped'
            attr_reshaped = {
                'shape': val_y_reshaped,
                'name': string(var_y_reshaped)
            }
            node.fluid_code.add_layer('reshape',
                                      inputs=val_y,
                                      output=var_y_reshaped,
                                      param_attr=attr_reshaped)
            inputs = {'x': val_x, 'y': var_y_reshaped}
            node.fluid_code.add_layer("elementwise_div",
                                      inputs=inputs,
                                      output=node,
                                      param_attr=attr)
        else:
            inputs = {'x': val_x, 'y': val_y}
            node.fluid_code.add_layer("elementwise_div",
                                      inputs=inputs,
                                      output=node,
                                      param_attr=attr)
    def Relu(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        attr = {"name": string(node.layer_name)}
...