Unverified commit d7242534, authored by: J Jason, committed by: GitHub

Merge pull request #146 from Channingss/develop

supported dynamic scale for resize op
......@@ -44,7 +44,6 @@ def main():
inputs_dict = {}
for i, ipt in enumerate(inputs):
inputs_dict[sess.get_inputs()[i].name] = ipt
res = sess.run(None, input_feed=inputs_dict)
for idx, value_info in enumerate(model.graph.output):
np.save(os.path.join(save_dir, value_info.name), res[idx])
......
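For context, the hunk above is from a validation script that replays the model through onnxruntime and caches every graph output to disk for the shape-inference steps further down. A minimal standalone sketch of the same pattern, assuming onnxruntime is installed (function name and arguments here are illustrative, not part of the codebase):

import os
import numpy as np
import onnxruntime

def run_and_cache(model_path, inputs, save_dir):
    # feed each array to the session input at the matching position
    sess = onnxruntime.InferenceSession(model_path)
    inputs_dict = {meta.name: ipt
                   for meta, ipt in zip(sess.get_inputs(), inputs)}
    res = sess.run(None, input_feed=inputs_dict)
    # cache each graph output as <name>.npy for later value/shape lookup
    for meta, value in zip(sess.get_outputs(), res):
        np.save(os.path.join(save_dir, meta.name), value)

The original iterates model.graph.output rather than sess.get_outputs(); for a well-formed model the two yield the same output names.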
......@@ -52,6 +52,14 @@ def get_same_padding(in_size, kernel_size, stride):
class ONNXOpMapper(OpMapper):
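# ONNX binary ops that share one broadcast-aware mapping; dispatched to elementwise_map below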
elementwise_ops = {
'Add': 'elementwise_add',
'Div': 'elementwise_div',
'Sub': 'elementwise_sub',
'Mul': 'elementwise_mul',
'Pow': 'elementwise_pow',
}
def __init__(self, decoder, save_dir):
super(ONNXOpMapper, self).__init__()
self.decoder = decoder
......@@ -83,6 +91,8 @@ class ONNXOpMapper(OpMapper):
self.directly_map(node)
elif op in custom_layers:
self.deal_custom_layer(node)
elif op in self.elementwise_ops:
self.elementwise_map(node)
self.remove_tmp_data()
......@@ -91,9 +101,10 @@ class ONNXOpMapper(OpMapper):
for node_name in self.graph.topo_sort:
node = self.graph.get_node(node_name)
op = node.layer_type
if not hasattr(
self, op
) and op not in default_op_mapping and op not in custom_layers:
if not hasattr(self, op) and \
op not in default_op_mapping and \
op not in custom_layers and \
op not in self.elementwise_ops:
unsupported_ops.add(op)
if len(unsupported_ops) == 0:
return True
......@@ -131,7 +142,10 @@ class ONNXOpMapper(OpMapper):
"""
get dynamic shape from infer_result
"""
output = np.load(os.path.join(self.tmp_data_dir, layer + '.npy'))
path = os.path.join(self.tmp_data_dir, layer + '.npy')
if not os.path.exists(path):
return [None, None, None]
output = np.load(path)
return output.tolist(), output.dtype, output.shape
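The guard added above turns a missing cache file into a sentinel triple instead of an exception. A minimal round-trip sketch of the caching convention it relies on, with a hypothetical directory and layer name:

import os
import numpy as np

tmp_data_dir = '/tmp/x2paddle'  # hypothetical cache directory
os.makedirs(tmp_data_dir, exist_ok=True)
# the inference pass caches each output tensor as <layer_name>.npy ...
np.save(os.path.join(tmp_data_dir, 'scales.npy'),
        np.array([1., 1., 2., 2.], dtype='float32'))
# ... which get_dynamic_shape later reads back as (values, dtype, shape)
output = np.load(os.path.join(tmp_data_dir, 'scales.npy'))
assert output.tolist() == [1.0, 1.0, 2.0, 2.0]
assert output.shape == (4,)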
def get_output_shapes(self):
......@@ -147,8 +161,8 @@ class ONNXOpMapper(OpMapper):
for opt in layer.output:
if opt in value_infos:
value_info = value_infos[opt]
if len(value_info['shape']
) == 0 or value_info['dtype'] is None:
if len(value_info['shape']) == 0 or value_info[
'dtype'] is None or 0 in value_info['shape']:
if not self.is_inference:
self.get_results_of_inference(
onnx_model, value_infos,
......@@ -246,6 +260,48 @@ class ONNXOpMapper(OpMapper):
self.used_custom_layers[op +
'_child_func'] = child_func_code
def elementwise_map(self, node):
assert node.layer_type in self.elementwise_ops
op_type = self.elementwise_ops[node.layer_type]
val_x = self.graph.get_input_node(node, idx=0, copy=True)
val_y = self.graph.get_input_node(node, idx=1, copy=True)
if len(val_x.out_shapes[0]) < len(val_y.out_shapes[0]):
val_x, val_y = val_y, val_x
val_y_shape = val_y.out_shapes[0]
val_x_shape = val_x.out_shapes[0]
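# count the run of leading 1-dims in y's shape; they are stripped below so
# that y lines up with x's trailing dimensions for broadcasting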
slice_idx = 0
for dim in val_y_shape:
if dim == 1:
slice_idx += 1
else:
break
attr = {"name": string(node.layer_name)}
if slice_idx < len(val_y_shape) and slice_idx > 0:
val_y_reshaped = val_y_shape[slice_idx:]
var_y_reshaped = val_y.layer_name + '_reshaped'
attr_reshaped = {
'shape': val_y_reshaped,
'name': string(var_y_reshaped)
}
node.fluid_code.add_layer('reshape',
inputs=val_y,
output=var_y_reshaped,
param_attr=attr_reshaped)
inputs = {'x': val_x, 'y': var_y_reshaped}
node.fluid_code.add_layer(op_type,
inputs=inputs,
output=node,
param_attr=attr)
else:
inputs = {'x': val_x, 'y': val_y}
node.fluid_code.add_layer(op_type,
inputs=inputs,
output=node,
param_attr=attr)
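To make the reshape trick concrete, here is a self-contained sketch of the leading-ones stripping performed above (the function name and shapes are illustrative, not part of the codebase):

def strip_leading_ones(y_shape):
    # drop the leading run of 1-dims, e.g. [1, 3, 1, 1] -> [3, 1, 1],
    # so y broadcasts against x's trailing dimensions
    slice_idx = 0
    for dim in y_shape:
        if dim == 1:
            slice_idx += 1
        else:
            break
    if 0 < slice_idx < len(y_shape):
        return y_shape[slice_idx:]
    return y_shape  # nothing to strip, or the shape is all ones

assert strip_leading_ones([1, 3, 1, 1]) == [3, 1, 1]
assert strip_leading_ones([8, 3]) == [8, 3]
assert strip_leading_ones([1, 1]) == [1, 1]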
def place_holder(self, node):
self.input_shapes.append(node.out_shapes[0])
attr = {
......@@ -322,7 +378,16 @@ class ONNXOpMapper(OpMapper):
out_shape_ = [in_shape[2] * scale, in_shape[3] * scale]
mode = node.get_attr('mode', 'nearest')
fluid_op = 'resize_{}'.format(mode)
if 'linear' in mode:
print(
'Warning: Paddle does not support resize with mode=linear; using bilinear instead.'
)
fluid_op = 'resize_bilinear'
if isinstance(val_scales, ONNXGraphNode):
scale, _, _ = self.get_dynamic_shape(val_scales.layer_name)
attr = {
'scale': scale,
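This is the core of the dynamic-scale support named in the PR title: when the scales input is produced by another node (an ONNXGraphNode) rather than a constant, its concrete value is recovered from the cached inference pass via get_dynamic_shape. For reference, ONNX Resize carries one scale factor per input axis, so for an NCHW tensor only the last two entries change the spatial size. A tiny worked example with hypothetical values:

import numpy as np

scales = np.array([1.0, 1.0, 2.0, 2.0], dtype=np.float32)
in_shape = [1, 3, 16, 16]
# only the H and W factors affect the output spatial size
out_hw = [int(in_shape[2] * scales[2]), int(in_shape[3] * scales[3])]
assert out_hw == [32, 32]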
......@@ -384,11 +449,18 @@ class ONNXOpMapper(OpMapper):
def Unsqueeze(self, node):
val_x = self.graph.get_input_node(node, idx=0, copy=True)
axes = node.get_attr('axes')
attr = {'axes': axes, 'name': string(node.layer_name)}
node.fluid_code.add_layer('unsqueeze',
inputs=val_x,
output=node,
param_attr=attr)
if len(val_x.out_shapes[0]) == 0:
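# 0-d (scalar) input: nothing to unsqueeze, so forward the value unchanged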
node.fluid_code.add_layer('assign',
inputs=val_x,
output=node,
param_attr=None)
else:
attr = {'axes': axes, 'name': string(node.layer_name)}
node.fluid_code.add_layer('unsqueeze',
inputs=val_x,
output=node,
param_attr=attr)
def Shrink(self, node):
val_x = self.graph.get_input_node(node, idx=0, copy=True)
......@@ -773,45 +845,6 @@ class ONNXOpMapper(OpMapper):
output=node,
param_attr=attr)
def Add(self, node):
val_x = self.graph.get_input_node(node, idx=0, copy=True)
val_y = self.graph.get_input_node(node, idx=1, copy=True)
inputs = {
"x": val_x,
"y": val_y,
}
attr = {"name": string(node.layer_name)}
node.fluid_code.add_layer("elementwise_add",
inputs=inputs,
output=node,
param_attr=attr)
def Sub(self, node):
val_x = self.graph.get_input_node(node, idx=0, copy=True)
val_y = self.graph.get_input_node(node, idx=1, copy=True)
inputs = {
"x": val_x,
"y": val_y,
}
attr = {"name": string(node.layer_name)}
node.fluid_code.add_layer("elementwise_sub",
inputs=inputs,
output=node,
param_attr=attr)
def Pow(self, node):
val_x = self.graph.get_input_node(node, idx=0, copy=True)
val_y = self.graph.get_input_node(node, idx=1, copy=True)
inputs = {
"x": val_x,
"y": val_y,
}
attr = {"name": string(node.layer_name)}
node.fluid_code.add_layer("elementwise_pow",
inputs=inputs,
output=node,
param_attr=attr)
def Sum(self, node):
val_inps = node.layer.input
inputs = {
......@@ -883,74 +916,6 @@ class ONNXOpMapper(OpMapper):
output=node,
param_attr=attr)
def Mul(self, node):
val_x = self.graph.get_input_node(node, idx=0, copy=True)
val_y = self.graph.get_input_node(node, idx=1, copy=True)
val_y_shape = val_y.out_shapes[0]
slice_idx = 0
for dim in val_y_shape:
if dim == 1:
slice_idx += 1
else:
break
attr = {"name": string(node.layer_name)}
if slice_idx < len(val_y_shape) and slice_idx > 0:
val_y_reshaped = val_y_shape[slice_idx:]
var_y_reshaped = val_y.layer_name + '_reshaped'
attr_reshaped = {
'shape': val_y_reshaped,
'name': string(var_y_reshaped)
}
node.fluid_code.add_layer('reshape',
inputs=val_y,
output=var_y_reshaped,
param_attr=attr_reshaped)
inputs = {'x': val_x, 'y': var_y_reshaped}
node.fluid_code.add_layer("elementwise_mul",
inputs=inputs,
output=node,
param_attr=attr)
else:
inputs = {'x': val_x, 'y': val_y}
node.fluid_code.add_layer("elementwise_mul",
inputs=inputs,
output=node,
param_attr=attr)
def Div(self, node):
val_x = self.graph.get_input_node(node, idx=0, copy=True)
val_y = self.graph.get_input_node(node, idx=1, copy=True)
val_y_shape = val_y.out_shapes[0]
slice_idx = 0
for dim in val_y_shape:
if dim == 1:
slice_idx += 1
else:
break
attr = {"name": string(node.layer_name)}
if slice_idx < len(val_y_shape) and slice_idx > 0:
val_y_reshaped = val_y_shape[slice_idx:]
var_y_reshaped = val_y.layer_name + '_reshaped'
attr_reshaped = {
'shape': val_y_reshaped,
'name': string(var_y_reshaped)
}
node.fluid_code.add_layer('reshape',
inputs=val_y,
output=var_y_reshaped,
param_attr=attr_reshaped)
inputs = {'x': val_x, 'y': var_y_reshaped}
node.fluid_code.add_layer("elementwise_div",
inputs=inputs,
output=node,
param_attr=attr)
else:
inputs = {'x': val_x, 'y': val_y}
node.fluid_code.add_layer("elementwise_div",
inputs=inputs,
output=node,
param_attr=attr)
def Relu(self, node):
val_x = self.graph.get_input_node(node, idx=0, copy=True)
attr = {"name": string(node.layer_name)}
......