Unverified · Commit 2c12e94b, authored by Jason, committed by GitHub

Merge pull request #403 from Channingss/fix_shape_infer

Fix shape infer
@@ -346,8 +346,12 @@ class ONNXGraph(Graph):
                 #if len(value_info['shape']) == 0 or value_info[
                 #        'dtype'] is None or 0 in value_info['shape']:
                 #    #TODO add node shape inference
+                shape = value_info['shape']
+                for idx in range(len(shape)):
+                    if shape[idx] == 0:
+                        shape[idx] = -1
+                node.out_shapes.append(shape)
                 node.dtype = value_info['dtype']
-                node.out_shapes.append(value_info['shape'])
             else:
                 node.out_shapes.append([])
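Note: the shape-inference fix above rewrites unknown dimensions (reported as 0 by ONNX shape inference) to -1, Paddle's convention for dynamic dimensions. A minimal standalone sketch of the effect:

    # toy shape; a leading batch dimension inferred as 0 becomes dynamic (-1)
    shape = [0, 3, 224, 224]
    for idx in range(len(shape)):
        if shape[idx] == 0:
            shape[idx] = -1
    print(shape)  # [-1, 3, 224, 224]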
......
@@ -104,14 +104,6 @@ class OpSet9():
     default_op_mapping = {
         'Shape': ['shape', ['X'], ['Out']],
-        'Clip': [
-            'clip', ['X'], ['Out'], dict(), dict(
-                min=(np.asarray(
-                    [255, 255, 127, 255], dtype=np.uint8).view(np.float32)[0]),
-                max=(np.asarray(
-                    [255, 255, 127, 127], dtype=np.uint8).view(np.float32)[0]),
-            )
-        ],
         'Erf': ['erf', ['X'], ['Out']],
         'Ceil': ['ceil', ['X'], ['Out']],
         'ReduceMean': [
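Note: Clip is dropped from default_op_mapping because it gets a dedicated handler further below. For reference, the removed byte-pattern defaults decode to the float32 extremes, i.e. min = -FLT_MAX and max = +FLT_MAX (on a little-endian machine):

    import numpy as np
    print(np.asarray([255, 255, 127, 255], dtype=np.uint8).view(np.float32)[0])  # -3.4028235e+38
    print(np.asarray([255, 255, 127, 127], dtype=np.uint8).view(np.float32)[0])  #  3.4028235e+38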
@@ -357,6 +349,7 @@ class OpSet9():
                 'Warnning: paddle not support op:resize wiht mode: linear, we use bilinear replace linear'
             )
             fluid_op = 'resize_bilinear'
+            attr['align_corners'] = False
         node.fluid_code.add_layer(
             fluid_op, inputs=inputs, output=node, param_attr=attr)
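Note: with align_corners explicitly set to False, an ONNX Resize in linear mode is approximated by bilinear interpolation without corner alignment. A hypothetical example of the kind of code the converter would then emit (input name and scale are illustrative, not taken from this diff):

    import paddle.fluid as fluid
    x = fluid.data(name='x', shape=[-1, 3, 32, 32], dtype='float32')
    out = fluid.layers.resize_bilinear(x, scale=2.0, align_corners=False)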
@@ -745,53 +738,59 @@ class OpSet9():
                     param_attr=None)
             else:
                 input_inner_indices = node.layer_name + '_input_inner_indices'
+                shape = val_x.out_shapes[0]
+                node.fluid_code.add_layer(
+                    'reshape',
+                    inputs=indices.layer_name,
+                    output=indices.layer_name,
+                    param_attr={'shape': indices.out_shapes[0]})
+                zeros_like_val_x = val_x.layer_name + '_zeros'
                 node.fluid_code.add_layer(
-                    'scatter_nd',
+                    'zeros_like',
+                    inputs=val_x,
+                    output=zeros_like_val_x,
+                    param_attr=None)
+                node.fluid_code.add_layer(
+                    'scatter_nd_add',
                     inputs={
-                        'shape': val_x.out_shapes[0],
+                        'ref': zeros_like_val_x,
                         'index': indices,
                         'updates': updates
                     },
                     output=input_inner_indices,
                     param_attr=None)
+                indices_mask = node.layer_name + '_indices_mask'
                 constant_minus_one = node.layer_name + '_constant_minus_one'
+                # full_like support create tensor shape like input tensor
                 node.fluid_code.add_layer(
-                    'fill_constant',
-                    inputs=None,
+                    'full_like',
+                    inputs=updates,
                     output=constant_minus_one,
-                    param_attr={
-                        'shape': updates.out_shapes[0],
-                        'dtype': string(updates.dtype),
-                        'value': -1
-                    })
-                indices_mask = node.layer_name + '_indices_mask'
+                    param_attr={'dtype': string(updates.dtype),
+                                'fill_value': -1})
                 node.fluid_code.add_layer(
-                    'scatter_nd',
+                    'scatter_nd_add',
                     inputs={
-                        'shape': val_x.out_shapes[0],
+                        'ref': zeros_like_val_x,
                         'index': indices,
                         'updates': constant_minus_one
                     },
                     output=indices_mask,
                     param_attr=None)
-                constant_1 = node.layer_name + '_constant_1'
+                constant_one = node.layer_name + '_constant_1'
+                # full_like support create tensor shape like input tensor
                 node.fluid_code.add_layer(
-                    'fill_constant',
-                    inputs=None,
-                    output=constant_1,
-                    param_attr={
-                        'shape': val_x.out_shapes[0],
-                        'dtype': string(val_x.dtype),
-                        'value': 1
-                    })
+                    'full_like',
+                    inputs=val_x,
+                    output=constant_one,
+                    param_attr={'dtype': string(val_x.dtype),
+                                'fill_value': 1})
                 input_out_indices_mask = node.layer_name + '_input_out_indices_mask'
                 node.fluid_code.add_layer(
                     "elementwise_add",
                     inputs={"x": indices_mask,
-                            "y": constant_1},
+                            "y": constant_one},
                     output=input_out_indices_mask,
                     param_attr=None)
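Note: the rewritten ScatterND branch now builds its intermediates from two scatter_nd_add calls on a zeros_like tensor: one holding the updates and one holding a -1 mask at the scattered positions, which becomes a keep-mask after adding 1. A rough numpy sketch of the masking identity (my own illustration; it assumes unique indices and that the rest of the handler, not shown in this hunk, multiplies and adds these tensors as before):

    import numpy as np

    x = np.arange(8, dtype=np.float32)           # data tensor
    indices = np.array([[1], [4]])               # ONNX ScatterND indices
    updates = np.array([10., 20.], dtype=np.float32)

    inner = np.zeros_like(x)                     # scatter_nd_add(zeros_like(x), indices, updates)
    np.add.at(inner, indices[:, 0], updates)

    mask = np.zeros_like(x)                      # scatter_nd_add(zeros_like(x), indices, -1)
    np.add.at(mask, indices[:, 0], -1.0)

    out = x * (mask + 1.0) + inner               # keep x off-index, take updates on-index
    print(out)                                   # [ 0. 10.  2.  3. 20.  5.  6.  7.]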
@@ -831,27 +830,35 @@ class OpSet9():
         if len(node.inputs) > 1:
             starts = self.graph.get_input_node(node, idx=1, copy=True)
             ends = self.graph.get_input_node(node, idx=2, copy=True)
+            starts_value = _const_weight_or_none(starts)
+            ends_value = _const_weight_or_none(ends)
             if len(node.inputs) > 3:
                 axes = self.graph.get_input_node(node, idx=3, copy=True)
                 axes = _const_weight_or_none(axes, necessary=True)
             if len(node.inputs) > 4:
                 steps = self.graph.get_input_node(node, idx=4, copy=True)
                 steps = _const_weight_or_none(steps)
-                if steps is not None:
-                    assert steps == 1, "Only support convert op:Slice, which attribute:steps == 1"
             attr = {
                 "axes": axes,
                 "starts": starts.layer_name,
                 "ends": ends.layer_name
             }
-            starts_value = _const_weight_or_none(starts)
-            ends_value = _const_weight_or_none(ends)
             if starts_value is not None and ends_value is not None:
                 self.omit_nodes.append(starts.layer_name)
                 self.omit_nodes.append(ends.layer_name)
+                starts_value = starts_value.copy()
                 ends_value = ends_value.copy()
+                #for idx in range(len(ends_value)):
+                #    if ends_value[idx] > 2**31 - 1:
+                #        ends_value[idx] = 2**31 - 1
+                #print(val_x.out_shapes)
                 for idx in range(len(ends_value)):
-                    if ends_value[idx] > 2**31 - 1:
+                    if starts_value[idx] >= val_x.out_shapes[0][axes[idx]]:
+                        starts_value[idx] = val_x.out_shapes[0][axes[idx]] - 1
+                        ends_value[idx] = val_x.out_shapes[0][axes[idx]]
+                        starts_value[idx] = val_x.out_shapes[0][axes[idx]] - 1
+                    elif ends_value[idx] > 2**31 - 1:
                         ends_value[idx] = 2**31 - 1
                 attr = {
                     "axes": axes,
@@ -884,6 +891,11 @@ class OpSet9():
                         ends[idx] = 2**31 - 1
             attr = {"axes": axes, "starts": starts, "ends": ends}
-        node.fluid_code.add_layer(
-            'slice', inputs=val_x, output=node, param_attr=attr)
+        if steps is not None:
+            attr['strides'] = steps
+            node.fluid_code.add_layer(
+                'strided_slice', inputs=val_x, output=node, param_attr=attr)
+        else:
+            node.fluid_code.add_layer(
+                'slice', inputs=val_x, output=node, param_attr=attr)
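Note: when the ONNX Slice carries a steps input, the converter now emits strided_slice instead of asserting steps == 1. A hypothetical example of equivalent hand-written fluid code (names and values are illustrative):

    import paddle.fluid as fluid
    x = fluid.data(name='x', shape=[10, 10], dtype='float32')
    # steps present -> strided_slice; otherwise the plain slice layer is used
    y = fluid.layers.strided_slice(x, axes=[0], starts=[1], ends=[9], strides=[2])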
@@ -907,6 +919,38 @@ class OpSet9():
         node.fluid_code.add_layer(
             'fill_constant', inputs=None, output=node, param_attr=attr)

+    @print_mapping_info
+    def Clip(self, node):
+        val_x = self.graph.get_input_node(node, idx=0, copy=True)
+        val_y = self.graph.get_node(node.layer.output[0], copy=True)
+        max_value, min_value = None, None
+        if len(node.inputs) == 1:
+            max_value = node.get_attr('max')
+            min_value = node.get_attr('min')
+            attr = {
+                'max': max_value,
+                'min': min_value,
+            }
+            node.fluid_code.add_layer(
+                'clip', inputs=val_x, output=node, param_attr=attr)
+        else:
+            max_ipt = self.graph.get_input_node(node, idx=1, copy=True)
+            min_ipt = self.graph.get_input_node(node, idx=2, copy=True)
+            max_value = _const_weight_or_none(max_ipt)
+            min_value = _const_weight_or_none(min_ipt)
+            self.omit_nodes.append(max_ipt.layer_name)
+            self.omit_nodes.append(min_ipt.layer_name)
+            if max_value.shape == (1, ):
+                max_value = max_value[0]
+            if min_value.shape == (1, ):
+                min_value = min_value[0]
+            if max_value is not None and min_value is not None:
+                attr = {'max': max_value, 'min': min_value}
+                node.fluid_code.add_layer(
+                    'clip', inputs=val_x, output=node, param_attr=attr)
+            else:
+                raise
+
     @print_mapping_info
     def Split(self, node):
         val_x = self.graph.get_input_node(node, idx=0, copy=True)
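Note: Clip needs its own handler because opset 11 moved min/max from node attributes to optional constant inputs; the handler reads them via _const_weight_or_none, unwraps one-element arrays, and still emits a plain clip layer. A hypothetical example of the resulting fluid call (shape and bounds are illustrative):

    import paddle.fluid as fluid
    x = fluid.data(name='x', shape=[-1, 16], dtype='float32')
    out = fluid.layers.clip(x, min=0.0, max=6.0)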
......