Unverified commit 2c12e94b, authored by J Jason, committed by GitHub

Merge pull request #403 from Channingss/fix_shape_infer

Fix shape infer
......@@ -346,8 +346,12 @@ class ONNXGraph(Graph):
#if len(value_info['shape']) == 0 or value_info[
# 'dtype'] is None or 0 in value_info['shape']:
# #TODO add node shape inference
shape = value_info['shape']
for idx in range(len(shape)):
if shape[idx] == 0:
shape[idx] = -1
node.out_shapes.append(shape)
node.dtype = value_info['dtype']
node.out_shapes.append(value_info['shape'])
else:
node.out_shapes.append([])
......
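This hunk is the core of the shape-inference fix: dimensions that ONNX reports as 0 (unknown) are rewritten to -1 so downstream Paddle code treats them as dynamic rather than zero-sized. A minimal standalone sketch of the same normalization (the helper name is illustrative, not part of X2Paddle):

def normalize_shape(shape):
    """Replace unknown (0) dimensions with -1 so they are treated as dynamic."""
    return [-1 if dim == 0 else dim for dim in shape]

print(normalize_shape([0, 3, 224, 224]))  # -> [-1, 3, 224, 224]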
......@@ -104,14 +104,6 @@ class OpSet9():
default_op_mapping = {
'Shape': ['shape', ['X'], ['Out']],
'Clip': [
'clip', ['X'], ['Out'], dict(), dict(
min=(np.asarray(
[255, 255, 127, 255], dtype=np.uint8).view(np.float32)[0]),
max=(np.asarray(
[255, 255, 127, 127], dtype=np.uint8).view(np.float32)[0]),
)
],
'Erf': ['erf', ['X'], ['Out']],
'Ceil': ['ceil', ['X'], ['Out']],
'ReduceMean': [
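The Clip entry deleted from default_op_mapping encoded its default bounds as raw bytes reinterpreted as float32; on a little-endian host they decode to the float32 limits, i.e. "no clipping" defaults:

import numpy as np

min_default = np.asarray([255, 255, 127, 255], dtype=np.uint8).view(np.float32)[0]
max_default = np.asarray([255, 255, 127, 127], dtype=np.uint8).view(np.float32)[0]
print(min_default, max_default)                            # -3.4028235e+38 3.4028235e+38
print(np.finfo(np.float32).min, np.finfo(np.float32).max)  # the same two values

Moving Clip out of the default table makes room for the dedicated handler added further down, which also accepts the opset-11 form where min/max arrive as inputs.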
......@@ -357,6 +349,7 @@ class OpSet9():
'Warning: Paddle does not support op:resize with mode: linear; using bilinear instead'
)
fluid_op = 'resize_bilinear'
attr['align_corners'] = False
node.fluid_code.add_layer(
fluid_op, inputs=inputs, output=node, param_attr=attr)
......@@ -745,53 +738,59 @@ class OpSet9():
param_attr=None)
else:
input_inner_indices = node.layer_name + '_input_inner_indices'
shape = val_x.out_shapes[0]
node.fluid_code.add_layer(
'reshape',
inputs=indices.layer_name,
output=indices.layer_name,
param_attr={'shape': indices.out_shapes[0]})
zeros_like_val_x = val_x.layer_name + '_zeros'
node.fluid_code.add_layer(
'scatter_nd',
'zeros_like',
inputs=val_x,
output=zeros_like_val_x,
param_attr=None)
node.fluid_code.add_layer(
'scatter_nd_add',
inputs={
'shape': val_x.out_shapes[0],
'ref': zeros_like_val_x,
'index': indices,
'updates': updates
},
output=input_inner_indices,
param_attr=None)
indices_mask = node.layer_name + '_indices_mask'
constant_minus_one = node.layer_name + '_constant_minus_one'
# full_like creates a tensor with the same shape as the input tensor
node.fluid_code.add_layer(
'fill_constant',
inputs=None,
'full_like',
inputs=updates,
output=constant_minus_one,
param_attr={
'shape': updates.out_shapes[0],
'dtype': string(updates.dtype),
'value': -1
})
indices_mask = node.layer_name + '_indices_mask'
param_attr={'dtype': string(updates.dtype),
'fill_value': -1})
node.fluid_code.add_layer(
'scatter_nd',
'scatter_nd_add',
inputs={
'shape': val_x.out_shapes[0],
'ref': zeros_like_val_x,
'index': indices,
'updates': constant_minus_one
},
output=indices_mask,
param_attr=None)
constant_1 = node.layer_name + '_constant_1'
constant_one = node.layer_name + '_constant_1'
# full_like creates a tensor with the same shape as the input tensor
node.fluid_code.add_layer(
'fill_constant',
inputs=None,
output=constant_1,
param_attr={
'shape': val_x.out_shapes[0],
'dtype': string(val_x.dtype),
'value': 1
})
'full_like',
inputs=val_x,
output=constant_one,
param_attr={'dtype': string(val_x.dtype),
'fill_value': 1})
input_out_indices_mask = node.layer_name + '_input_out_indices_mask'
node.fluid_code.add_layer(
"elementwise_add",
inputs={"x": indices_mask,
"y": constant_1},
"y": constant_one},
output=input_out_indices_mask,
param_attr=None)
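The rewritten ScatterND branch builds the result from primitives: scatter the updates into a zeros_like copy of the input, scatter a -1 full_like mask at the same indices, shift the mask by +1 so touched positions become 0, and (outside the displayed hunk) combine it with the original tensor. A NumPy sketch of that identity, assuming the indices contain no duplicates (function name illustrative):

import numpy as np

def scatter_nd_emulated(x, indices, updates):
    scattered = np.zeros_like(x)   # scatter_nd_add(zeros_like(x), indices, updates)
    mask = np.zeros_like(x)        # scatter_nd_add(zeros_like(x), indices, full_like(updates, -1))
    for idx, upd in zip(indices, updates):
        scattered[tuple(idx)] += upd
        mask[tuple(idx)] += -1
    keep = mask + np.ones_like(x)  # 1 where untouched, 0 where scattered
    return x * keep + scattered

x = np.arange(8, dtype=np.float32)
print(scatter_nd_emulated(x, np.array([[1], [4]]), np.array([10., 20.], dtype=np.float32)))
# -> [ 0. 10.  2.  3. 20.  5.  6.  7.]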
......@@ -831,27 +830,35 @@ class OpSet9():
if len(node.inputs) > 1:
starts = self.graph.get_input_node(node, idx=1, copy=True)
ends = self.graph.get_input_node(node, idx=2, copy=True)
starts_value = _const_weight_or_none(starts)
ends_value = _const_weight_or_none(ends)
if len(node.inputs) > 3:
axes = self.graph.get_input_node(node, idx=3, copy=True)
axes = _const_weight_or_none(axes, necessary=True)
if len(node.inputs) > 4:
steps = self.graph.get_input_node(node, idx=4, copy=True)
steps = _const_weight_or_none(steps)
if steps is not None:
assert steps == 1, "Only support convert op:Slice, which attribute:steps == 1"
attr = {
"axes": axes,
"starts": starts.layer_name,
"ends": ends.layer_name
}
starts_value = _const_weight_or_none(starts)
ends_value = _const_weight_or_none(ends)
if starts_value is not None and ends_value is not None:
self.omit_nodes.append(starts.layer_name)
self.omit_nodes.append(ends.layer_name)
starts_value = starts_value.copy()
ends_value = ends_value.copy()
#for idx in range(len(ends_value)):
# if ends_value[idx] > 2**31 - 1:
# ends_value[idx] = 2**31 - 1
#print(val_x.out_shapes)
for idx in range(len(ends_value)):
if ends_value[idx] > 2**31 - 1:
if starts_value[idx] >= val_x.out_shapes[0][axes[idx]]:
starts_value[idx] = val_x.out_shapes[0][axes[idx]] - 1
ends_value[idx] = val_x.out_shapes[0][axes[idx]]
starts_value[idx] = val_x.out_shapes[0][axes[idx]] - 1
elif ends_value[idx] > 2**31 - 1:
ends_value[idx] = 2**31 - 1
attr = {
"axes": axes,
......@@ -869,12 +876,12 @@ class OpSet9():
attr['starts'] = starts_cast
if ends.dtype != 'int32':
ends_cast = ends.layer_name + '_cast'
node.fluid_code.add_layer(
'cast',
inputs=ends,
output=ends_cast,
param_attr={'dtype': string('int32')})
attr['ends'] = ends_cast
node.fluid_code.add_layer(
'cast',
inputs=ends,
output=ends_cast,
param_attr={'dtype': string('int32')})
attr['ends'] = ends_cast
else:
starts = node.get_attr('starts')
ends = node.get_attr('ends')
......@@ -884,8 +891,13 @@ class OpSet9():
ends[idx] = 2**31 - 1
attr = {"axes": axes, "starts": starts, "ends": ends}
node.fluid_code.add_layer(
'slice', inputs=val_x, output=node, param_attr=attr)
if steps is not None:
attr['strides'] = steps
node.fluid_code.add_layer(
'strided_slice', inputs=val_x, output=node, param_attr=attr)
else:
node.fluid_code.add_layer(
'slice', inputs=val_x, output=node, param_attr=attr)
@print_mapping_info
def ConstantOfShape(self, node):
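With the constant steps input no longer asserted to be 1, the converter keeps it as strides and emits a strided_slice layer instead of slice; in plain Python slicing terms (illustrative values):

import numpy as np

x = np.arange(10)
starts, ends, steps = [1], [9], [2]
# 'slice'         with starts/ends          behaves like x[1:9]
# 'strided_slice' with starts/ends/strides  behaves like x[1:9:2]
print(x[starts[0]:ends[0]])            # [1 2 3 4 5 6 7 8]
print(x[starts[0]:ends[0]:steps[0]])   # [1 3 5 7]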
......@@ -907,6 +919,38 @@ class OpSet9():
node.fluid_code.add_layer(
'fill_constant', inputs=None, output=node, param_attr=attr)
@print_mapping_info
def Clip(self, node):
val_x = self.graph.get_input_node(node, idx=0, copy=True)
val_y = self.graph.get_node(node.layer.output[0], copy=True)
max_value, min_value = None, None
if len(node.inputs) == 1:
max_value = node.get_attr('max')
min_value = node.get_attr('min')
attr = {
'max': max_value,
'min': min_value,
}
node.fluid_code.add_layer(
'clip', inputs=val_x, output=node, param_attr=attr)
else:
max_ipt = self.graph.get_input_node(node, idx=2, copy=True)  # ONNX Clip-11 input order: (input, min, max)
min_ipt = self.graph.get_input_node(node, idx=1, copy=True)
max_value = _const_weight_or_none(max_ipt)
min_value = _const_weight_or_none(min_ipt)
self.omit_nodes.append(max_ipt.layer_name)
self.omit_nodes.append(min_ipt.layer_name)
if max_value.shape == (1, ):
max_value = max_value[0]
if min_value.shape == (1, ):
min_value = min_value[0]
if max_value is not None and min_value is not None:
attr = {'max': max_value, 'min': min_value}
node.fluid_code.add_layer(
'clip', inputs=val_x, output=node, param_attr=attr)
else:
raise
@print_mapping_info
def Split(self, node):
val_x = self.graph.get_input_node(node, idx=0, copy=True)
......
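The new Clip handler covers both ONNX variants: before opset 11 the bounds are node attributes, while from opset 11 onward they are optional inputs ordered (input, min, max), usually constant initializers that can be folded into Paddle's clip attributes. A minimal sketch of that resolution step outside X2Paddle's classes (names illustrative):

import numpy as np

def resolve_clip_bounds(attrs, input_names, constants):
    # Opset < 11: bounds live in the 'min'/'max' attributes.
    if len(input_names) == 1:
        return attrs.get('min'), attrs.get('max')
    # Opset 11+: optional inputs, ordered (input, min, max).
    def scalar(idx):
        if idx < len(input_names) and input_names[idx] in constants:
            return float(np.asarray(constants[input_names[idx]]).reshape(-1)[0])
        return None
    return scalar(1), scalar(2)

print(resolve_clip_bounds({}, ['x', 'min_c', 'max_c'],
                          {'min_c': np.array([0.0]), 'max_c': np.array([6.0])}))  # (0.0, 6.0)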