Unverified commit d4966510, authored by Jason, committed by GitHub

Merge pull request #2 from PaddlePaddle/develop

pull
@@ -9,13 +9,13 @@ X2Paddle has tested the conversion of TensorFlow/Caffe/ONNX models on a number of mainstream CV models
 ## Environment Dependencies
-python >= 3.5
+python == 2.7 | python >= 3.5
 paddlepaddle >= 1.5.0

 **Install the following dependencies as needed**
 tensorflow : tensorflow == 1.14.0
 caffe : none required
-onnx : onnx == 1.5.0 pytorch == 1.1.0
+onnx : onnx == 1.5.0 onnxruntime == 0.4.0

 ## Installation
 ### Installation option 1 (recommended)
...
@@ -23,4 +23,9 @@ setuptools.setup(
         "Operating System :: OS Independent",
     ],
     license='Apache 2.0',
-    entry_points={'console_scripts': ['x2paddle=x2paddle.convert:main']})
+    entry_points={
+        'console_scripts': [
+            'x2paddle=x2paddle.convert:main',
+            'onnx_infer=x2paddle.onnx_infer:main'
+        ]
+    })

-__version__ = "0.4.5"
+__version__ = "0.5.0"
@@ -154,7 +154,7 @@ def onnx2paddle(model_path, save_dir):
     model = ONNXDecoder(model_path)
     from x2paddle.op_mapper.onnx_op_mapper import ONNXOpMapper
-    mapper = ONNXOpMapper(model)
+    mapper = ONNXOpMapper(model, save_dir)
     from x2paddle.optimizer.onnx_optimizer import ONNXOptimizer
     optimizer = ONNXOptimizer(mapper)
...
@@ -17,7 +17,6 @@ from x2paddle.core.fluid_code import FluidCode
 from onnx.checker import ValidationError
 from onnx.checker import check_model
 from onnx.utils import polish_model
-from onnx.version_converter import convert_version
 from onnx import helper
 from onnx.helper import get_attribute_value, make_attribute
 from onnx.shape_inference import infer_shapes
@@ -26,9 +25,11 @@ from onnx.numpy_helper import to_array
 from onnx import AttributeProto, TensorProto, GraphProto
 from collections import OrderedDict as Dict
 import onnx
+from onnx.helper import ValueInfoProto
 import numpy as np
 from copy import deepcopy
 import logging as _logging
+import os

 default_op_domain = 'ai.onnx'
 _logger = _logging.getLogger(__name__)
@@ -47,6 +48,7 @@ class ONNXGraphNode(GraphNode):
         self.weight_inputs = list()
         self.out_shapes = list()
         self.dtype = None
+        self.which_child = {}

     def get_attr_map(self):
         """
@@ -60,10 +62,9 @@ class ONNXGraphNode(GraphNode):
     @property
     def value(self):
         assert 'Constant' in self.layer_type, "Only Constant | ConstantOfShape node has value."
-        attr = self.layer.attribute['value']
         if 'value' not in self.attr_map:
             return None
-        return self.attr_map[name]
+        return self.attr_map['value']

     def get_attribute_value2(self, attr):
         """
@@ -105,29 +106,39 @@ class ONNXGraphDataNode(GraphNode):
         self.fluid_code = FluidCode()
         self.weight = None
         self.embeded_as = None
+        self.which_child = {}

     @property
     def out_shapes(self):
-        values = self.layer.type.tensor_type.shape.dim
-        out_shapes = list()
-        out_shapes.append([dim.dim_value for dim in values])
-        return out_shapes
+        if isinstance(self.layer, ValueInfoProto):
+            values = self.layer.type.tensor_type.shape.dim
+            out_shapes = list()
+            out_shapes.append([dim.dim_value for dim in values])
+            return out_shapes
+        else:
+            values = self.layer.dims
+            out_shapes = list()
+            out_shapes.append(values)
+            return out_shapes

     @property
     def dtype(self):
-        dtype = self.layer.type.tensor_type.elem_type
-        return TENSOR_TYPE_TO_NP_TYPE[dtype]
+        if isinstance(self.layer, ValueInfoProto):
+            dtype = self.layer.type.tensor_type.elem_type
+            return TENSOR_TYPE_TO_NP_TYPE[dtype]
+        else:
+            dtype = self.layer.data_type
+            return TENSOR_TYPE_TO_NP_TYPE[dtype]
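The branch added above exists because the two proto types store metadata differently: graph inputs arrive as `ValueInfoProto` (shape nested under `type.tensor_type`), while initializers are `TensorProto` (shape in `dims`, dtype in `data_type`). A minimal standalone sketch of the two layouts, using only public onnx helpers:

```python
# Sketch only: demonstrates the two proto layouts the properties above handle.
import numpy as np
from onnx import helper, numpy_helper, TensorProto
from onnx.mapping import TENSOR_TYPE_TO_NP_TYPE

# Graph input: a ValueInfoProto, shape under type.tensor_type.shape.dim
vi = helper.make_tensor_value_info('x', TensorProto.FLOAT, [1, 3, 224, 224])
print([d.dim_value for d in vi.type.tensor_type.shape.dim])   # [1, 3, 224, 224]
print(TENSOR_TYPE_TO_NP_TYPE[vi.type.tensor_type.elem_type])  # float32

# Initializer: a TensorProto, shape in dims and dtype in data_type
init = numpy_helper.from_array(np.zeros((3, 3), dtype=np.float32), name='w')
print(list(init.dims))                                        # [3, 3]
print(TENSOR_TYPE_TO_NP_TYPE[init.data_type])                 # float32
```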
 class ONNXGraph(Graph):
-    def __init__(self, graph, onnx_model):
-        super(ONNXGraph, self).__init__(graph)
+    def __init__(self, onnx_model):
+        super(ONNXGraph, self).__init__(onnx_model.graph)
         self.onnx_model = onnx_model
         self.initializer = {}
         self.place_holder_nodes = list()
         self.get_place_holder_nodes()
-        self.value_infos = self.inferred_model_value_info(graph)
+        self.value_infos = self.inferred_model_value_info(self.model)
         self.results_of_inference = dict()

     def get_inner_nodes(self):
@@ -165,22 +176,9 @@ class ONNXGraph(Graph):
         """
         build topo_sort of ONNX model
         """
-        data_node = self.place_holder_nodes[0]
-        value_info = self.value_infos[data_node]
-        input_shape = value_info['shape']
-        self.get_results_of_inference(self.onnx_model, input_shape)
         for layer in self.model.node:
             node = ONNXGraphNode(layer)
             self.node_map[layer.name] = node
-            for opt in layer.output:
-                if opt in self.value_infos:
-                    value_info = self.value_infos[opt]
-                    node.dtype = value_info['dtype']
-                    node.out_shapes.append(value_info['shape'])
-                else:
-                    _, dtype, shape = self.get_dynamic_shape(opt)
-                    node.dtype = dtype
-                    node.out_shapes.append(shape)

         for layer in self.model.input:
             if layer.name not in self.node_map:
@@ -191,20 +189,40 @@ class ONNXGraph(Graph):
                     is_global_input=is_place_holder)

         #set data node's weight
-        for name, weight in self.graph_weights(self.model):
+        for initializer in self.model.initializer:
+            name = initializer.name
+            weight = to_array(initializer)
             if name in self.node_map:
                 if isinstance(self.node_map[name], ONNXGraphDataNode):
                     self.node_map[name].weight = weight
                     self.node_map[name].embeded_as = []
+            else:
+                self.node_map[name] = ONNXGraphDataNode(initializer,
+                                                        layer_name=name,
+                                                        is_global_input=False)
+                self.node_map[name].weight = weight
+                self.node_map[name].embeded_as = []
         #generate connection between nodes for topo
         for layer_name, node in self.node_map.items():
             if isinstance(node, ONNXGraphNode):
                 for idx, in_node in enumerate(node.layer.input):
                     if in_node not in self.node_map:
-                        raise Exception(
-                            'input[{}] of node[{}] does not exist in node_map'.
-                            format(in_node, layer_name))
+                        flag = 0
+                        for nd in self.model.node:
+                            for idx, opt in enumerate(nd.output):
+                                if opt == in_node:
+                                    self.connect(nd.name, layer_name)
+                                    flag = 1
+                                    node.which_child[nd.name] = idx
+                                    self.node_map[nd.name].index = 0
+                                    break
+                            if flag == 1:
+                                break
+                        if flag == 0:
+                            raise Exception(
+                                'input[{}] of node[{}] does not exist in node_map'
+                                .format(in_node, layer_name))
                     else:
                         self.connect(in_node, layer_name)
         #generate topo
@@ -212,13 +230,16 @@ class ONNXGraph(Graph):
         self.input_nodes = self.place_holder_nodes

-    def get_nodes(self, names, copy=False):
-        """
-        get nodes by more than one name
-        """
-        nodes = []
-        for name in names:
-            nodes.add(self.get_node(name, copy=copy))
+    def get_input_node(self, node, idx=0, copy=False):
+        ipt_node = super(ONNXGraph, self).get_node(node.inputs[idx], copy)
+        if ipt_node.layer_name in node.which_child:
+            # the producer has several outputs; record which one is consumed
+            ipt_node.index = node.which_child[ipt_node.layer_name]
+        return ipt_node

     def graph_weights(self, graph):
         """
@@ -270,50 +291,6 @@ class ONNXGraph(Graph):
         }
         return value_info
-    def get_results_of_inference(self, model, shape):
-        try:
-            import torch
-            version = torch.__version__
-            if '1.1.0' not in version:
-                print("your model have dynamic graph, torch==1.1.0 is required")
-                return
-        except:
-            print(
-                "your model have dynamic graph, we use caff2 to inference graph, please use \"pip install torch==1.1.0\"."
-            )
-            return
-        from x2paddle.decoder.onnx_backend import prepare
-        np_images = np.random.rand(shape[0], shape[1], shape[2],
-                                   shape[3]).astype('float32')
-        outputs = []
-        for node in model.graph.node:
-            value_info = helper.make_tensor_value_info(node.name,
-                                                       TensorProto.UNDEFINED,
-                                                       [])
-            outputs.append(value_info)
-
-        while len(outputs) > 0:
-            tmp_outputs = outputs[:254]
-            model.graph.ClearField('output')
-            model.graph.output.MergeFrom(tmp_outputs)
-            prepared_backend = prepare(model,
-                                       device='CPU',
-                                       no_check_UNSAFE=True)
-            res = prepared_backend.run(inputs=np_images)
-            for idx, info in enumerate(tmp_outputs):
-                self.results_of_inference[info.name] = res[idx]
-            outputs = outputs[254:]
-        return
-
-    def get_dynamic_shape(self, layer):
-        """
-        get dynamic shape from caffe2.backend
-        """
-        output = self.results_of_inference[layer]
-        return output.tolist(), output.dtype, output.shape
 class ONNXDecoder(object):
     def __init__(self, onnx_model):
@@ -334,8 +311,8 @@ class ONNXDecoder(object):
         self.standardize_variable_name(model.graph)
         self.model = model
-        graph_def = model.graph
-        self.onnx_graph = ONNXGraph(graph_def, model)
+        graph = model.graph
+        self.onnx_graph = ONNXGraph(model)
         self.onnx_graph.build()
     def build_value_refs(self, nodes):
@@ -476,7 +453,7 @@ class ONNXDecoder(object):
         if name == '':
             raise ValueError('name should not be empty')
-        for s in ' .*?\\/-:':  #
+        for s in ' .*?\\/-:':
             name = name.replace(s, '_')
         return '_' + name
@@ -499,46 +476,3 @@ class ONNXDecoder(object):
             node.input[i] = self.make_variable_name(node.input[i])
         for i in range(len(node.output)):
             node.output[i] = self.make_variable_name(node.output[i])
-
-    def split_model(self, model, outputs=None):
-        """
-        Takes a model and changes its outputs.
-        """
-        if outputs is None:
-            raise RuntimeError("outputs is None")
-        if outputs == model.graph.output[0].name:
-            return model
-        nodes = model.graph.node
-        keep_nodes = []
-
-        # all the nodes we need to keep
-        for node in nodes:
-            if outputs in node.output:
-                keep_nodes.append(node)
-                break
-            keep_nodes.append(node)
-
-        infer_shapes = onnx.shape_inference.infer_shapes(model)
-        var_out = []
-        for value_info in infer_shapes.graph.value_info:
-            if value_info.name == outputs:
-                var_out.append(value_info)
-                break
-        graph = helper.make_graph(keep_nodes, model.graph.name,
-                                  model.graph.input, var_out,
-                                  model.graph.initializer)
-        onnx_model = helper.make_model(graph)
-        onnx_model.ir_version = model.ir_version
-        onnx_model.producer_name = model.producer_name
-        onnx_model.producer_version = model.producer_version
-        onnx_model.domain = model.domain
-        onnx_model.model_version = model.model_version
-        onnx_model.doc_string = model.doc_string
-        if len(onnx_model.graph.input) != len(model.graph.input):
-            raise RuntimeError("Input mismatch {} != {}".format(
-                len(onnx_model.input), len(model.input)))
-        return onnx_model
+import os
+import sys
+import numpy as np
+import onnx
+import json
+import argparse
+from six import text_type as _text_type
+
+
+def arg_parser():
+    parser = argparse.ArgumentParser()
+    parser.add_argument("--save_dir",
+                        "-s",
+                        type=_text_type,
+                        default=None,
+                        help="define save_dir")
+    return parser
+
+
+def main():
+    try:
+        import onnxruntime as rt
+        version = rt.__version__
+        if version != '0.4.0':
+            print("onnxruntime==0.4.0 is required")
+            return
+    except:
+        print(
+            "onnxruntime is not installed, use \"pip install onnxruntime==0.4.0\"."
+        )
+        return
+    parser = arg_parser()
+    args = parser.parse_args()
+    save_dir = args.save_dir
+    model_dir = os.path.join(save_dir, 'onnx_model_infer.onnx')
+    data_dir = os.path.join(save_dir, 'input_data.npy')
+    model = onnx.load(model_dir)
+    sess = rt.InferenceSession(model_dir)
+    inputs = np.load(data_dir, allow_pickle=True)
+    inputs_dict = {}
+    for i, ipt in enumerate(inputs):
+        inputs_dict[sess.get_inputs()[i].name] = ipt
+    res = sess.run(None, input_feed=inputs_dict)
+    for idx, value_info in enumerate(model.graph.output):
+        np.save(os.path.join(save_dir, value_info.name), res[idx])
+
+
+if __name__ == "__main__":
+    main()
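This new helper is what the converter shells out to (via the `onnx_infer` console script registered in `setup.py` above) to obtain concrete tensor values for shapes it cannot infer statically. A self-contained sketch of that round trip, using a hypothetical one-op model and scratch directory:

```python
# Sketch with assumed names: build a tiny Relu model, write the files the
# script above expects, invoke it, and read back the per-output .npy dump.
import os
import numpy as np
import onnx
from onnx import helper, TensorProto

x = helper.make_tensor_value_info('x', TensorProto.FLOAT, [1, 4])
y = helper.make_tensor_value_info('y', TensorProto.FLOAT, [1, 4])
graph = helper.make_graph([helper.make_node('Relu', ['x'], ['y'])],
                          'demo', [x], [y])
model = helper.make_model(graph)

save_dir = 'tmp_data'  # hypothetical scratch directory
if not os.path.exists(save_dir):
    os.makedirs(save_dir)
onnx.save(model, os.path.join(save_dir, 'onnx_model_infer.onnx'))
np.save(os.path.join(save_dir, 'input_data.npy'),
        [np.random.rand(1, 4).astype('float32')])
os.system('onnx_infer --save_dir=' + save_dir)   # runs the script above
print(np.load(os.path.join(save_dir, 'y.npy')))  # value of output 'y'
```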
@@ -954,6 +954,13 @@ class CaffeOpMapper(OpMapper):
             inputs_node = []
             for i in range(len(node.inputs)):
                 input = self.graph.get_bottom_node(node, idx=i, copy=True)
+                if i == 1 and op == 'DetectionOutput':
+                    input = self.graph.get_bottom_node(node, idx=i, copy=True)
+                    while input is not None and input.layer_type != 'Softmax':
+                        input = self.graph.get_bottom_node(input, idx=0, copy=True)
+                    assert input is not None, 'This kind of DetectionOutput is not supported!'
+                    input = self.graph.get_bottom_node(input, idx=0, copy=True)
                 inputs_node.append(input)
             node.fluid_code.add_layer(func.__code__.co_name,
                                       inputs=inputs_node,
...
@@ -22,8 +22,9 @@ def InstanceNormalization_shape(input_shape):

 def InstanceNormalization_layer(inputs, name=None):
     # TODO(lvmengsi@baidu.com): Check the accuracy when using fluid.layers.layer_norm.
     epsilon = 1e-5
-    mean = fluid.layers.reduce_mean(inputs, dim=[2, 3], keep_dim=True)
-    var = fluid.layers.reduce_mean(fluid.layers.square(inputs - mean),
+    input_ = inputs[0]
+    mean = fluid.layers.reduce_mean(input_, dim=[2, 3], keep_dim=True)
+    var = fluid.layers.reduce_mean(fluid.layers.square(input_ - mean),
                                    dim=[2, 3],
                                    keep_dim=True)
     if name is not None:
@@ -36,13 +37,13 @@ def InstanceNormalization_layer(inputs, name=None):
                                    initializer=fluid.initializer.Constant(0.0),
                                    trainable=True)
     scale = fluid.layers.create_parameter(attr=scale_param,
-                                          shape=inputs.shape[1:2],
+                                          shape=input_.shape[1:2],
                                           dtype="float32")
     offset = fluid.layers.create_parameter(attr=offset_param,
-                                           shape=inputs.shape[1:2],
+                                           shape=input_.shape[1:2],
                                            dtype="float32")
-    tmp = fluid.layers.elementwise_mul(x=(inputs - mean), y=scale, axis=1)
+    tmp = fluid.layers.elementwise_mul(x=(input_ - mean), y=scale, axis=1)
     tmp = tmp / fluid.layers.sqrt(var + epsilon)
     tmp = fluid.layers.elementwise_add(tmp, offset, axis=1)
     return tmp
@@ -56,4 +57,5 @@ def InstanceNormalization_weights(name, data=None):
 register(kind='InstanceNormalization',
          shape=InstanceNormalization_shape,
          layer=InstanceNormalization_layer,
+         child_func=None,
          weights=InstanceNormalization_weights)
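The fluid code above computes the standard instance-norm formula per sample and per channel over the spatial dimensions. A numpy reference (sketch only, same epsilon and operation order) that mirrors it:

```python
# Numerical reference for the layer above: normalize each (sample, channel)
# slice over H and W, then apply the per-channel scale and offset.
import numpy as np

def instance_norm_ref(x, scale, offset, epsilon=1e-5):
    # x: (N, C, H, W); scale, offset: (C,)
    mean = x.mean(axis=(2, 3), keepdims=True)
    var = ((x - mean) ** 2).mean(axis=(2, 3), keepdims=True)
    y = (x - mean) * scale.reshape(1, -1, 1, 1) / np.sqrt(var + epsilon)
    return y + offset.reshape(1, -1, 1, 1)

x = np.random.rand(2, 3, 4, 4).astype('float32')
out = instance_norm_ref(x, np.ones(3, 'float32'), np.zeros(3, 'float32'))
print(out.mean(axis=(2, 3)))  # approximately zero per channel
```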
@@ -95,6 +95,17 @@ def make_custom_layer(node):
     return inspect.getsource(layer_func), layer_func

+def make_custom_child_func(node):
+    """ get the code which implements the custom layer's child function
+    """
+    layer_type = node.layer_type
+    child_func = custom_layers[layer_type]['child_func']
+    if child_func is None:
+        return None, child_func
+    import inspect
+    return inspect.getsource(child_func), child_func
+
 def deal_weights(node, data=None):
     """ deal the weights of the custom layer
     """
...
@@ -17,7 +17,7 @@
 g_custom_layers = {}

-def register(kind, shape, layer, weights):
+def register(kind, shape, layer, child_func, weights):
     """ register a custom layer or a list of custom layers

     Args:
@@ -48,6 +48,7 @@ def register(kind, shape, layer, weights):
         g_custom_layers[k] = {
             'shape': shape,
             'layer': layer,
+            'child_func': child_func,
             'weights': weights
         }
...
@@ -29,9 +29,6 @@ default_op_mapping = {
     'Gather': ['gather', ['X'], ['Out'],
                dict(axis='')],
     'Shape': ['shape', ['X'], ['Out']],
-    'Mul': ['elementwise_mul', ['X', 'Y'], ['Out'],
-            dict(),
-            dict(axis=-1)],
     'Clip': [
         'clip', ['X'], ['Out'],
         dict(),
@@ -42,6 +39,7 @@ default_op_mapping = {
                  dtype=_np.uint8).view(_np.float32)),
     )
     ],
+    'Ceil': ['ceil', ['X'], ['Out']],
     'ReduceMean': [
         'reduce_mean', ['X'], ['Out'],
         dict(axes='dim', keepdims='keep_dim'),
@@ -52,7 +50,11 @@ default_op_mapping = {
         dict(axes='dim', keepdims='keep_dim'),
         dict(keep_dim=1)
     ],
+    'ReduceMin': [
+        'reduce_min', ['X'], ['Out'],
+        dict(axes='dim', keepdims='keep_dim'),
+        dict(keep_dim=1)
+    ],
     #active function
     'Relu': ['relu', ['X'], ['Out']],
     'LeakyRelu': ['leaky_relu', ['X'], ['Out'],
@@ -66,9 +68,6 @@ default_op_mapping = {
     ],
     'Tanh': ['tanh', ['X'], ['Out']],
     'Sigmoid': ['sigmoid', ['X'], ['Out']],
-    'Pow': ['elementwise_pow', ['X', 'Y'], ['Out'],
-            dict(),
-            dict(axis=-1)],  # TODO: pow for scalar exponent
     'HardSigmoid': [
         'hard_sigmoid', ['X'], ['Out'],
         dict(alpha='slope', beta='offset'),
@@ -78,8 +77,8 @@ default_op_mapping = {
     'Softplus': ['softplus', ['X'], ['Out']],
     'Exp': ['exp', ['X'], ['Out']],
-    'Softmax': ['softmax', ['X'], ['Out'],
-                dict(axis=''),
-                dict(axis=1)],
+    'Softmax': ['softmax', ['X'], ['Out'],
+                dict(), dict(axis=1)],
+    'Sqrt': ['sqrt', ['X'], ['Out']],
 }

 activefunc_op_mapping = {
...
@@ -23,16 +23,21 @@ from x2paddle.op_mapper.onnx_directly_map import default_ioa_constraint
 from x2paddle.op_mapper.onnx_custom_layer import *
 from x2paddle.core.util import string
 import numpy as np
+import onnx
 import onnx.numpy_helper as numpy_helper
+from onnx.mapping import TENSOR_TYPE_TO_NP_TYPE
 import logging as _logging
 from collections import OrderedDict as _dict
+import math
+import os
+import shutil

 _logger = _logging.getLogger(__name__)

 def _const_weight_or_none(node):
     if 'Constant' in node.layer_name:
-        return val.value
+        return node.value
     if isinstance(node, ONNXGraphDataNode):
         return node.weight
     return None
@@ -47,7 +52,7 @@ def get_same_padding(in_size, kernel_size, stride):

 class ONNXOpMapper(OpMapper):
-    def __init__(self, decoder):
+    def __init__(self, decoder, save_dir):
         super(ONNXOpMapper, self).__init__()
         self.decoder = decoder
         self.graph = decoder.onnx_graph
@@ -55,6 +60,9 @@ class ONNXOpMapper(OpMapper):
         self.weights = dict()
         self.omit_nodes = list()
         self.used_custom_layers = dict()
+        self.is_inference = False
+        self.tmp_data_dir = os.path.join(save_dir, 'tmp_data')
+        self.get_output_shapes()

         if not self.op_checker():
             raise Exception("Model are not supported yet.")
@@ -76,6 +84,8 @@ class ONNXOpMapper(OpMapper):
             elif op in custom_layers:
                 self.deal_custom_layer(node)

+        self.remove_tmp_data()
+
     def op_checker(self):
         unsupported_ops = set()
         for node_name in self.graph.topo_sort:
@@ -94,12 +104,85 @@ class ONNXOpMapper(OpMapper):
                 print(op)
             return False

+    def get_results_of_inference(self, model, value_infos, data_nodes):
+        inputs = []
+        for data_node in data_nodes:
+            value_info = value_infos[data_node]
+            ipt = np.random.random(value_info['shape']).astype(
+                value_info['dtype'])
+            inputs.append(ipt)
+        model = onnx.shape_inference.infer_shapes(model)
+        outputs = []
+        for value_info in model.graph.value_info:
+            outputs.append(value_info)
+        model.graph.ClearField('output')
+        model.graph.output.MergeFrom(outputs)
+        if not os.path.exists(self.tmp_data_dir):
+            os.makedirs(self.tmp_data_dir)
+        onnx.save(model,
+                  os.path.join(self.tmp_data_dir, 'onnx_model_infer.onnx'))
+        np.save(os.path.join(self.tmp_data_dir, 'input_data.npy'), inputs)
+        os.system('onnx_infer --save_dir=' + self.tmp_data_dir)
+        return
+
+    def get_dynamic_shape(self, layer):
+        """
+        get dynamic shape from infer_result
+        """
+        output = np.load(os.path.join(self.tmp_data_dir, layer + '.npy'))
+        return output.tolist(), output.dtype, output.shape
+
+    def get_output_shapes(self):
+        """
+        fill in the output shape and dtype of every node
+        """
+        nodes = self.decoder.model.graph.node
+        node_map = self.decoder.onnx_graph.node_map
+        value_infos = self.decoder.onnx_graph.value_infos
+        onnx_model = self.decoder.model
+        for layer in nodes:
+            node = node_map[layer.name]
+            for opt in layer.output:
+                if opt in value_infos:
+                    value_info = value_infos[opt]
+                    if len(value_info['shape']
+                           ) == 0 or value_info['dtype'] is None:
+                        if self.is_inference == False:
+                            self.get_results_of_inference(
+                                onnx_model, value_infos,
+                                self.decoder.onnx_graph.place_holder_nodes)
+                            self.is_inference = True
+                        _, dtype, shape = self.get_dynamic_shape(opt)
+                        node.out_shapes.append(shape)
+                        node.dtype = dtype
+                    else:
+                        node.dtype = value_info['dtype']
+                        node.out_shapes.append(value_info['shape'])
+                else:
+                    if self.is_inference == False:
+                        self.get_results_of_inference(
+                            onnx_model, value_infos,
+                            self.decoder.onnx_graph.place_holder_nodes)
+                        self.is_inference = True
+                    _, dtype, shape = self.get_dynamic_shape(opt)
+                    node.dtype = dtype
+                    node.out_shapes.append(shape)
+
+    def remove_tmp_data(self):
+        """
+        remove temporarily generated files
+        """
+        if os.path.exists(self.tmp_data_dir):
+            import shutil
+            shutil.rmtree(self.tmp_data_dir)
     def directly_map(self, node, name='', *args, **kwargs):
         inputs = node.layer.input
         outputs = node.layer.output
         op_type = node.layer_type
         attrs = node.attr_map
         info = default_op_mapping[op_type]
         info.extend(list(default_op_mapping_field_values.values())[len(info):])
         (
@@ -127,34 +210,41 @@ class ONNXOpMapper(OpMapper):
             mapped_attrs.pop('_')
         fluid_attrs = default_attrs.copy()
         fluid_attrs.update(mapped_attrs)
-        val_inps = inputs if input_perm is None else list(
-            map(lambda i: inputs[i], input_perm))
+        inputs = inputs if input_perm is None else list(
+            map(lambda i: inputs[i], input_perm))
+        val_inps = []
+        for idx, ipt in enumerate(inputs):
+            val_inps.append(self.graph.get_input_node(node, idx=idx, copy=True))
         val_outs = outputs if output_perm is None else list(
             map(lambda i: outputs[i], output_perm))
         attr = fluid_attrs
-        if fluid_op not in ['shape', 'gather']:
+        assert len(val_inps) == 1, 'directly_map error with multi inputs'
+        if fluid_op not in ['shape']:
             attr['name'] = string(node.layer_name)
         node.fluid_code.add_layer(fluid_op,
-                                  inputs=', '.join(val_inps),
+                                  inputs=val_inps[0],
                                   output=val_outs[0],
                                   param_attr=attr)
     def deal_custom_layer(self, node):
         op = node.layer_type
-        val_x = self.graph.get_node(node.layer.input[0], copy=True)
         custom_code, func = make_custom_layer(node)
+        child_func_code, child_func = make_custom_child_func(node)
         params = get_params(node.layer, node.layer_type)
         arg_names, kwargs = set_args(func, params)
         kwargs['name'] = string(node.layer_name)
-        inputs_node = []
-        inputs_node.append(node.inputs[0])
         node.fluid_code.add_layer(func.__code__.co_name,
-                                  inputs=inputs_node[0],
+                                  inputs=node.inputs,
                                   output=node,
                                   param_attr=kwargs,
                                   is_custom_layer=True)
         if op not in self.used_custom_layers:
             self.used_custom_layers[op] = custom_code
+        if op + '_child_func' not in self.used_custom_layers:
+            if child_func_code is not None:
+                self.used_custom_layers[op + '_child_func'] = child_func_code
     def place_holder(self, node):
         self.input_shapes.append(node.out_shapes[0])
@@ -203,8 +293,8 @@ class ONNXOpMapper(OpMapper):
         return [0] * ndims, val_padded

     def _interpolate(self, node):
-        val_x = self.graph.get_node(node.layer.input[0], copy=True)
-        val_scales = self.graph.get_node(node.layer.input[1], copy=True)
+        val_x = self.graph.get_input_node(node, idx=0, copy=True)
+        val_scales = self.graph.get_input_node(node, idx=1, copy=True)
         val_y = self.graph.get_node(node.layer.output[0], copy=True)

         out_shape_ = val_y.out_shapes[0]
@@ -245,7 +335,7 @@ class ONNXOpMapper(OpMapper):
             param_attr=attr)

     def Pad(self, node, op_independent=True):
-        val_x = self.graph.get_node(node.layer.input[0], copy=True)
+        val_x = self.graph.get_input_node(node, idx=0, copy=True)
         pads = node.get_attr('pads')
         mode = node.get_attr('mode', 'constant')
         value = node.get_attr('value', 0.)
@@ -292,7 +382,7 @@ class ONNXOpMapper(OpMapper):
         return node.layer_name + '_paded'

     def Unsqueeze(self, node):
-        val_x = self.graph.get_node(node.layer.input[0], copy=True)
+        val_x = self.graph.get_input_node(node, idx=0, copy=True)
         axes = node.get_attr('axes')
         attr = {'axes': axes, 'name': string(node.layer_name)}
         node.fluid_code.add_layer('unsqueeze',
@@ -301,7 +391,7 @@ class ONNXOpMapper(OpMapper):
             param_attr=attr)
     def Shrink(self, node):
-        val_x = self.graph.get_node(node.layer.input[0], copy=True)
+        val_x = self.graph.get_input_node(node, idx=0, copy=True)
         bias = node.get_attr('bias')
         lambd = node.get_attr('lambd')
         assert bias == 0.0, 'not support bias!=0'
@@ -358,8 +448,8 @@ class ONNXOpMapper(OpMapper):
             param_attr=attr)

     def Resize(self, node):
-        val_x = self.graph.get_node(node.layer.input[0], copy=True)
-        val_scales = self.graph.get_node(node.layer.input[1], copy=True)
+        val_x = self.graph.get_input_node(node, idx=0, copy=True)
+        val_scales = self.graph.get_input_node(node, idx=1, copy=True)
         val_y = self.graph.get_node(node.layer.output[0], copy=True)

         out_shape_ = val_y.out_shapes[0]
@@ -401,24 +491,76 @@ class ONNXOpMapper(OpMapper):
     def Upsample(self, node):
         self._interpolate(node)
-    def Slice(self, node):
-        val_x = self.graph.get_node(node.layer.input[0], copy=True)
-        val_y = self.graph.get_node(node.layer.output[0], copy=True)
+    def Gather(self, node):
+        val_x = self.graph.get_input_node(node, idx=0, copy=True)
+        indices = self.graph.get_input_node(node, idx=1, copy=True)
+        indices_shape = indices.out_shapes[0]
+        axis = node.get_attr('axis')
+        assert len(indices_shape) <= 1, \
+            "Gather op doesn't support indices with rank > 1"
+        if axis == 0 and len(indices_shape) <= 1:
+            node.fluid_code.add_layer('gather',
+                                      inputs={
+                                          'input': val_x,
+                                          'index': indices
+                                      },
+                                      output=node,
+                                      param_attr=None)
+        elif axis > 0 and len(indices_shape) <= 1:
+            perm = list(range(len(val_x.out_shapes[0])))
+            perm = [axis] + perm[:axis] + perm[axis + 1:]
+            attr_trans = {'perm': perm}
+            name_trans = val_x.layer_name + '_trans'
+            node.fluid_code.add_layer('transpose',
+                                      inputs=val_x,
+                                      output=name_trans,
+                                      param_attr=attr_trans)
+            node.fluid_code.add_layer('gather',
+                                      inputs={
+                                          'input': name_trans,
+                                          'index': indices
+                                      },
+                                      output=node,
+                                      param_attr=None)
+            node.fluid_code.add_layer('transpose',
+                                      inputs=node,
+                                      output=node,
+                                      param_attr=attr_trans)
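The `axis > 0` branch relies on a permutation trick: move the gather axis to the front, gather along axis 0, then transpose back (for `axis == 1` the permutation is its own inverse, which is what the generated fluid code applies). A numpy check of the idea, spelled out with an explicit inverse permutation:

```python
# Illustration only: gather along axis k == move axis k to the front,
# gather on axis 0, then apply the inverse permutation.
import numpy as np

x = np.random.rand(2, 5, 3)
idx = np.array([0, 2, 4])
axis = 1
perm = [axis] + [i for i in range(x.ndim) if i != axis]  # [1, 0, 2]
gathered = np.transpose(x, perm)[idx]                    # gather on axis 0
restored = np.transpose(gathered, np.argsort(perm))      # inverse permutation
assert np.array_equal(restored, np.take(x, idx, axis=axis))
```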
+    def Slice(self, node):
+        val_x = self.graph.get_input_node(node, idx=0, copy=True)
+        val_starts, val_ends, val_axes, val_steps = None, None, None, None
+        if len(node.inputs) > 1:
+            starts = self.graph.get_input_node(node, idx=1, copy=True)
+            ends = self.graph.get_input_node(node, idx=2, copy=True)
+            axes = self.graph.get_input_node(node, idx=3, copy=True)
+            steps = self.graph.get_input_node(node, idx=4, copy=True)
+            self.omit_nodes.append(starts.layer_name)
+            self.omit_nodes.append(ends.layer_name)
+            self.omit_nodes.append(axes.layer_name)
+            self.omit_nodes.append(steps.layer_name)
+            starts = _const_weight_or_none(starts).copy()
+            ends = _const_weight_or_none(ends).copy()
+            axes = _const_weight_or_none(axes)
+            steps = _const_weight_or_none(steps)
+        else:
-        axes = node.get_attr('axes')
-        starts = node.get_attr('starts')
-        ends = node.get_attr('ends')
+            starts = node.get_attr('starts')
+            ends = node.get_attr('ends')
+            axes = node.get_attr('axes')
+        val_y = self.graph.get_node(node.layer.output[0], copy=True)

         shape = val_x.out_shapes[0]
         if shape is not None:
             for idx, value in enumerate(starts):
-                if value > 2**63 - 1 // 2:
-                    value = value - ONNX_INT_MAX
-                    starts[idx] = shape[axes[idx]] + value
+                if value > shape[axes[idx]]:
+                    starts[idx] = shape[axes[idx]]
             for idx, value in enumerate(ends):
-                if value > 2**63 - 1 // 2:
-                    value = value - ONNX_INT_MAX
-                    ends[idx] = shape[axes[idx]] + value
+                if value > shape[axes[idx]]:
+                    ends[idx] = shape[axes[idx]]
         attr = {"axes": axes, "starts": starts, "ends": ends}
         node.fluid_code.add_layer('slice',
                                   inputs=val_x,
@@ -426,7 +568,7 @@ class ONNXOpMapper(OpMapper):
                                   param_attr=attr)
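The clamping above replaces the old sentinel arithmetic: exporters encode "slice to the end" as a huge integer (near 2**63 - 1), which must be reduced to the actual dimension size before emitting the fluid slice layer. A small illustration:

```python
# Illustration of the clamp: out-of-range ends collapse to the dim size.
INT_SENTINEL = 2**63 - 1  # "to the end", as emitted by some exporters
shape = [1, 3, 224, 224]
axes, starts, ends = [2], [0], [INT_SENTINEL]
ends = [min(end, shape[axis]) for axis, end in zip(axes, ends)]
print(ends)  # [224]
```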
     def ConstantOfShape(self, node):
-        val_shape = self.graph.get_node(node.layer.input[0], copy=True)
+        val_shape = self.graph.get_input_node(node, idx=0, copy=True)
         val_y = self.graph.get_node(node.layer.output[0], copy=True)

         shape = _const_weight_or_none(val_shape)
@@ -452,31 +594,36 @@ class ONNXOpMapper(OpMapper):
             param_attr=attr)

     def Split(self, node):
-        val_input = self.graph.get_node(node.layer.input[0], copy=True)
-        var_outs = [val for val in node.layer.input]
+        val_x = self.graph.get_input_node(node, idx=0, copy=True)
+        val_y = self.graph.get_node(node.layer.output[0], copy=True)
         fluid_op = 'split'
-        split = node.get_attr['split']
+        split = node.get_attr('split')
         axis = node.get_attr('axis', 0)
-        attr = {'split': split, 'axis': axis, 'name': string(node.layer_name)}
+        attr = {
+            'num_or_sections': split,
+            'dim': axis,
+            'name': string(node.layer_name)
+        }
         # generation
         node.fluid_code.add_layer('split',
-                                  inputs=val_input,
-                                  output=var_outs,
+                                  inputs=val_x,
+                                  output=val_y,
                                   param_attr=attr)
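The corrected attribute names match the fluid split signature, where ONNX's `split`/`axis` become `num_or_sections`/`dim`. A numpy analogue of the same semantics:

```python
# ONNX Split with split=[2, 4], axis=1 over a (2, 6, 4) tensor.
import numpy as np
x = np.random.rand(2, 6, 4)
parts = np.split(x, np.cumsum([2, 4])[:-1], axis=1)
print([p.shape for p in parts])  # [(2, 2, 4), (2, 4, 4)]
```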
     def Reshape(self, node):
-        val_x = self.graph.get_node(node.layer.input[0], copy=True)
-        val_shape = self.graph.get_node(node.layer.input[1], copy=True)
+        val_x = self.graph.get_input_node(node, idx=0, copy=True)
+        val_shape = self.graph.get_input_node(node, idx=1, copy=True)
         val_reshaped = self.graph.get_node(node.layer.output[0], copy=True)
         shape = None

         if isinstance(val_shape, ONNXGraphDataNode):
             self.omit_nodes.append(val_shape.layer_name)

         # catch dynamic graph shape
         if isinstance(val_shape, ONNXGraphNode):
-            shape, _, _ = self.decoder.onnx_graph.get_dynamic_shape(
-                val_shape.layer_name)
+            shape, _, _ = self.get_dynamic_shape(val_shape.layer_name)
         if shape is None:
             shape = val_reshaped.out_shapes[0]
@@ -503,7 +650,7 @@ class ONNXOpMapper(OpMapper):
             param_attr=attr)

     def Cast(self, node):
-        val_input = self.graph.get_node(node.layer.input[0], copy=True)
+        val_input = self.graph.get_input_node(node, idx=0, copy=True)
         val_output = self.graph.get_node(node.layer.output[0], copy=True)

         dtype = node.get_attr('to')
@@ -520,7 +667,7 @@ class ONNXOpMapper(OpMapper):
             param_attr=attr)
     def AveragePool(self, node):
-        val_x = self.graph.get_node(node.layer.input[0], copy=True)
+        val_x = self.graph.get_input_node(node, idx=0, copy=True)

         auto_pad = node.get_attr('auto_pad', 'NOTSET')
         kernel_shape = node.get_attr("kernel_shape")
@@ -532,10 +679,10 @@ class ONNXOpMapper(OpMapper):
         fluid_op = 'pool{}d'.format(poolnd)
         assert 2 <= poolnd <= 3, 'only pool2d and pool3d is supported'

-        input_shape = val_x.out_shapes[0]
         paddings, val_x = self._pad_if_asymmetric(node, pads, val_x)

         if auto_pad == "SAME_UPPER" or auto_pad == "SAME_LOWER":
+            input_shape = val_x.out_shapes[0]
             pad_h = get_same_padding(input_shape[2], kernel_shape[0],
                                      strides[0])
             pad_w = get_same_padding(input_shape[3], kernel_shape[1],
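Both pooling mappers call `get_same_padding` for the `SAME_UPPER`/`SAME_LOWER` cases; its body is not part of this diff. A sketch assuming the usual TensorFlow-style definition of SAME padding:

```python
# Assumed definition: total padding brings the output to ceil(in / stride),
# with the total split across the two sides of the dimension.
import math

def same_padding(in_size, kernel_size, stride):
    out_size = int(math.ceil(in_size / float(stride)))
    total = max((out_size - 1) * stride + kernel_size - in_size, 0)
    return [total // 2, total - total // 2]

print(same_padding(224, 3, 2))  # [0, 1]
```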
@@ -560,7 +707,7 @@ class ONNXOpMapper(OpMapper):

     def Concat(self, node):
         inputs = []
         for i in range(len(node.layer.input)):
-            ipt = self.graph.get_node(node.layer.input[i], copy=True)
+            ipt = self.graph.get_input_node(node, idx=i, copy=True)
             if isinstance(ipt, str):
                 inputs.append(ipt)
             else:
@@ -568,12 +715,12 @@ class ONNXOpMapper(OpMapper):
         axis = node.get_attr('axis')
         attr = {'axis': axis}
         node.fluid_code.add_layer('concat',
-                                  inputs='[' + ', '.join(inputs) + ']',
+                                  inputs=inputs,
                                   output=node,
                                   param_attr=attr)

     def Flatten(self, node):
-        val_x = self.graph.get_node(node.layer.input[0], copy=True)
+        val_x = self.graph.get_input_node(node, idx=0, copy=True)
         axis = node.get_attr('axis', 1)
         attr = {"axis": str(axis), "name": string(node.layer_name)}
         node.fluid_code.add_layer('flatten',
@@ -582,9 +729,9 @@ class ONNXOpMapper(OpMapper):
             param_attr=attr)
     def Gemm(self, node):
-        val_a = self.graph.get_node(node.layer.input[0], copy=True)
-        val_b = self.graph.get_node(node.layer.input[1], copy=True)
-        val_c = self.graph.get_node(node.layer.input[2], copy=True)
+        val_a = self.graph.get_input_node(node, idx=0, copy=True)
+        val_b = self.graph.get_input_node(node, idx=1, copy=True)
+        val_c = self.graph.get_input_node(node, idx=2, copy=True)

         alpha = node.get_attr('alpha', 1.)  # optional
         beta = node.get_attr('beta', 1.)  # optional
@@ -627,8 +774,8 @@ class ONNXOpMapper(OpMapper):
             param_attr=attr)

     def Add(self, node):
-        val_x = self.graph.get_node(node.layer.input[0], copy=True)
-        val_y = self.graph.get_node(node.layer.input[1], copy=True)
+        val_x = self.graph.get_input_node(node, idx=0, copy=True)
+        val_y = self.graph.get_input_node(node, idx=1, copy=True)
         inputs = {
             "x": val_x,
             "y": val_y,
@@ -639,26 +786,53 @@ class ONNXOpMapper(OpMapper):
             output=node,
             param_attr=attr)
+    def Sub(self, node):
+        val_x = self.graph.get_input_node(node, idx=0, copy=True)
+        val_y = self.graph.get_input_node(node, idx=1, copy=True)
+        inputs = {
+            "x": val_x,
+            "y": val_y,
+        }
+        attr = {"name": string(node.layer_name)}
+        node.fluid_code.add_layer("elementwise_sub",
+                                  inputs=inputs,
+                                  output=node,
+                                  param_attr=attr)
+
+    def Pow(self, node):
+        val_x = self.graph.get_input_node(node, idx=0, copy=True)
+        val_y = self.graph.get_input_node(node, idx=1, copy=True)
+        inputs = {
+            "x": val_x,
+            "y": val_y,
+        }
+        attr = {"name": string(node.layer_name)}
+        node.fluid_code.add_layer("elementwise_pow",
+                                  inputs=inputs,
+                                  output=node,
+                                  param_attr=attr)
     def Sum(self, node):
         val_inps = node.layer.input
         inputs = {
-            "x": val_inps[0],
-            "y": val_inps[1],
+            "x": self.graph.get_input_node(node, idx=0, copy=True),
+            "y": self.graph.get_input_node(node, idx=1, copy=True),
         }
         node.fluid_code.add_layer("elementwise_add", inputs=inputs, output=node)

-        for ipt in val_inps[2:]:
+        for idx, ipt in enumerate(val_inps[2:]):
+            # offset by the two inputs already consumed above
+            y = self.graph.get_input_node(node, idx=idx + 2, copy=True)
             inputs = {
                 "x": node.layer_name,
-                "y": ipt,
+                "y": y,
             }
             node.fluid_code.add_layer("elementwise_add",
                                       inputs=inputs,
                                       output=node)

     def MatMul(self, node):
-        val_x = self.graph.get_node(node.layer.input[0], copy=True)
-        val_y = self.graph.get_node(node.layer.input[1], copy=True)
+        val_x = self.graph.get_input_node(node, idx=0, copy=True)
+        val_y = self.graph.get_input_node(node, idx=1, copy=True)
         inputs = {"x": val_x, "y": val_y}
         attr = {"name": string(node.layer_name)}
         node.fluid_code.add_layer("matmul",
@@ -667,11 +841,11 @@ class ONNXOpMapper(OpMapper):
             param_attr=attr)
     def BatchNormalization(self, node):
-        val_x = self.graph.get_node(node.layer.input[0], copy=True)
-        val_scale = self.graph.get_node(node.layer.input[1], copy=True)
-        val_b = self.graph.get_node(node.layer.input[2], copy=True)
-        val_mean = self.graph.get_node(node.layer.input[3], copy=True)
-        val_var = self.graph.get_node(node.layer.input[4], copy=True)
+        val_x = self.graph.get_input_node(node, idx=0, copy=True)
+        val_scale = self.graph.get_input_node(node, idx=1, copy=True)
+        val_b = self.graph.get_input_node(node, idx=2, copy=True)
+        val_mean = self.graph.get_input_node(node, idx=3, copy=True)
+        val_var = self.graph.get_input_node(node, idx=4, copy=True)

         self.omit_nodes.append(val_scale.layer_name)
         self.omit_nodes.append(val_b.layer_name)
@@ -701,7 +875,7 @@ class ONNXOpMapper(OpMapper):
             param_attr=attr)

     def Transpose(self, node):
-        val_x = self.graph.get_node(node.layer.input[0], copy=True)
+        val_x = self.graph.get_input_node(node, idx=0, copy=True)
         perm = node.get_attr('perm')
         attr = {'perm': perm, "name": string(node.layer_name)}
         node.fluid_code.add_layer("transpose",
@@ -710,12 +884,9 @@ class ONNXOpMapper(OpMapper):
             param_attr=attr)
     def Mul(self, node):
-        val_x = self.graph.get_node(node.layer.input[0], copy=True)
-        val_y = self.graph.get_node(node.layer.input[1], copy=True)
-        val_x_shape = val_x.out_shapes[0]
+        val_x = self.graph.get_input_node(node, idx=0, copy=True)
+        val_y = self.graph.get_input_node(node, idx=1, copy=True)
         val_y_shape = val_y.out_shapes[0]

         slice_idx = 0
         for dim in val_y_shape:
             if dim == 1:
@@ -747,12 +918,9 @@ class ONNXOpMapper(OpMapper):
             param_attr=attr)

     def Div(self, node):
-        val_x = self.graph.get_node(node.layer.input[0], copy=True)
-        val_y = self.graph.get_node(node.layer.input[1], copy=True)
-        val_x_shape = val_x.out_shapes[0]
+        val_x = self.graph.get_input_node(node, idx=0, copy=True)
+        val_y = self.graph.get_input_node(node, idx=1, copy=True)
         val_y_shape = val_y.out_shapes[0]

         slice_idx = 0
         for dim in val_y_shape:
             if dim == 1:
@@ -784,7 +952,7 @@ class ONNXOpMapper(OpMapper):
             param_attr=attr)
     def Relu(self, node):
-        val_x = self.graph.get_node(node.layer.input[0], copy=True)
+        val_x = self.graph.get_input_node(node, idx=0, copy=True)
         attr = {"name": string(node.layer_name)}
         node.fluid_code.add_layer("relu",
                                   inputs=val_x,
@@ -792,8 +960,8 @@ class ONNXOpMapper(OpMapper):
             param_attr=attr)

     def PRelu(self, node):
-        val_x = self.graph.get_node(node.layer.input[0], copy=True)
-        val_slope = self.graph.get_node(node.layer.input[1], copy=True)
+        val_x = self.graph.get_input_node(node, idx=0, copy=True)
+        val_slope = self.graph.get_input_node(node, idx=1, copy=True)

         mode = 'channel'
         shape_slope = val_slope.out_shapes[0]
@@ -811,20 +979,20 @@ class ONNXOpMapper(OpMapper):
             param_attr=attr)

     def Squeeze(self, node):
-        val_x = self.graph.get_node(node.layer.input[0], copy=True)
-        squeeze_dims = node.get_attr('squeeze_dims')
-        attr = {'axes': squeeze_dims, "name": string(node.layer_name)}
+        val_x = self.graph.get_input_node(node, idx=0, copy=True)
+        axes = node.get_attr('axes')
+        attr = {'axes': axes, "name": string(node.layer_name)}
         node.fluid_code.add_layer("squeeze",
                                   inputs=val_x,
                                   output=node,
                                   param_attr=attr)

     def Identity(self, node):
-        val_x = self.graph.get_node(node.layer.input[0], copy=True)
+        val_x = self.graph.get_input_node(node, idx=0, copy=True)
         node.fluid_code.add_layer("assign", inputs=val_x, output=node)
     def MaxPool(self, node):
-        val_x = self.graph.get_node(node.layer.input[0], copy=True)
+        val_x = self.graph.get_input_node(node, idx=0, copy=True)

         auto_pad = node.get_attr('auto_pad', 'NOTSET')
         assert node.get_attr(
@@ -839,10 +1007,10 @@ class ONNXOpMapper(OpMapper):
         fluid_op = 'pool{}d'.format(poolnd)
         assert 2 <= poolnd <= 3, 'only pool2d and pool3d is supported'

-        input_shape = val_x.out_shapes[0]
         paddings, val_x = self._pad_if_asymmetric(node, pads, val_x)

         if auto_pad == "SAME_UPPER" or auto_pad == "SAME_LOWER":
+            input_shape = val_x.out_shapes[0]
             pad_h = get_same_padding(input_shape[2], kernel_shape[0],
                                      strides[0])
             pad_w = get_same_padding(input_shape[3], kernel_shape[1],
@@ -864,7 +1032,7 @@ class ONNXOpMapper(OpMapper):
             param_attr=attr)

     def GlobalAveragePool(self, node):
-        val_x = self.graph.get_node(node.layer.input[0], copy=True)
+        val_x = self.graph.get_input_node(node, idx=0, copy=True)
         val_y = self.graph.get_node(node.layer.output[0], copy=True)

         input_shape = val_x.out_shapes[0]
         output_shape = val_y.out_shapes[0]
@@ -886,21 +1054,19 @@ class ONNXOpMapper(OpMapper):
             param_attr=attr)
     def Conv(self, node):
-        val_x = self.graph.get_node(node.layer.input[0], copy=True)
-        val_w = self.graph.get_node(node.layer.input[1], copy=True)
+        val_x = self.graph.get_input_node(node, idx=0, copy=True)
+        val_w = self.graph.get_input_node(node, idx=1, copy=True)
         val_y = self.graph.get_node(node.layer.output[0], copy=True)

         self.omit_nodes.append(val_w.layer_name)

         has_bias = len(node.layer.input) == 3
         if has_bias:
-            val_b = self.graph.get_node(node.layer.input[2], copy=True)
+            val_b = self.graph.get_input_node(node, idx=2, copy=True)
             self.omit_nodes.append(val_b.layer_name)
         auto_pad = node.get_attr('auto_pad', 'NOTSET')

-        kernel_shape = val_w.out_shapes[0][2:]  # OI...
-        assert kernel_shape == node.get_attr(
-            'kernel_shape'), 'kernel_shape in attr unmatches value_info'  # HW
+        kernel_shape = node.get_attr('kernel_shape')
         convnd = len(kernel_shape)
         assert 2 <= convnd <= 3, 'only conv2d and conv3d is supported'
         num_out_channels = val_w.out_shapes[0][0]  # OI...
@@ -941,9 +1107,9 @@ class ONNXOpMapper(OpMapper):
             param_attr=attr)

     def ConvTranspose(self, node):
-        val_x = self.graph.get_node(node.layer.input[0], copy=True)
-        val_w = self.graph.get_node(node.layer.input[1], copy=True)
-        val_b = self.graph.get_node(node.layer.input[2], copy=True)
+        val_x = self.graph.get_input_node(node, idx=0, copy=True)
+        val_w = self.graph.get_input_node(node, idx=1, copy=True)
+        val_b = self.graph.get_input_node(node, idx=2, copy=True)

         self.omit_nodes.append(val_w.layer_name)
         self.omit_nodes.append(val_b.layer_name)
@@ -952,7 +1118,7 @@ class ONNXOpMapper(OpMapper):
         auto_pad = node.get_attr('auto_pad', 'NOTSET')
         out_padding = node.get_attr('output_padding', [0, 0])
-        kernel_shape = node.get_attr('kernel_shape', val_w.out_shapes[0][2:])
+        kernel_shape = node.get_attr('kernel_shape')
         assert kernel_shape, 'kernel_shape not inferred'
         convnd = len(kernel_shape)
         assert 2 <= convnd <= 3, 'only conv2d_transpose and conv3d_transpose supported'
...
@@ -785,6 +785,9 @@ class TFOpMapper(OpMapper):
         start = self.graph.get_node(node.layer.input[0], copy=True)
         limit = self.graph.get_node(node.layer.input[1], copy=True)
         delta = self.graph.get_node(node.layer.input[2], copy=True)
+        self.add_omit_nodes(start.layer_name, node.layer_name)
+        self.add_omit_nodes(limit.layer_name, node.layer_name)
+        self.add_omit_nodes(delta.layer_name, node.layer_name)
         if start.layer_type == "Const":
             start = start.value
         else:
@@ -797,9 +800,6 @@ class TFOpMapper(OpMapper):
             delta = delta.value
         else:
             delta = self.decoder.infer_tensor(delta)
-        self.add_omit_nodes(start.layer_name, node.layer_name)
-        self.add_omit_nodes(limit.layer_name, node.layer_name)
-        self.add_omit_nodes(delta.layer_name, node.layer_name)

         inputs = {"start": start, "end": limit, "step": delta}
         attr = {"dtype": string(node.dtype)}
...
@@ -74,7 +74,7 @@ class TFOpMapperNHWC(OpMapper):
         unsupported_ops = set()
         sys.stderr.write("Total nodes: {}\n".format(len(self.graph.topo_sort)))
         for i, node_name in enumerate(self.graph.topo_sort):
-            sys.stderr.write("\rConverting node {} ... ".format(i))
+            sys.stderr.write("\rConverting node {} ... ".format(i + 1))
             node = self.graph.get_node(node_name)
             op = node.layer_type
             if op in self.directly_map_ops:
@@ -99,7 +99,7 @@ class TFOpMapperNHWC(OpMapper):
             for op in unsupported_ops:
                 print("========== {} ============".format(op))
             sys.exit(-1)
-        sys.stderr.write("\nDone\n")
+        sys.stderr.write("\nDone!\n")

     def add_omit_nodes(self, in_node_name, out_node_name):
         in_node = self.graph.get_node(in_node_name)
...