diff --git a/x2paddle/core/fluid_code.py b/x2paddle/core/fluid_code.py
index 66cfb9352af5308f7d0d82c0cc5b0a3136d8b37a..ba2f57a8b3037b5768784c12bfe5802503785816 100644
--- a/x2paddle/core/fluid_code.py
+++ b/x2paddle/core/fluid_code.py
@@ -13,6 +13,7 @@
 # limitations under the License.
 
 from x2paddle.core.graph import GraphNode
+import collections
 
 
 class Layer(object):
@@ -36,25 +37,34 @@ class Layer(object):
         if isinstance(self.inputs, list):
             in_list = "["
             for input in self.inputs:
-                assert isinstance(
-                    input, GraphNode), "Type of input should be GraphNode"
-                if hasattr(input, "index"):
-                    in_list += (input.layer_name + "[{}]".format(input.index) +
-                                ", ")
+                if isinstance(input, GraphNode):
+                    if hasattr(input, "index"):
+                        in_list += (input.layer_name +
+                                    "[{}]".format(input.index) + ", ")
+                    else:
+                        in_list += (input.layer_name + ", ")
+                elif isinstance(input, str):
+                    in_list += (input + ", ")
                 else:
-                    in_list += (input.layer_name + ", ")
+                    raise Exception(
+                        "Element of inputs should be GraphNode or String")
             in_list = in_list.strip(", ") + "], "
             layer_code += in_list
         elif isinstance(self.inputs, dict):
-            for key, input in self.inputs.items():
-                assert isinstance(
-                    input, GraphNode), "Type of input should be GraphNode"
-                if hasattr(input, "index"):
-                    layer_code = layer_code + key + "={}, ".format(
-                        input.layer_name + "[{}]".format(input.index))
+            inputs = collections.OrderedDict(self.inputs)
+            for key, input in inputs.items():
+                if isinstance(input, GraphNode):
+                    if hasattr(input, "index"):
+                        layer_code = layer_code + key + "={}, ".format(
+                            input.layer_name + "[{}]".format(input.index))
+                    else:
+                        layer_code = layer_code + key + "={}, ".format(
+                            input.layer_name)
+                elif isinstance(input, str):
+                    layer_code = layer_code + key + "={}, ".format(input)
                 else:
-                    layer_code = layer_code + key + "={}, ".format(
-                        input.layer_name)
+                    raise Exception(
+                        "Element of inputs should be GraphNode or String")
         elif isinstance(self.inputs, GraphNode):
             if hasattr(self.inputs, "index"):
                 layer_code += (self.inputs.layer_name +
@@ -66,7 +76,8 @@
         else:
             raise Exception("Unknown type of inputs.")
 
-        for key, value in self.param_attr.items():
+        param_attr = collections.OrderedDict(self.param_attr)
+        for key, value in param_attr.items():
             layer_code = layer_code + key + "={}, ".format(value)
 
         layer_code = layer_code.strip(", ")
@@ -97,7 +108,8 @@
         else:
             raise Exception("Unknown type of inputs.")
 
-        for key, value in self.param_attr.items():
+        param_attr = collections.OrderedDict(self.param_attr)
+        for key, value in param_attr.items():
             layer_code = layer_code + key + "={}, ".format(value)
 
         layer_code = layer_code.strip(", ")
diff --git a/x2paddle/decoder/tf_decoder.py b/x2paddle/decoder/tf_decoder.py
index 898ea3b448ed06d2ed8174608ddfb9fa48eccf8a..fe64414b951c61a72054cfe75f83b98ecde37dee 100644
--- a/x2paddle/decoder/tf_decoder.py
+++ b/x2paddle/decoder/tf_decoder.py
@@ -176,8 +176,9 @@
         self.sess.graph.as_default()
         tf.import_graph_def(graph_def, name='', input_map=input_map)
 
-        for node in graph_def.node:
-            print(node.name, node.op, node.input)
+
+#        for node in graph_def.node:
+#            print(node.name, node.op, node.input)
 
         self.sess.run(tf.global_variables_initializer())
 
diff --git a/x2paddle/op_mapper/tf_op_mapper.py b/x2paddle/op_mapper/tf_op_mapper.py
index ad2313869f38200cde91691db3186784794d33b9..f0a886f755592d8dbda853455f32cbde9d4d05ac 100644
--- a/x2paddle/op_mapper/tf_op_mapper.py
+++ b/x2paddle/op_mapper/tf_op_mapper.py
@@ -19,6 +19,31 @@ import numpy
 
 
 class TFOpMapper(OpMapper):
+
+    directly_map_ops = {
+        'Relu': ['relu'],
+        'Relu6': ['relu6'],
+        'Shape': ['shape'],
+        'Abs': ['abs'],
+        'Sigmoid': ['sigmoid'],
+        'Exp': ['exp'],
+        'Rsqrt': ['rsqrt'],
+        'Squeeze': ['squeeze', {
+            'squeeze_dims': 'axes'
+        }],
+        'Softmax': ['softmax', {
+            'axis': 'axis'
+        }],
+    }
+    elementwise_ops = {
+        'Add': 'elementwise_add',
+        'RealDiv': 'elementwise_div',
+        'BiasAdd': 'elementwise_add',
+        'Sub': 'elementwise_sub',
+        'Maximum': 'elementwise_max',
+        'Mul': 'elementwise_mul'
+    }
+
     def __init__(self, decoder):
         super(TFOpMapper, self).__init__()
         self.decoder = decoder
@@ -30,15 +55,20 @@
 
         print("Total nodes: {}".format(len(self.graph.topo_sort)))
 
         # check if ops in model are all supported
-        if not self.op_checker():
-            raise Exception("Model are not supported yet.")
+        # TODO
 
         for node_name in self.graph.topo_sort:
             node = self.graph.get_node(node_name)
             op = node.layer_type
-            if hasattr(self, op):
+            if op in self.directly_map_ops:
+                self.directly_map(node)
+            elif op in self.elementwise_ops:
+                self.elementwise_map(node)
+            elif hasattr(self, op):
                 func = getattr(self, op)
                 func(node)
+            else:
+                raise Exception("OP: [{}] not supported yet".format(op))
 
         for i in range(len(self.graph.topo_sort)):
@@ -47,7 +77,24 @@
             node = self.graph.get_node(node_name)
             self.net_code += node.fluid_code.gen_codes()
 
-    def elementwise_operator(self, node, op_type):
+    def directly_map(self, node):
+        assert node.layer_type in self.directly_map_ops
+        op_info = self.directly_map_ops[node.layer_type]
+        input = self.graph.get_node(node.layer.input[0], copy=True)
+        attr = dict()
+        for param in op_info[1:]:
+            tf_param_name = list(param.keys())[0]
+            pd_param_name = list(param.values())[0]
+            tf_param = node.get_attr(tf_param_name)
+            attr[pd_param_name] = tf_param
+        node.fluid_code.add_layer(op_info[0],
+                                  inputs=input,
+                                  output=node,
+                                  param_attr=attr)
+
+    def elementwise_map(self, node):
+        assert node.layer_type in self.elementwise_ops
+        op_type = self.elementwise_ops[node.layer_type]
         x = self.graph.get_node(node.layer.input[0], copy=True)
         y = self.graph.get_node(node.layer.input[1], copy=True)
         x_shape = x.out_shapes[0]
@@ -161,41 +208,6 @@
                                   output=node,
                                   param_attr=attr)
 
-    def RealDiv(self, node):
-        self.elementwise_operator(node, "elementwise_div")
-
-    def Relu(self, node):
-        input = self.graph.get_node(node.layer.input[0], copy=True)
-        node.fluid_code.add_layer("relu",
-                                  inputs=input,
-                                  output=node,
-                                  param_attr=None)
-
-    def Squeeze(self, node):
-        input = self.graph.get_node(node.layer.input[0], copy=True)
-        squeeze_dims = node.get_attr('squeeze_dims')
-        attr = {'axes': squeeze_dims}
-        node.fluid_code.add_layer("squeeze",
-                                  inputs=input,
-                                  output=node,
-                                  param_attr=attr)
-
-    def BiasAdd(self, node):
-        input = self.graph.get_node(node.layer.input[0], copy=True)
-        bias = self.graph.get_node(node.layer.input[1], copy=True)
-        inputs = {'x': input, 'y': bias}
-        node.fluid_code.add_layer("elementwise_add",
-                                  inputs=inputs,
-                                  output=node,
-                                  param_attr=None)
-
-    def Identity(self, node):
-        input = self.graph.get_node(node.layer.input[0], copy=True)
-        node.fluid_code.add_layer("assign",
-                                  inputs=input,
-                                  output=node,
-                                  param_attr=None)
-
     def MaxPool(self, node):
         input = self.graph.get_node(node.layer.input[0], copy=True)
 
@@ -314,13 +326,6 @@
                                   output=node,
                                   param_attr=attr)
 
-    def Relu6(self, node):
-        input = self.graph.get_node(node.layer.input[0], copy=True)
-        node.fluid_code.add_layer("relu6",
-                                  inputs=input,
-                                  output=node,
-                                  param_attr=None)
-
     def FusedBatchNorm(self, node):
         input = self.graph.get_node(node.layer.input[0], copy=True)
         gamma = self.graph.get_node(node.layer.input[1], copy=True)
@@ -433,13 +438,6 @@
                                   output=node,
                                   param_attr=attr)
 
-    def Shape(self, node):
-        input = self.graph.get_node(node.layer.input[0], copy=True)
-        node.fluid_code.add_layer("shape",
-                                  inputs=input,
-                                  output=node,
-                                  param_attr=None)
-
     def Reshape(self, node):
         input = self.graph.get_node(node.layer.input[0], copy=True)
         param = self.graph.get_node(node.layer.input[1], copy=True)
@@ -474,27 +472,6 @@
                                   inputs=input,
                                   output=node,
                                   param_attr=attr)
-        # temporary shape inference fix
-
-
-#        if param.layer_type == "Pack":
-#            shape_slices = list()
-#            for i in range(len(param.layer.input)):
-#                slice = self.graph.get_node(param.layer.input[i], copy=True)
-#                if slice.layer_type == "Const":
-#                    shape_slices.append(slice.value.tolist())
-#                else:
-#                    shape_slices.append(0)
-#            if shape_slices.count(-1) == 0:
-#                shape_slices[shape_slices.index(0)] = -1
-#            attr = {"shape": shape_slices}
-#            node.fluid_code.add_layer("reshape",
-#                                      inputs=node,
-#                                      output=node,
-#                                      param_attr=attr)
-
-    def Add(self, node):
-        self.elementwise_operator(node, "elementwise_add")
 
     def AvgPool(self, node):
         input = self.graph.get_node(node.layer.input[0], copy=True)
@@ -542,23 +519,6 @@
                                   output=node,
                                   param_attr=attr)
 
-    def Softmax(self, node):
-        input = self.graph.get_node(node.layer.input[0], copy=True)
-        node.fluid_code.add_layer("softmax",
-                                  inputs=input,
-                                  output=node,
-                                  param_attr=None)
-
-    def Sigmoid(self, node):
-        input = self.graph.get_node(node.layer.input[0], copy=True)
-        node.fluid_code.add_layer("sigmoid",
-                                  inputs=input,
-                                  output=node,
-                                  param_attr=None)
-
-    def Maximum(self, node):
-        self.elementwise_operator(node, "elementwise_max")
-
     def SplitV(self, node):
         input = self.graph.get_node(node.layer.input[0], copy=True)
         num_sections = self.graph.get_node(node.layer.input[1], copy=True)
@@ -576,13 +536,6 @@
                                   output=node,
                                   param_attr=attr)
 
-    def Exp(self, node):
-        input = self.graph.get_node(node.layer.input[0], copy=True)
-        node.fluid_code.add_layer("exp",
-                                  inputs=input,
-                                  output=node,
-                                  param_attr=None)
-
     def ConcatV2(self, node):
         inputs = [
             self.graph.get_node(name, copy=True)
@@ -649,19 +602,6 @@
                                   output=node,
                                   param_attr=None)
 
-    def Mul(self, node):
-        self.elementwise_operator(node, "elementwise_mul")
-
-    def Sub(self, node):
-        self.elementwise_operator(node, "elementwise_sub")
-
-    def Rsqrt(self, node):
-        input = self.graph.get_node(node.layer.input[0], copy=True)
-        node.fluid_code.add_layer("rsqrt",
-                                  inputs=input,
-                                  output=node,
-                                  param_attr=None)
-
     def swish_f32(self, node):
         input = self.graph.get_node(node.layer.input[0], copy=True)
         node.fluid_code.add_layer("sigmoid",
@@ -765,13 +705,6 @@
                                   output=node,
                                   param_attr=attr)
 
-    def Abs(self, node):
-        input = self.graph.get_node(node.layer.input[0], copy=True)
-        node.fluid_code.add_layer("abs",
-                                  inputs=input,
-                                  output=node,
-                                  param_attr=None)
-
     def Conv2DBackpropInput(self, node):
         input = self.graph.get_node(node.layer.input[0], copy=True)
         kernel = self.graph.get_node(node.layer.input[1], copy=True)
diff --git a/x2paddle/optimizer/tf_optimizer.py b/x2paddle/optimizer/tf_optimizer.py
index 2111ee8bb6a58f7ee791448688d9a4a95b3aea86..fcf6cdfbbb8e4b44b163141b719201d06ff9a9ad 100644
--- a/x2paddle/optimizer/tf_optimizer.py
+++ b/x2paddle/optimizer/tf_optimizer.py
@@ -15,56 +15,8 @@
 # TODO useless node remove
 from x2paddle.decoder.tf_decoder import TFGraph
 
+# TODO bn merge
 
-class TFGraphOptimizer(object):
-    def __init__(self):
-        print("Doint Nothing")
+# TODO activation merge
 
-    def remove_isolated_node(self, graph):
-        # delete isolated nodes
-        isolated_nodes = list()
-        for node_name in graph.node_map.keys():
-            if len(graph.get_node(node_name).inputs) == 0 or len(
-                    graph.get_node(node_name).outputs) == 0:
-                isolated_nodes.append(node_name)
-
-                graph.remove_node(node_name)
-
-    def remove_identity_node(self, graph):
-        identity_node = list()
-        for node_name, node in graph.node_map.items():
-            if node.layer_type == "Identity":
-                identity_node.append(node_name)
-
-        for node_name in identity_node:
-            node = graph.get_node(node_name)
-            # Remind: Only 1 input for Identity node
-            input_node = graph.get_node(node.inputs[0])
-
-            # remove identity node from graph
-            idx = input_node.outputs.index(node_name)
-            del input_node.outputs[idx]
-
-            output_names = node.outputs
-            for output_name in output_names:
-                output_node = graph.get_node(output_name)
-                idx = output_node.inputs.index(node_name)
-                output_node.inputs[idx] = input_node.layer_name
-
-            idx = graph.topo_sort.index(node_name)
-            del graph.topo_sort[idx]
-
-    def run(self, graph):
-        self.remove_isolated_node(graph)
-        self.remove_identity_node(graph)
-
-
-# TODO identity node remove
-
-# TODO subgraph optimize
-
-# TODO compute optimize
-
-# activation merge
-
-# biasadd merge
+# TODO biasadd merge
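Two patterns in this patch are worth a quick illustration. First, `fluid_code.py` now wraps `self.inputs` and `self.param_attr` in `collections.OrderedDict` before iterating. A minimal sketch of the presumed motivation (stable argument order in the generated code; on CPython before 3.6, plain-dict iteration order is arbitrary) — the `param_attr` values here are hypothetical:

```python
import collections

# Hypothetical kwargs for one generated layer call. On old CPython a plain
# dict may iterate these in any order, so the emitted source could reorder
# arguments from run to run.
param_attr = {'num_filters': 64, 'filter_size': 3, 'stride': 1}

# OrderedDict pins one iteration order at construction time, so every later
# pass over the attributes emits arguments in the same order.
ordered = collections.OrderedDict(param_attr)

layer_code = "fluid.layers.conv2d(input, "
for key, value in ordered.items():
    layer_code = layer_code + "{}={}, ".format(key, value)
print(layer_code.strip(", ") + ")")
```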
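Second, the per-op handler methods removed from `TFOpMapper` (Relu, Squeeze, BiasAdd, ...) are replaced by class-level lookup tables. A standalone sketch of that dispatch, assuming simplified tables; `SimpleNode` is a hypothetical stand-in for the real `GraphNode` with only the fields needed here:

```python
# Ops that map 1:1 onto a Paddle layer live in a table instead of each having
# a handler method. A trailing dict renames a TF attribute to its Paddle
# counterpart, e.g. TF 'squeeze_dims' becomes Paddle 'axes'.
directly_map_ops = {
    'Relu': ['relu'],
    'Squeeze': ['squeeze', {'squeeze_dims': 'axes'}],
}
elementwise_ops = {'Add': 'elementwise_add', 'Mul': 'elementwise_mul'}


class SimpleNode(object):
    def __init__(self, layer_type, attrs=None):
        self.layer_type = layer_type
        self.attrs = attrs or {}

    def get_attr(self, name):
        return self.attrs[name]


def map_node(node):
    """Return the Paddle op name and translated attributes for a TF node."""
    op = node.layer_type
    if op in directly_map_ops:
        op_info = directly_map_ops[op]
        attr = dict()
        for param in op_info[1:]:
            tf_name = list(param.keys())[0]
            pd_name = list(param.values())[0]
            attr[pd_name] = node.get_attr(tf_name)
        return op_info[0], attr
    elif op in elementwise_ops:
        return elementwise_ops[op], {}
    raise Exception("OP: [{}] not supported yet".format(op))


print(map_node(SimpleNode('Squeeze', {'squeeze_dims': [1]})))
# -> ('squeeze', {'axes': [1]})
print(map_node(SimpleNode('Add')))
# -> ('elementwise_add', {})
```

Supporting another 1:1 op then means adding one table entry rather than a new method.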