From 97df7f87ec415dcc3804283d69cf4ee2363ffbcd Mon Sep 17 00:00:00 2001
From: jiangjiajun
Date: Tue, 29 Sep 2020 07:33:53 +0000
Subject: [PATCH] fix for keras bert

---
 x2paddle/decoder/tf_decoder.py              |  2 +-
 x2paddle/op_mapper/tf_op_mapper.py          | 12 ++++++++++--
 x2paddle/optimizer/tensorflow/batch_norm.py |  1 +
 x2paddle/optimizer/tensorflow/bias.py       |  1 +
 x2paddle/optimizer/tensorflow/transpose.py  |  1 +
 5 files changed, 14 insertions(+), 3 deletions(-)

diff --git a/x2paddle/decoder/tf_decoder.py b/x2paddle/decoder/tf_decoder.py
index dc04172..f23fc32 100644
--- a/x2paddle/decoder/tf_decoder.py
+++ b/x2paddle/decoder/tf_decoder.py
@@ -60,7 +60,7 @@ class TFGraphNode(GraphNode):
 
     @property
     def dtype(self):
-        keys = ['dtype', 'Tidx', 'T', 'DstT']
+        keys = ['dtype', 'T', 'DstT']
         for k in keys:
             dtype = self.layer.attr[k].type
             if dtype > 0:
diff --git a/x2paddle/op_mapper/tf_op_mapper.py b/x2paddle/op_mapper/tf_op_mapper.py
index c15ccb0..541ee3c 100644
--- a/x2paddle/op_mapper/tf_op_mapper.py
+++ b/x2paddle/op_mapper/tf_op_mapper.py
@@ -744,7 +744,7 @@ class TFOpMapper(OpMapper):
 
         input_names = [i.name for i in inputs]
         for i, ipt in enumerate(inputs):
-            if node.dtype == 'bool':
+            if ipt.dtype == 'bool':
                 cast_name = gen_name('concat', 'cast')
                 program.add_layer(
                     kernel="fluid.layers.cast",
@@ -1213,9 +1213,17 @@ class TFOpMapper(OpMapper):
             attr["dim"] = reduce_idx.value.tolist()
         attr["keep_dim"] = node.get_attr("keep_dims")
 
+        input_name = input.name
+        if input.dtype != "bool":
+            input_name = gen_name("all", "cast")
+            program.add_layer(
+                "fluid.layers.cast",
+                inputs={"x": input.name},
+                outputs=[input_name],
+                dtype=string("bool"))
         program.add_layer(
             "fluid.layers.reduce_all",
-            inputs={"input": input.name},
+            inputs={"input": input_name},
             outputs=[node.name],
             **attr)
 
diff --git a/x2paddle/optimizer/tensorflow/batch_norm.py b/x2paddle/optimizer/tensorflow/batch_norm.py
index 315e949..3e3d813 100644
--- a/x2paddle/optimizer/tensorflow/batch_norm.py
+++ b/x2paddle/optimizer/tensorflow/batch_norm.py
@@ -8,6 +8,7 @@ class BatchNormOpt:
         pass
 
     def run(self, graph):
+        print("Optimize: BatchNormOpt...")
         layers = copy.deepcopy(graph.layers)
         for layer_id, layer in layers.items():
             if layer.kernel != "fluid.layers.elementwise_add":
diff --git a/x2paddle/optimizer/tensorflow/bias.py b/x2paddle/optimizer/tensorflow/bias.py
index ced691a..5930951 100644
--- a/x2paddle/optimizer/tensorflow/bias.py
+++ b/x2paddle/optimizer/tensorflow/bias.py
@@ -13,6 +13,7 @@ class BiasOpt:
         ]
 
     def run(self, graph):
+        print("Optimize: BiasOpt...")
         layers = copy.deepcopy(graph.layers)
         for layer_id, layer in layers.items():
             if layer.kernel in self.conv_layers or layer.kernel == "fluid.layers.transpose":
diff --git a/x2paddle/optimizer/tensorflow/transpose.py b/x2paddle/optimizer/tensorflow/transpose.py
index 81bd073..fbd10ab 100644
--- a/x2paddle/optimizer/tensorflow/transpose.py
+++ b/x2paddle/optimizer/tensorflow/transpose.py
@@ -36,6 +36,7 @@ class TransposeOpt:
         return count
 
     def run(self, graph):
+        print("Optimize: TransposeOpt...")
         total_layer_num = len(graph.layers)
         scanned_layers = set()
         optimized_transpose_layers = list()
--
GitLab