diff --git a/x2paddle/decoder/tf_decoder.py b/x2paddle/decoder/tf_decoder.py
index dc04172ac9e3fbc659cf5ce05ec297a1fc4764ab..f23fc32b2ed812e4a3adf9a5c506a8137f7f4cd0 100644
--- a/x2paddle/decoder/tf_decoder.py
+++ b/x2paddle/decoder/tf_decoder.py
@@ -60,7 +60,7 @@ class TFGraphNode(GraphNode):
 
     @property
     def dtype(self):
-        keys = ['dtype', 'Tidx', 'T', 'DstT']
+        keys = ['dtype', 'T', 'DstT']
         for k in keys:
             dtype = self.layer.attr[k].type
             if dtype > 0:
diff --git a/x2paddle/op_mapper/tf_op_mapper.py b/x2paddle/op_mapper/tf_op_mapper.py
index c15ccb0790e4ad58a46000ca7fb91b71a992dc84..541ee3c845d78df5e0ec20763b6e9b7d92b6791e 100644
--- a/x2paddle/op_mapper/tf_op_mapper.py
+++ b/x2paddle/op_mapper/tf_op_mapper.py
@@ -744,7 +744,7 @@ class TFOpMapper(OpMapper):
 
         input_names = [i.name for i in inputs]
         for i, ipt in enumerate(inputs):
-            if node.dtype == 'bool':
+            if ipt.dtype == 'bool':
                 cast_name = gen_name('concat', 'cast')
                 program.add_layer(
                     kernel="fluid.layers.cast",
@@ -1213,9 +1213,17 @@ class TFOpMapper(OpMapper):
         attr["dim"] = reduce_idx.value.tolist()
         attr["keep_dim"] = node.get_attr("keep_dims")
 
+        input_name = input.name
+        if input.dtype != "bool":
+            input_name = gen_name("all", "cast")
+            program.add_layer(
+                "fluid.layers.cast",
+                inputs={"x": input.name},
+                outputs=[input_name],
+                dtype=string("bool"))
         program.add_layer(
             "fluid.layers.reduce_all",
-            inputs={"input": input.name},
+            inputs={"input": input_name},
             outputs=[node.name],
             **attr)
 
diff --git a/x2paddle/optimizer/tensorflow/batch_norm.py b/x2paddle/optimizer/tensorflow/batch_norm.py
index 315e94968da1c5094f9245173cfb53238a2f8924..3e3d81360598c2b66a00735cd23059998f3bef2d 100644
--- a/x2paddle/optimizer/tensorflow/batch_norm.py
+++ b/x2paddle/optimizer/tensorflow/batch_norm.py
@@ -8,6 +8,7 @@ class BatchNormOpt:
         pass
 
     def run(self, graph):
+        print("Optimize: BatchNormOpt...")
         layers = copy.deepcopy(graph.layers)
         for layer_id, layer in layers.items():
             if layer.kernel != "fluid.layers.elementwise_add":
diff --git a/x2paddle/optimizer/tensorflow/bias.py b/x2paddle/optimizer/tensorflow/bias.py
index ced691a700afb4213351cdd6095cf8822602795f..593095127b5729f3ef6aae52af41341132b9e73b 100644
--- a/x2paddle/optimizer/tensorflow/bias.py
+++ b/x2paddle/optimizer/tensorflow/bias.py
@@ -13,6 +13,7 @@ class BiasOpt:
         ]
 
     def run(self, graph):
+        print("Optimize: BiasOpt...")
         layers = copy.deepcopy(graph.layers)
         for layer_id, layer in layers.items():
             if layer.kernel in self.conv_layers or layer.kernel == "fluid.layers.transpose":
diff --git a/x2paddle/optimizer/tensorflow/transpose.py b/x2paddle/optimizer/tensorflow/transpose.py
index 81bd073444970eaac0e4a72535b68d4a1290dbdf..fbd10ab230d81e3463a74c64a7eda7a9fba6d93d 100644
--- a/x2paddle/optimizer/tensorflow/transpose.py
+++ b/x2paddle/optimizer/tensorflow/transpose.py
@@ -36,6 +36,7 @@ class TransposeOpt:
         return count
 
     def run(self, graph):
+        print("Optimize: TransposeOpt...")
         total_layer_num = len(graph.layers)
         scanned_layers = set()
         optimized_transpose_layers = list()