diff --git a/x2paddle/convert.py b/x2paddle/convert.py
index 70783a73b0d81d07f3501d2d0815f0fa24deb825..31220d3a8eb2e45b1b5c2e03edbd5ba73c40a98a 100644
--- a/x2paddle/convert.py
+++ b/x2paddle/convert.py
@@ -1,4 +1,4 @@
-# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
+## Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
 #
 # Licensed under the Apache License, Version 2.0 (the "License"
 # you may not use this file except in compliance with the License.
@@ -117,27 +117,6 @@ def tf2paddle(model_path,
         return
 
     from x2paddle.decoder.tf_decoder import TFDecoder
-<<<<<<< HEAD
-    from x2paddle.op_mapper.tf_op_mapper import TFOpMapper
-    from x2paddle.optimizer.tensorflow.bias import BiasOpt
-    from x2paddle.optimizer.tensorflow.transpose import TransposeOpt
-    from x2paddle.optimizer.tensorflow.batch_norm import BatchNormOpt
-    from x2paddle.optimizer.tensorflow.prelu import PReLUOpt
-
-    print("Now translating model from tensorflow to paddle.")
-    model = TFDecoder(model_path, define_input_shape=define_input_shape)
-    mapper = TFOpMapper(model)
-    program.build()
-    bias_opt = BiasOpt()
-    transpose_opt = TransposeOpt()
-    batch_norm_opt = BatchNormOpt()
-    prelu_opt = PReLUOpt()
-    bias_opt.run(program)
-    batch_norm_opt.run(program)
-    prelu_opt.run(program)
-    transpose_opt.run(program)
-    program.gen_model(save_dir)
-=======
     if paddle_type == "dygraph":
         from x2paddle.op_mapper.dygraph.tf2paddle.tf_op_mapper import TFOpMapper
     else:
@@ -167,7 +146,6 @@ def tf2paddle(model_path,
 
     transpose_opt.run(mapper.paddle_graph)
     mapper.paddle_graph.gen_model(save_dir)
->>>>>>> paddle-2.0
 
 
 def caffe2paddle(proto, weight, save_dir, caffe_proto,
@@ -259,20 +237,7 @@ def pytorch2paddle(module, save_dir, jit_type="trace", input_examples=None):
     graph_opt = GraphOptimizer(source_frame="pytorch", paddle_type="dygraph", jit_type=jit_type)
     graph_opt.optimize(mapper.paddle_graph)
     print("Model optimized.")
-<<<<<<< HEAD
-    if input_shapes is not None:
-        real_input_shapes = list()
-        for shape in input_shapes:
-            sp = shape[1:-1].split(",")
-            for i, s in enumerate(sp):
-                sp[i] = int(s)
-            real_input_shapes.append(sp)
-    else:
-        real_input_shapes = None
-    mapper.graph.gen_model(save_dir, real_input_shapes)
-=======
     mapper.paddle_graph.gen_model(save_dir, jit_type=jit_type)
->>>>>>> paddle-2.0
 
 
 def main():
@@ -343,12 +308,8 @@ def main():
 
         else:
             raise Exception(
-<<<<<<< HEAD
-                "--framework only support tensorflow/caffe/onnx/ now")
-=======
                 "--framework only support tensorflow/caffe/onnx now")
->>>>>>> paddle-2.0
 
 
 if __name__ == "__main__":
-    main()
+    main()
\ No newline at end of file
diff --git a/x2paddle/core/program.py b/x2paddle/core/program.py
index 825766b682c81dff1762d6d81b776f1470c0c422..d3d3549b80473733cbb20768b668d4cdb3d368f8 100644
--- a/x2paddle/core/program.py
+++ b/x2paddle/core/program.py
@@ -293,7 +293,7 @@ class PaddleGraph(object):
         try:
             self.dygraph2static(save_dir, input_shapes, input_types)
         except Exception as e:
-            print("Fail to generate inference model! Problem happend while export inference model from python code '{}';\n".format(coda_path))
+            print("Failed to generate inference model! Problem happened while exporting inference model from python code '{}';\n".format(code_path))
             print("===================Error Information===============")
             raise e
 
diff --git a/x2paddle/decoder/onnx_decoder.py b/x2paddle/decoder/onnx_decoder.py
index cf2a29af619a506b4c552b80ab19265a4c53dff0..db8027963333bbb3c10a6451ecb617165dddb430 100644
--- a/x2paddle/decoder/onnx_decoder.py
+++ b/x2paddle/decoder/onnx_decoder.py
@@ -154,11 +154,7 @@ class ONNXGraph(Graph):
         if self.graph is None:
             print('[WARNING] Shape inference by ONNX offical interface.')
             onnx_model = shape_inference.infer_shapes(onnx_model)
-<<<<<<< HEAD
-            self.graph = onnx_model.graph
-=======
             self.graph = onnx_model.graph
->>>>>>> paddle-2.0
         print("shape inferenced.")
         self.build()
         self.collect_value_infos()
@@ -554,4 +550,4 @@ class ONNXDecoder(object):
                 node.input[i] = self.make_variable_name(node.input[i])
             for i in range(len(node.output)):
                 node.output[i] = self.make_variable_name(node.output[i])
-        return model
+        return model
\ No newline at end of file
diff --git a/x2paddle/decoder/onnx_shape_inference.py b/x2paddle/decoder/onnx_shape_inference.py
index 01f966edb163cf4d6fab33b3b48c009777f99701..952add658367169cc6b30e565b21823bdfe431a1 100644
--- a/x2paddle/decoder/onnx_shape_inference.py
+++ b/x2paddle/decoder/onnx_shape_inference.py
@@ -1601,15 +1601,11 @@ class SymbolicShapeInference:
                 in_mp)
             symbolic_shape_inference._update_output_from_vi()
             if not all_shapes_inferred:
+                print('!' * 10)
                 symbolic_shape_inference.out_mp_ = shape_inference.infer_shapes(
                     symbolic_shape_inference.out_mp_)
-            print('[INFO] Complete symbolic shape inference.')
         except:
-<<<<<<< HEAD
-            print('[WARNING] Incomplete symbolic shape inference.')
-=======
             print('[WARNING] Incomplete symbolic shape inference')
->>>>>>> paddle-2.0
             symbolic_shape_inference.out_mp_ = shape_inference.infer_shapes(
                 symbolic_shape_inference.out_mp_)
-        return symbolic_shape_inference.out_mp_.graph
+        return symbolic_shape_inference.out_mp_.graph
\ No newline at end of file
diff --git a/x2paddle/optimizer/tensorflow/prelu.py b/x2paddle/optimizer/tensorflow/prelu.py
index a10c10afa6d9cff47b360cc4f8bc5d7246b45692..30b298d0b8ae70f92ec63f1f08ace63d41e9f9b3 100644
--- a/x2paddle/optimizer/tensorflow/prelu.py
+++ b/x2paddle/optimizer/tensorflow/prelu.py
@@ -118,12 +118,6 @@ class PReLUOpt:
                 graph.layers[transpose0.id] = transpose0
                 graph.layers[prelu.id] = prelu
                 graph.layers[transpose1.id] = transpose1
-<<<<<<< HEAD
-                graph.parameters[alpha.outputs[0]] = np.expand_dims(graph.parameters[alpha.outputs[0]], 0)
-                graph.build()
-
-=======
                 first_axis = graph.parameters[alpha.outputs[0]].shape[0]
                 graph.parameters[alpha.outputs[0]] = np.reshape(graph.parameters[alpha.outputs[0]], (1, first_axis, 1, 1))
-                graph.build()
->>>>>>> paddle-2.0
+                graph.build()
\ No newline at end of file
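
Notes on the hunks above. The `tf2paddle` hunks keep the paddle-2.0 side of the conflict: the op mapper is chosen by `paddle_type`, and the optimizer passes run against `mapper.paddle_graph` instead of the old module-level `program`. The lines between the two hunks are not part of the diff, so the middle of the following sketch is an assumption rather than verbatim source; in particular the `static` mapper import path and the `mapper.paddle_graph.build()` call are guesses at what the elided code does.

```python
# Sketch of the post-merge tf2paddle flow, inferred from hunk context.
from x2paddle.decoder.tf_decoder import TFDecoder
from x2paddle.optimizer.tensorflow.bias import BiasOpt
from x2paddle.optimizer.tensorflow.transpose import TransposeOpt
from x2paddle.optimizer.tensorflow.batch_norm import BatchNormOpt
from x2paddle.optimizer.tensorflow.prelu import PReLUOpt

def tf2paddle_sketch(model_path, save_dir, paddle_type="dygraph",
                     define_input_shape=False):
    if paddle_type == "dygraph":
        from x2paddle.op_mapper.dygraph.tf2paddle.tf_op_mapper import TFOpMapper
    else:
        # Assumed import path for the static-graph mapper; the diff
        # only shows the dygraph branch.
        from x2paddle.op_mapper.static.tf2paddle.tf_op_mapper import TFOpMapper

    model = TFDecoder(model_path, define_input_shape=define_input_shape)
    mapper = TFOpMapper(model)
    mapper.paddle_graph.build()  # assumed, mirrors the old program.build()

    # Each pass rewrites the graph in place; transpose elimination runs
    # last, matching the trailing context of the second hunk.
    for opt in (BiasOpt(), BatchNormOpt(), PReLUOpt(), TransposeOpt()):
        opt.run(mapper.paddle_graph)

    mapper.paddle_graph.gen_model(save_dir)
```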
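The `pytorch2paddle` hunk drops the HEAD-side parsing of `input_shapes`, which converted strings like `"[1,3,224,224]"` into lists of ints before calling `gen_model`; the paddle-2.0 API takes `jit_type` instead. For reference, a cleaned-up, standalone version of the removed parsing logic (assuming the same bracketed, comma-separated string format):

```python
def parse_input_shapes(input_shapes):
    """Turn strings like "[1,3,224,224]" into [[1, 3, 224, 224], ...].

    Returns None when no shapes are given, mirroring the removed code.
    """
    if input_shapes is None:
        return None
    real_input_shapes = []
    for shape in input_shapes:
        # Strip the surrounding "[" and "]", then split on commas.
        sp = shape[1:-1].split(",")
        real_input_shapes.append([int(s) for s in sp])
    return real_input_shapes

print(parse_input_shapes(["[1,3,224,224]"]))  # [[1, 3, 224, 224]]
```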
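Both ONNX hunks follow the same pattern: attempt symbolic shape inference first, and when it fails or leaves shapes unresolved, fall back to `onnx.shape_inference.infer_shapes`, the official interface. A minimal sketch of that fallback pattern using only the stock `onnx` API; `symbolic_infer` is a placeholder for X2Paddle's `SymbolicShapeInference` entry point, not a real function in either library:

```python
import onnx
from onnx import shape_inference

def infer_shapes_with_fallback(model_path, symbolic_infer=None):
    """Run a caller-supplied symbolic inference first; on any failure,
    fall back to ONNX's official static shape inference."""
    model = onnx.load(model_path)
    try:
        if symbolic_infer is None:
            raise RuntimeError("no symbolic inference available")
        inferred = symbolic_infer(model)
    except Exception:
        print('[WARNING] Incomplete symbolic shape inference')
        inferred = shape_inference.infer_shapes(model)
    return inferred.graph
```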
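The `prelu.py` hunk keeps the paddle-2.0 side, which reshapes the PReLU alpha parameter from a 1-D array of length C to `(1, C, 1, 1)` instead of merely prepending one axis with `np.expand_dims`. The reshape is what makes the per-channel slope broadcast over an NCHW tensor. A minimal numpy illustration (the `x` and `alpha` values here are made up):

```python
import numpy as np

# Toy NCHW feature map: batch=2, channels=3, height=4, width=4.
x = np.random.randn(2, 3, 4, 4).astype("float32")

# Per-channel PReLU slope, stored as a 1-D array of length C.
alpha = np.array([0.1, 0.2, 0.3], dtype="float32")

# A (3,) array does not broadcast against (2, 3, 4, 4) along the
# channel axis, so it is reshaped to (1, C, 1, 1) first -- the same
# transformation the optimizer applies to graph.parameters.
alpha_nchw = np.reshape(alpha, (1, alpha.shape[0], 1, 1))

# PReLU: x where x > 0, else alpha * x, computed channel-wise.
y = np.where(x > 0, x, alpha_nchw * x)
print(y.shape)  # (2, 3, 4, 4)
```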