提交 1055a2e1 编写于 作者: S SunAhong1993

fix the conflict

上级 9194d510
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
## Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
......@@ -117,27 +117,6 @@ def tf2paddle(model_path,
return
from x2paddle.decoder.tf_decoder import TFDecoder
<<<<<<< HEAD
from x2paddle.op_mapper.tf_op_mapper import TFOpMapper
from x2paddle.optimizer.tensorflow.bias import BiasOpt
from x2paddle.optimizer.tensorflow.transpose import TransposeOpt
from x2paddle.optimizer.tensorflow.batch_norm import BatchNormOpt
from x2paddle.optimizer.tensorflow.prelu import PReLUOpt
print("Now translating model from tensorflow to paddle.")
model = TFDecoder(model_path, define_input_shape=define_input_shape)
mapper = TFOpMapper(model)
program.build()
bias_opt = BiasOpt()
transpose_opt = TransposeOpt()
batch_norm_opt = BatchNormOpt()
prelu_opt = PReLUOpt()
bias_opt.run(program)
batch_norm_opt.run(program)
prelu_opt.run(program)
transpose_opt.run(program)
program.gen_model(save_dir)
=======
if paddle_type == "dygraph":
from x2paddle.op_mapper.dygraph.tf2paddle.tf_op_mapper import TFOpMapper
else:
......@@ -167,7 +146,6 @@ def tf2paddle(model_path,
transpose_opt.run(mapper.paddle_graph)
mapper.paddle_graph.gen_model(save_dir)
>>>>>>> paddle-2.0
def caffe2paddle(proto, weight, save_dir, caffe_proto,
......@@ -259,20 +237,7 @@ def pytorch2paddle(module, save_dir, jit_type="trace", input_examples=None):
graph_opt = GraphOptimizer(source_frame="pytorch", paddle_type="dygraph", jit_type=jit_type)
graph_opt.optimize(mapper.paddle_graph)
print("Model optimized.")
<<<<<<< HEAD
if input_shapes is not None:
real_input_shapes = list()
for shape in input_shapes:
sp = shape[1:-1].split(",")
for i, s in enumerate(sp):
sp[i] = int(s)
real_input_shapes.append(sp)
else:
real_input_shapes = None
mapper.graph.gen_model(save_dir, real_input_shapes)
=======
mapper.paddle_graph.gen_model(save_dir, jit_type=jit_type)
>>>>>>> paddle-2.0
def main():
......@@ -343,12 +308,8 @@ def main():
else:
raise Exception(
<<<<<<< HEAD
"--framework only support tensorflow/caffe/onnx/ now")
=======
"--framework only support tensorflow/caffe/onnx now")
>>>>>>> paddle-2.0
if __name__ == "__main__":
main()
main()
\ No newline at end of file
......@@ -293,7 +293,7 @@ class PaddleGraph(object):
try:
self.dygraph2static(save_dir, input_shapes, input_types)
except Exception as e:
print("Fail to generate inference model! Problem happend while export inference model from python code '{}';\n".format(coda_path))
print("Fail to generate inference model! Problem happend while export inference model from python code '{}';\n".format(code_path))
print("===================Error Information===============")
raise e
......
......@@ -154,11 +154,7 @@ class ONNXGraph(Graph):
if self.graph is None:
print('[WARNING] Shape inference by ONNX offical interface.')
onnx_model = shape_inference.infer_shapes(onnx_model)
<<<<<<< HEAD
self.graph = onnx_model.graph
=======
self.graph = onnx_model.graph
>>>>>>> paddle-2.0
print("shape inferenced.")
self.build()
self.collect_value_infos()
......@@ -554,4 +550,4 @@ class ONNXDecoder(object):
node.input[i] = self.make_variable_name(node.input[i])
for i in range(len(node.output)):
node.output[i] = self.make_variable_name(node.output[i])
return model
return model
\ No newline at end of file
......@@ -1601,15 +1601,11 @@ class SymbolicShapeInference:
in_mp)
symbolic_shape_inference._update_output_from_vi()
if not all_shapes_inferred:
print('!' * 10)
symbolic_shape_inference.out_mp_ = shape_inference.infer_shapes(
symbolic_shape_inference.out_mp_)
print('[INFO] Complete symbolic shape inference.')
except:
<<<<<<< HEAD
print('[WARNING] Incomplete symbolic shape inference.')
=======
print('[WARNING] Incomplete symbolic shape inference')
>>>>>>> paddle-2.0
symbolic_shape_inference.out_mp_ = shape_inference.infer_shapes(
symbolic_shape_inference.out_mp_)
return symbolic_shape_inference.out_mp_.graph
return symbolic_shape_inference.out_mp_.graph
\ No newline at end of file
......@@ -118,12 +118,6 @@ class PReLUOpt:
graph.layers[transpose0.id] = transpose0
graph.layers[prelu.id] = prelu
graph.layers[transpose1.id] = transpose1
<<<<<<< HEAD
graph.parameters[alpha.outputs[0]] = np.expand_dims(graph.parameters[alpha.outputs[0]], 0)
graph.build()
=======
first_axis = graph.parameters[alpha.outputs[0]].shape[0]
graph.parameters[alpha.outputs[0]] = np.reshape(graph.parameters[alpha.outputs[0]], (1, first_axis, 1, 1))
graph.build()
>>>>>>> paddle-2.0
graph.build()
\ No newline at end of file
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册