# Copyright (c) 2020  PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
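"""Command-line entry point for X2Paddle.

Converts TensorFlow, Caffe and ONNX models (and PyTorch modules or projects via
the dedicated entry points) into PaddlePaddle static graph models, with optional
export to the Paddle-Lite format.
"""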

from six import text_type as _text_type
from x2paddle import program
from x2paddle.utils import ConverterCheck
import argparse
import sys
import logging


def arg_parser():
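    """Build the command-line argument parser for x2paddle."""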
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--model",
        "-m",
        type=_text_type,
        default=None,
        help="define model file path for tensorflow or onnx")
    parser.add_argument(
        "--prototxt",
        "-p",
        type=_text_type,
        default=None,
        help="prototxt file of caffe model")
    parser.add_argument(
        "--weight",
        "-w",
        type=_text_type,
        default=None,
        help="weight file of caffe model")
    parser.add_argument(
        "--save_dir",
        "-s",
        type=_text_type,
        default=None,
        help="path to save translated model")
    parser.add_argument(
        "--framework",
        "-f",
        type=_text_type,
        default=None,
        help="define which deep learning framework (tensorflow/caffe/onnx/paddle2onnx)"
    )
    parser.add_argument(
        "--caffe_proto",
        "-c",
        type=_text_type,
        default=None,
        help="optional: the .py file compiled from the caffe proto file of the caffe model"
    )
    parser.add_argument(
        "--version",
        "-v",
        action="store_true",
        default=False,
        help="get version of x2paddle")
    parser.add_argument(
        "--define_input_shape",
        "-d",
        action="store_true",
        default=False,
        help="define input shape for tf model")
    parser.add_argument(
        "--convert_torch_project",
        "-tp",
        action='store_true',
        help="Convert the PyTorch Project.")
    parser.add_argument(
        "--project_dir",
        "-pd",
        type=_text_type,
        default=None,
        help="define project folder path for pytorch")
    parser.add_argument(
        "--pretrain_model",
        "-pm",
        type=_text_type,
        default=None,
        help="pretrain model file of pytorch model")
    parser.add_argument(
        "--enable_code_optim",
        "-co",
        default=True,
        help="Turn on code optimization")
    parser.add_argument(
        "--to_lite", "-tl", default=False, help="convert to Paddle-Lite format")
    parser.add_argument(
        "--lite_valid_places",
        "-vp",
        type=_text_type,
        default="arm",
        help="Specify the executable backend of the model")
    parser.add_argument(
        "--lite_model_type",
        "-mt",
        type=_text_type,
        default="naive_buffer",
        help="The type of lite model")

    return parser


def convert2lite(save_dir,
                 lite_valid_places="arm",
                 lite_model_type="naive_buffer"):
    """Convert to Paddle-Lite format."""

    from paddlelite.lite import Opt
    opt = Opt()
    opt.set_model_dir(save_dir + "/inference_model")
    opt.set_valid_places(lite_valid_places)
    opt.set_model_type(lite_model_type)
    opt.set_optimize_out(save_dir + "/opt")
    opt.run()


def tf2paddle(model_path,
              save_dir,
              define_input_shape=False,
              convert_to_lite=False,
              lite_valid_places="arm",
              lite_model_type="naive_buffer"):
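    """Convert a TensorFlow model into a Paddle static graph model, optionally exporting to Paddle-Lite."""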
    ConverterCheck(task="TensorFlow", convert_state="Start").start()
    # check tensorflow installation and version
    try:
        import os
        os.environ["TF_CPP_MIN_LOG_LEVEL"] = '3'
        import tensorflow as tf
        version = tf.__version__
        if version >= '2.0.0' or version < '1.0.0':
            logging.info(
                "[ERROR] 1.0.0<=TensorFlow<2.0.0 is required, and v1.14.0 is recommended"
            )
            return
    except:
        logging.info(
            "[ERROR] TensorFlow is not installed, use \"pip install TensorFlow\"."
        )
        return

    from x2paddle.decoder.tf_decoder import TFDecoder
    from x2paddle.op_mapper.tf2paddle.tf_op_mapper import TFOpMapper

    logging.info("Now translating model from TensorFlow to Paddle.")
    model = TFDecoder(model_path, define_input_shape=define_input_shape)
    mapper = TFOpMapper(model)
    mapper.paddle_graph.build()
    logging.info("Model optimizing ...")
    from x2paddle.optimizer.optimizer import GraphOptimizer
    graph_opt = GraphOptimizer(source_frame="tf")
    graph_opt.optimize(mapper.paddle_graph)
    logging.info("Model optimized!")
    mapper.paddle_graph.gen_model(save_dir)
    logging.info("Successfully exported Paddle static graph model!")
    ConverterCheck(task="TensorFlow", convert_state="Success").start()
    if convert_to_lite:
        logging.info("Now translating model from Paddle to Paddle Lite ...")
        ConverterCheck(task="TensorFlow", lite_state="Start").start()
        convert2lite(save_dir, lite_valid_places, lite_model_type)
        logging.info("Successfully exported Paddle Lite support model!")
        ConverterCheck(task="TensorFlow", lite_state="Success").start()


def caffe2paddle(proto_file,
                 weight_file,
                 save_dir,
                 caffe_proto,
                 convert_to_lite=False,
                 lite_valid_places="arm",
                 lite_model_type="naive_buffer"):
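    """Convert a Caffe model (prototxt and weights) into a Paddle static graph model, optionally exporting to Paddle-Lite."""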
    ConverterCheck(task="Caffe", convert_state="Start").start()
    from x2paddle.decoder.caffe_decoder import CaffeDecoder
    from x2paddle.op_mapper.caffe2paddle.caffe_op_mapper import CaffeOpMapper
    import google.protobuf as gpb
    ver_part = gpb.__version__.split('.')
    version_satisfy = False
    if (int(ver_part[0]) == 3 and int(ver_part[1]) >= 6) \
            or (int(ver_part[0]) > 3):
        version_satisfy = True
    assert version_satisfy, '[ERROR] google.protobuf >= 3.6.0 is required'
    logging.info("Now translating model from Caffe to Paddle.")
    model = CaffeDecoder(proto_file, weight_file, caffe_proto)
    mapper = CaffeOpMapper(model)
    mapper.paddle_graph.build()
    logging.info("Model optimizing ...")
    from x2paddle.optimizer.optimizer import GraphOptimizer
    graph_opt = GraphOptimizer(source_frame="caffe")
    graph_opt.optimize(mapper.paddle_graph)
    logging.info("Model optimized!")
    mapper.paddle_graph.gen_model(save_dir)
    logging.info("Successfully exported Paddle static graph model!")
    ConverterCheck(task="Caffe", convert_state="Success").start()
    if convert_to_lite:
        logging.info("Now translating model from Paddle to Paddle Lite ...")
        ConverterCheck(task="Caffe", lite_state="Start").start()
        convert2lite(save_dir, lite_valid_places, lite_model_type)
        logging.info("Successfully exported Paddle Lite support model!")
        ConverterCheck(task="Caffe", lite_state="Success").start()


def onnx2paddle(model_path,
                save_dir,
                convert_to_lite=False,
                lite_valid_places="arm",
                lite_model_type="naive_buffer"):
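    """Convert an ONNX model into a Paddle static graph model, optionally exporting to Paddle-Lite."""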
    ConverterCheck(task="ONNX", convert_state="Start").start()
    # check onnx installation and version
    try:
        import onnx
        version = onnx.version.version
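        # encode version x.y.z as x*100 + y*10 + z for a simple numeric comparison (1.6.0 -> 160)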
        v0, v1, v2 = version.split('.')
        version_sum = int(v0) * 100 + int(v1) * 10 + int(v2)
        if version_sum < 160:
            logging.info("[ERROR] onnx>=1.6.0 is required")
            return
    except:
        logging.info(
            "[ERROR] onnx is not installed, use \"pip install onnx==1.6.0\".")
        return
    logging.info("Now translating model from ONNX to Paddle.")

    from x2paddle.decoder.onnx_decoder import ONNXDecoder
    from x2paddle.op_mapper.onnx2paddle.onnx_op_mapper import ONNXOpMapper
    model = ONNXDecoder(model_path)
    mapper = ONNXOpMapper(model)
    mapper.paddle_graph.build()
    logging.info("Model optimizing ...")
    from x2paddle.optimizer.optimizer import GraphOptimizer
    graph_opt = GraphOptimizer(source_frame="onnx")
    graph_opt.optimize(mapper.paddle_graph)
    logging.info("Model optimized.")
    mapper.paddle_graph.gen_model(save_dir)
    logging.info("Successfully exported Paddle static graph model!")
    ConverterCheck(task="ONNX", convert_state="Success").start()
    if convert_to_lite:
        logging.info("Now translating model from Paddle to Paddle Lite ...")
        ConverterCheck(task="ONNX", lite_state="Start").start()
        convert2lite(save_dir, lite_valid_places, lite_model_type)
        logging.info("Successfully exported Paddle Lite support model!")
        ConverterCheck(task="ONNX", lite_state="Success").start()


def pytorch2paddle(module,
                   save_dir,
                   jit_type="trace",
                   input_examples=None,
                   enable_code_optim=True,
                   convert_to_lite=False,
                   lite_valid_places="arm",
                   lite_model_type="naive_buffer"):
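    """Convert a PyTorch module into a Paddle static graph model, optionally exporting to Paddle-Lite.

    A minimal sketch of programmatic use (the torchvision model and dummy input
    below are illustrative assumptions, not part of this file):

        import torch
        import torchvision
        torch_module = torchvision.models.resnet18(pretrained=True)
        torch_module.eval()
        pytorch2paddle(torch_module,
                       save_dir="pd_model",
                       jit_type="trace",
                       input_examples=[torch.randn(1, 3, 224, 224)])
    """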
    ConverterCheck(task="PyTorch", convert_state="Start").start()
    # check pytorch installation and version
    try:
        import torch
        version = torch.__version__
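        # encode version x.y.z as x*100 + y*10 + z for a simple numeric comparison (1.5.0 -> 150)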
        v0, v1, v2 = version.split('.')
        # Avoid the situation where the version is equal to 1.7.0+cu101
        if '+' in v2:
            v2 = v2.split('+')[0]
        version_sum = int(v0) * 100 + int(v1) * 10 + int(v2)
        if version_sum < 150:
            logging.info(
                "[ERROR] PyTorch>=1.5.0 is required, and 1.6.0 is recommended"
            )
            return
        if version_sum > 160:
            logging.info("[WARNING] PyTorch==1.6.0 is recommended")
    except:
        logging.info(
            "[ERROR] PyTorch is not installed, use \"pip install torch==1.6.0 torchvision\"."
        )
        return
    logging.info("Now translating model from PyTorch to Paddle.")

    from x2paddle.decoder.pytorch_decoder import ScriptDecoder, TraceDecoder
    from x2paddle.op_mapper.pytorch2paddle.pytorch_op_mapper import PyTorchOpMapper

    if jit_type == "trace":
        model = TraceDecoder(module, input_examples)
    else:
        model = ScriptDecoder(module, input_examples)
    mapper = PyTorchOpMapper(model)
    mapper.paddle_graph.build()
    logging.info("Model optimizing ...")
    from x2paddle.optimizer.optimizer import GraphOptimizer
    graph_opt = GraphOptimizer(source_frame="pytorch", jit_type=jit_type)
    graph_opt.optimize(mapper.paddle_graph)
    logging.info("Model optimized!")
    mapper.paddle_graph.gen_model(
        save_dir, jit_type=jit_type, enable_code_optim=enable_code_optim)
    logging.info("Successfully exported Paddle static graph model!")
    ConverterCheck(task="PyTorch", convert_state="Success").start()
    if convert_to_lite:
        logging.info("Now translating model from Paddle to Paddle Lite ...")
        ConverterCheck(task="PyTorch", lite_state="Start").start()
        convert2lite(save_dir, lite_valid_places, lite_model_type)
        logging.info("Successfully exported Paddle Lite support model!")
        ConverterCheck(task="PyTorch", lite_state="Success").start()


def main():
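    """Command-line entry: parse arguments, check dependencies and dispatch to the proper converter."""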
    logging.basicConfig(level=logging.INFO)
    if len(sys.argv) < 2:
        logging.info("Use \"x2paddle -h\" to print the help information")
        logging.info(
            "For more information, please follow our github repo below:)")
        logging.info("\nGithub: https://github.com/PaddlePaddle/X2Paddle.git\n")
        return

    parser = arg_parser()
    args = parser.parse_args()

    if args.version:
        import x2paddle
        logging.info("x2paddle-{} with python>=3.5, paddlepaddle>=1.6.0\n".
                     format(x2paddle.__version__))
        return

    if not args.convert_torch_project:
        assert args.framework is not None, "--framework is not defined (support tensorflow/caffe/onnx)"
    assert args.save_dir is not None, "--save_dir is not defined"

    try:
        import platform
        v0, v1, v2 = platform.python_version().split('.')
        if not (int(v0) >= 3 and int(v1) >= 5):
            logging.info("[ERROR] python>=3.5 is required")
            return
        import paddle
        v0, v1, v2 = paddle.__version__.split('.')
        logging.info("paddle.__version__ = {}".format(paddle.__version__))
        if v0 == '0' and v1 == '0' and v2 == '0':
            logging.info(
                "[WARNING] You are using the develop version of paddlepaddle")
        elif int(v0) != 2 or int(v1) < 0:
            logging.info("[ERROR] paddlepaddle>=2.0.0 is required")
            return
    except:
        logging.info(
            "[ERROR] paddlepaddle not installed, use \"pip install paddlepaddle\""
        )

    if args.convert_torch_project:
        assert args.project_dir is not None, "--project_dir should be defined while translating pytorch project"
        from x2paddle.project_convertor.pytorch.convert import main as convert_torch
        convert_torch(args)
    else:
        if args.framework == "tensorflow":
            assert args.model is not None, "--model should be defined while translating tensorflow model"
            define_input_shape = False
            if args.define_input_shape:
                define_input_shape = True
            tf2paddle(
                args.model,
                args.save_dir,
                define_input_shape,
                convert_to_lite=args.to_lite,
                lite_valid_places=args.lite_valid_places,
                lite_model_type=args.lite_model_type)

        elif args.framework == "caffe":
            assert args.prototxt is not None and args.weight is not None, "--prototxt and --weight should be defined while translating caffe model"
            caffe2paddle(
                args.prototxt,
                args.weight,
                args.save_dir,
                args.caffe_proto,
                convert_to_lite=args.to_lite,
                lite_valid_places=args.lite_valid_places,
                lite_model_type=args.lite_model_type)
        elif args.framework == "onnx":
            assert args.model is not None, "--model should be defined while translating onnx model"
            onnx2paddle(
                args.model,
                args.save_dir,
                convert_to_lite=args.to_lite,
                lite_valid_places=args.lite_valid_places,
                lite_model_type=args.lite_model_type)
        elif args.framework == "paddle2onnx":
            logging.info(
                "Paddle to ONNX tool has been migrated to the new github: https://github.com/PaddlePaddle/paddle2onnx"
            )

        else:
            raise Exception(
                "--framework only supports tensorflow/caffe/onnx now")


if __name__ == "__main__":
    main()