# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from six import text_type as _text_type
from x2paddle import program
from x2paddle.utils import ConverterCheck
import argparse
import sys
import logging
import time


def arg_parser():
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--model",
        "-m",
        type=_text_type,
        default=None,
        help="define model file path for tensorflow or onnx")
    parser.add_argument(
        "--prototxt",
        "-p",
        type=_text_type,
        default=None,
        help="prototxt file of caffe model")
    parser.add_argument(
        "--weight",
        "-w",
        type=_text_type,
        default=None,
        help="weight file of caffe model")
    parser.add_argument(
        "--save_dir",
        "-s",
        type=_text_type,
        default=None,
        help="path to save translated model")
    parser.add_argument(
        "--framework",
        "-f",
        type=_text_type,
        default=None,
        help="define which deep learning framework (tensorflow/caffe/onnx/paddle2onnx)"
    )
    parser.add_argument(
        "--caffe_proto",
        "-c",
        type=_text_type,
        default=None,
        help="optional: the .py file compiled from the caffe.proto file of the caffe model"
    )
    parser.add_argument(
        "--version",
        "-v",
        action="store_true",
        default=False,
        help="get version of x2paddle")
    parser.add_argument(
        "--define_input_shape",
        "-d",
        action="store_true",
        default=False,
        help="define input shape for tf model")
    parser.add_argument(
        "--convert_torch_project",
        "-tp",
        action='store_true',
        help="Convert the PyTorch Project.")
    parser.add_argument(
        "--project_dir",
        "-pd",
        type=_text_type,
        default=None,
        help="define project folder path for pytorch")
    parser.add_argument(
        "--pretrain_model",
        "-pm",
        type=_text_type,
        default=None,
        help="pretrained model file of pytorch model")
    parser.add_argument(
        "--enable_code_optim",
        "-co",
        default=False,
        help="Turn on code optimization")
    parser.add_argument(
        "--enable_onnx_checker",
        "-oc",
        default=True,
        help="Turn on onnx model checker")
    parser.add_argument(
        "--disable_feedback",
        "-df",
        default=False,
        help="Turn off feedback of model conversion.")
    parser.add_argument(
        "--to_lite", "-tl", default=False, help="convert to Paddle-Lite format")
    parser.add_argument(
        "--lite_valid_places",
        "-vp",
        type=_text_type,
        default="arm",
        help="Specify the executable backend of the model")
    parser.add_argument(
        "--lite_model_type",
        "-mt",
        type=_text_type,
        default="naive_buffer",
        help="The type of lite model")

    return parser
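
# Illustrative CLI usage of the arguments defined above (the file names below
# are hypothetical placeholders):
#   x2paddle --framework=onnx --model=model.onnx --save_dir=pd_model
#   x2paddle --framework=caffe --prototxt=deploy.prototxt --weight=deploy.caffemodel --save_dir=pd_model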


def convert2lite(save_dir,
                 lite_valid_places="arm",
                 lite_model_type="naive_buffer"):
    """Convert to Paddle-Lite format."""

    from paddlelite.lite import Opt
    opt = Opt()
    opt.set_model_dir(save_dir + "/inference_model")
    opt.set_valid_places(lite_valid_places)
    opt.set_model_type(lite_model_type)
    opt.set_optimize_out(save_dir + "/opt")
    opt.run()
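
# A minimal sketch of standalone use, assuming gen_model() has already written
# an inference model to "<save_dir>/inference_model" ("pd_model" below is a
# hypothetical placeholder); the optimized Lite model is written to
# "<save_dir>/opt":
#   convert2lite("pd_model", lite_valid_places="arm", lite_model_type="naive_buffer")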


def tf2paddle(model_path,
              save_dir,
              define_input_shape=False,
              convert_to_lite=False,
              lite_valid_places="arm",
              lite_model_type="naive_buffer",
              disable_feedback=False):
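    """Convert a TensorFlow model to a Paddle static graph model.

    Optionally exports a Paddle-Lite model via convert2lite() when
    convert_to_lite is True.
    """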
    # for convert_id
    time_info = int(time.time())
    if not disable_feedback:
        ConverterCheck(
            task="TensorFlow", time_info=time_info,
            convert_state="Start").start()
    # check tensorflow installation and version
    try:
        import os
        os.environ["TF_CPP_MIN_LOG_LEVEL"] = '3'
        import tensorflow as tf
        version = tf.__version__
        if version >= '2.0.0' or version < '1.0.0':
            logging.info(
                "[ERROR] 1.0.0<=TensorFlow<2.0.0 is required, and v1.14.0 is recommended"
            )
            return
    except:
        logging.info(
            "[ERROR] TensorFlow is not installed, use \"pip install TensorFlow\"."
        )
        return

    from x2paddle.decoder.tf_decoder import TFDecoder
    from x2paddle.op_mapper.tf2paddle.tf_op_mapper import TFOpMapper

    logging.info("Now translating model from TensorFlow to Paddle.")
    model = TFDecoder(model_path, define_input_shape=define_input_shape)
    mapper = TFOpMapper(model)
    mapper.paddle_graph.build()
    logging.info("Model optimizing ...")
    from x2paddle.optimizer.optimizer import GraphOptimizer
    graph_opt = GraphOptimizer(source_frame="tf")
    graph_opt.optimize(mapper.paddle_graph)
    logging.info("Model optimized!")
    mapper.paddle_graph.gen_model(save_dir)
    logging.info("Successfully exported Paddle static graph model!")
    if not disable_feedback:
        ConverterCheck(
            task="TensorFlow", time_info=time_info,
            convert_state="Success").start()
    if convert_to_lite:
        logging.info("Now translating model from Paddle to Paddle Lite ...")
        if not disable_feedback:
            ConverterCheck(
                task="TensorFlow", time_info=time_info,
                lite_state="Start").start()
        convert2lite(save_dir, lite_valid_places, lite_model_type)
        logging.info("Successfully exported Paddle Lite support model!")
        if not disable_feedback:
            ConverterCheck(
                task="TensorFlow", time_info=time_info,
                lite_state="Success").start()
    # for convert survey
    logging.info("================================================")
    logging.info("")
    logging.info(
        "Model converted! Please fill in this survey to help improve X2Paddle: https://iwenjuan.baidu.com/?code=npyd51"
    )
    logging.info("")
    logging.info("================================================")


def caffe2paddle(proto_file,
                 weight_file,
                 save_dir,
                 caffe_proto,
                 convert_to_lite=False,
                 lite_valid_places="arm",
                 lite_model_type="naive_buffer",
                 disable_feedback=False):
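    """Convert a Caffe model (prototxt + weights) to a Paddle static graph model.

    Optionally exports a Paddle-Lite model via convert2lite() when
    convert_to_lite is True.
    """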
    # for convert_id
    time_info = int(time.time())
    if not disable_feedback:
        ConverterCheck(
            task="Caffe", time_info=time_info, convert_state="Start").start()
    from x2paddle.decoder.caffe_decoder import CaffeDecoder
    from x2paddle.op_mapper.caffe2paddle.caffe_op_mapper import CaffeOpMapper
    import google.protobuf as gpb
    ver_part = gpb.__version__.split('.')
    version_satisfy = False
    if (int(ver_part[0]) == 3 and int(ver_part[1]) >= 6) \
            or (int(ver_part[0]) > 3):
        version_satisfy = True
    assert version_satisfy, '[ERROR] google.protobuf >= 3.6.0 is required'
    logging.info("Now translating model from Caffe to Paddle.")
    model = CaffeDecoder(proto_file, weight_file, caffe_proto)
    mapper = CaffeOpMapper(model)
    mapper.paddle_graph.build()
    logging.info("Model optimizing ...")
    from x2paddle.optimizer.optimizer import GraphOptimizer
    graph_opt = GraphOptimizer(source_frame="caffe")
    graph_opt.optimize(mapper.paddle_graph)
    logging.info("Model optimized!")
    mapper.paddle_graph.gen_model(save_dir)
    logging.info("Successfully exported Paddle static graph model!")
    if not disable_feedback:
        ConverterCheck(
            task="Caffe", time_info=time_info, convert_state="Success").start()
    if convert_to_lite:
        logging.info("Now translating model from Paddle to Paddle Lite ...")
        if not disable_feedback:
            ConverterCheck(
                task="Caffe", time_info=time_info, lite_state="Start").start()
        convert2lite(save_dir, lite_valid_places, lite_model_type)
        logging.info("Successfully exported Paddle Lite support model!")
        if not disable_feedback:
            ConverterCheck(
                task="Caffe", time_info=time_info, lite_state="Success").start()
    # for convert survey
    logging.info("================================================")
    logging.info("")
    logging.info(
        "Model converted! Please fill in this survey to help improve X2Paddle: https://iwenjuan.baidu.com/?code=npyd51"
    )
    logging.info("")
    logging.info("================================================")


def onnx2paddle(model_path,
                save_dir,
                convert_to_lite=False,
                lite_valid_places="arm",
                lite_model_type="naive_buffer",
                disable_feedback=False,
                enable_onnx_checker=True):
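    """Convert an ONNX model to a Paddle static graph model.

    Optionally exports a Paddle-Lite model via convert2lite() when
    convert_to_lite is True.
    """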
    # for convert_id
    time_info = int(time.time())
    if not disable_feedback:
        ConverterCheck(
            task="ONNX", time_info=time_info, convert_state="Start").start()
    # check onnx installation and version
    try:
        import onnx
        version = onnx.version.version
        v0, v1, v2 = version.split('.')
        version_sum = int(v0) * 100 + int(v1) * 10 + int(v2)
        if version_sum < 160:
            logging.info("[ERROR] onnx>=1.6.0 is required")
            return
    except:
        logging.info(
            "[ERROR] onnx is not installed, use \"pip install onnx==1.6.0\".")
        return
    logging.info("Now translating model from ONNX to Paddle.")

    from x2paddle.decoder.onnx_decoder import ONNXDecoder
    from x2paddle.op_mapper.onnx2paddle.onnx_op_mapper import ONNXOpMapper
    model = ONNXDecoder(model_path, enable_onnx_checker)
    mapper = ONNXOpMapper(model)
    mapper.paddle_graph.build()
    logging.info("Model optimizing ...")
    from x2paddle.optimizer.optimizer import GraphOptimizer
    graph_opt = GraphOptimizer(source_frame="onnx")
    graph_opt.optimize(mapper.paddle_graph)
    logging.info("Model optimized!")
    mapper.paddle_graph.gen_model(save_dir)
    logging.info("Successfully exported Paddle static graph model!")
    if not disable_feedback:
        ConverterCheck(
            task="ONNX", time_info=time_info, convert_state="Success").start()
    if convert_to_lite:
        logging.info("Now translating model from Paddle to Paddle Lite ...")
        if not disable_feedback:
            ConverterCheck(
                task="ONNX", time_info=time_info, lite_state="Start").start()
        convert2lite(save_dir, lite_valid_places, lite_model_type)
        logging.info("Successfully exported Paddle Lite support model!")
        if not disable_feedback:
            ConverterCheck(
                task="ONNX", time_info=time_info, lite_state="Success").start()
    # for convert survey
    logging.info("================================================")
    logging.info("")
    logging.info(
        "Model converted! Please fill in this survey to help improve X2Paddle: https://iwenjuan.baidu.com/?code=npyd51"
    )
    logging.info("")
    logging.info("================================================")


def pytorch2paddle(module,
                   save_dir,
                   jit_type="trace",
                   input_examples=None,
                   enable_code_optim=False,
                   convert_to_lite=False,
                   lite_valid_places="arm",
                   lite_model_type="naive_buffer",
                   disable_feedback=False):
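    """Convert a PyTorch nn.Module to a Paddle static graph model.

    The module is decoded with TraceDecoder when jit_type is "trace" and
    with ScriptDecoder otherwise; optionally exports a Paddle-Lite model
    via convert2lite() when convert_to_lite is True.
    """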
    # for convert_id
    time_info = int(time.time())
    if not disable_feedback:
        ConverterCheck(
            task="PyTorch", time_info=time_info, convert_state="Start").start()
    # check pytorch installation and version
    try:
        import torch
        version = torch.__version__
        v0, v1, v2 = version.split('.')
        # Avoid the situation where the version is equal to 1.7.0+cu101
        if '+' in v2:
            v2 = v2.split('+')[0]
        version_sum = int(v0) * 100 + int(v1) * 10 + int(v2)
        if version_sum < 150:
            logging.info(
                "[ERROR] PyTorch>=1.5.0 is required, and 1.6.0 is recommended"
            )
            return
        if version_sum > 160:
            logging.info("[WARNING] PyTorch==1.6.0 is recommended")
    except:
        logging.info(
            "[ERROR] PyTorch is not installed, use \"pip install torch==1.6.0 torchvision\"."
        )
        return
    logging.info("Now translating model from PyTorch to Paddle.")

    from x2paddle.decoder.pytorch_decoder import ScriptDecoder, TraceDecoder
    from x2paddle.op_mapper.pytorch2paddle.pytorch_op_mapper import PyTorchOpMapper

    if jit_type == "trace":
        model = TraceDecoder(module, input_examples)
    else:
        model = ScriptDecoder(module, input_examples)
    mapper = PyTorchOpMapper(model)
    mapper.paddle_graph.build()
    logging.info("Model optimizing ...")
    from x2paddle.optimizer.optimizer import GraphOptimizer
    graph_opt = GraphOptimizer(source_frame="pytorch", jit_type=jit_type)
    graph_opt.optimize(mapper.paddle_graph)
    logging.info("Model optimized!")
    mapper.paddle_graph.gen_model(
        save_dir, jit_type=jit_type, enable_code_optim=enable_code_optim)
    logging.info("Successfully exported Paddle static graph model!")
    if not disable_feedback:
        ConverterCheck(
            task="PyTorch", time_info=time_info,
            convert_state="Success").start()
    if convert_to_lite:
        logging.info("Now translating model from Paddle to Paddle Lite ...")
        if not disable_feedback:
            ConverterCheck(
                task="PyTorch", time_info=time_info, lite_state="Start").start()
        convert2lite(save_dir, lite_valid_places, lite_model_type)
        logging.info("Successfully exported Paddle Lite support model!")
        if not disable_feedback:
            ConverterCheck(
                task="PyTorch", time_info=time_info,
                lite_state="Success").start()
    # for convert survey
    logging.info("================================================")
    logging.info("")
    logging.info(
        "Model converted! Please fill in this survey to help improve X2Paddle: https://iwenjuan.baidu.com/?code=npyd51"
    )
    logging.info("")
    logging.info("================================================")


def main():
    logging.basicConfig(level=logging.INFO)
    if len(sys.argv) < 2:
        logging.info("Use \"x2paddle -h\" to print the help information")
        logging.info(
            "For more information, please visit our GitHub repo below:")
        logging.info("\nGithub: https://github.com/PaddlePaddle/X2Paddle.git\n")
        return

    parser = arg_parser()
    args = parser.parse_args()

    if args.version:
        import x2paddle
        logging.info("x2paddle-{} with python>=3.5, paddlepaddle>=2.0.0\n".
                     format(x2paddle.__version__))
        return

    if not args.convert_torch_project:
        assert args.framework is not None, "--framework is not defined (supported: tensorflow/caffe/onnx)"
    assert args.save_dir is not None, "--save_dir is not defined"

    try:
        import platform
        v0, v1, v2 = platform.python_version().split('.')
        if not (int(v0) > 3 or (int(v0) == 3 and int(v1) >= 5)):
            logging.info("[ERROR] python>=3.5 is required")
            return
        import paddle
        v0, v1, v2 = paddle.__version__.split('.')
        logging.info("paddle.__version__ = {}".format(paddle.__version__))
        if v0 == '0' and v1 == '0' and v2 == '0':
            logging.info(
                "[WARNING] You are using a develop version of paddlepaddle")
        elif int(v0) != 2 or int(v1) < 0:
            logging.info("[ERROR] paddlepaddle>=2.0.0 is required")
            return
    except:
        logging.info(
            "[ERROR] paddlepaddle is not installed, use \"pip install paddlepaddle\""
        )

    if args.convert_torch_project:
        assert args.project_dir is not None, "--project_dir should be defined while translating pytorch project"
        from x2paddle.project_convertor.pytorch.convert import main as convert_torch
        convert_torch(args)
    else:
        if args.framework == "tensorflow":
            assert args.model is not None, "--model should be defined while translating tensorflow model"
            define_input_shape = False
            if args.define_input_shape:
                define_input_shape = True
            tf2paddle(
                args.model,
                args.save_dir,
                define_input_shape,
                convert_to_lite=args.to_lite,
                lite_valid_places=args.lite_valid_places,
                lite_model_type=args.lite_model_type,
                disable_feedback=args.disable_feedback)

        elif args.framework == "caffe":
            assert args.prototxt is not None and args.weight is not None, "--prototxt and --weight should be defined while translating caffe model"
            caffe2paddle(
                args.prototxt,
                args.weight,
                args.save_dir,
                args.caffe_proto,
                convert_to_lite=args.to_lite,
                lite_valid_places=args.lite_valid_places,
                lite_model_type=args.lite_model_type,
                disable_feedback=args.disable_feedback)
        elif args.framework == "onnx":
            assert args.model is not None, "--model should be defined while translating onnx model"
            onnx2paddle(
                args.model,
                args.save_dir,
                convert_to_lite=args.to_lite,
                lite_valid_places=args.lite_valid_places,
                lite_model_type=args.lite_model_type,
                disable_feedback=args.disable_feedback,
                enable_onnx_checker=args.enable_onnx_checker)
        elif args.framework == "paddle2onnx":
            logging.info(
                "The Paddle to ONNX tool has been migrated to a new repo: https://github.com/PaddlePaddle/paddle2onnx"
            )

        else:
            raise Exception(
                "--framework only supports tensorflow/caffe/onnx now")


if __name__ == "__main__":
    main()