From 950b738281db6411a88939dab03054d5824e1844 Mon Sep 17 00:00:00 2001
From: huzhiqiang <912790387@qq.com>
Date: Wed, 13 May 2020 15:20:44 +0800
Subject: [PATCH] [Opt][Python][Framework] Add opt scripts for python installing package (#3612)

---
 lite/api/opt_base.cc                | 31 +++++++++-
 lite/api/opt_base.h                 |  5 +-
 lite/api/python/bin/paddle_lite_opt | 91 +++++++++++++++++++++++++++++
 lite/api/python/pybind/pybind.cc    |  6 +-
 lite/api/python/setup.py.in         |  7 ++-
 lite/api/python/setup_mac.py.in     |  5 +-
 6 files changed, 138 insertions(+), 7 deletions(-)
 create mode 100644 lite/api/python/bin/paddle_lite_opt

diff --git a/lite/api/opt_base.cc b/lite/api/opt_base.cc
index 5af001961a..712d1f7ebb 100644
--- a/lite/api/opt_base.cc
+++ b/lite/api/opt_base.cc
@@ -82,7 +82,7 @@ void OptBase::SetValidPlaces(const std::string& valid_places) {
                   "command argument 'valid_targets'";
 }
 
-void OptBase::SetLiteOut(const std::string& lite_out_name) {
+void OptBase::SetOptimizeOut(const std::string& lite_out_name) {
   lite_out_name_ = lite_out_name;
 }
 
@@ -116,7 +116,7 @@ void OptBase::RunOptimize(const std::string& model_dir_path,
   SetModelFile(model_path);
   SetParamFile(param_path);
   SetValidPlaces(valid_places);
-  SetLiteOut(optimized_out_path);
+  SetOptimizeOut(optimized_out_path);
   CheckIfModelSupported(false);
   OpKernelInfoCollector::Global().SetKernel2path(kernel2path_map);
   opt_config_.set_valid_places(valid_places_);
@@ -248,6 +248,33 @@ void OptBase::PrintHelpInfo() {
       "-----------------------------------------------------------\n";
   std::cout << "opt version:" << opt_version << std::endl << help_info;
 }
+
+void OptBase::PrintExecutableBinHelpInfo() {
+  const std::string opt_version = lite::version();
+  const char help_info[] =
+      "At least one argument should be provided. Valid arguments are listed "
+      "below:\n"
+      "  Arguments of model optimization:\n"
+      "        `--model_dir=`\n"
+      "        `--model_file=`\n"
+      "        `--param_file=`\n"
+      "        `--optimize_out_type=(protobuf|naive_buffer)`\n"
+      "        `--optimize_out=`\n"
+      "        `--valid_targets=(arm|opencl|x86|npu|xpu)`\n"
+      "        `--record_tailoring_info=(true|false)`\n"
+      "  Arguments of model checking and ops information:\n"
+      "        `--print_all_ops=true`   Display all the valid operators of "
+      "Paddle-Lite\n"
+      "        `--print_supported_ops=true "
+      "--valid_targets=(arm|opencl|x86|npu|xpu)`"
+      "  Display valid operators of input targets\n"
+      "        `--print_model_ops=true  --model_dir= "
+      "--valid_targets=(arm|opencl|x86|npu|xpu)`"
+      "  Display operators in the input model\n";
+  std::cout << "paddlelite opt version:" << opt_version << std::endl
+            << help_info << std::endl;
+}
+
 // 2. Print supported info of inputed ops
 void OptBase::PrintOpsInfo(const std::set<std::string>& valid_ops) {
   std::vector<std::string> lite_supported_targets = {"kHost",
diff --git a/lite/api/opt_base.h b/lite/api/opt_base.h
index 3c0051375d..d276f7a463 100644
--- a/lite/api/opt_base.h
+++ b/lite/api/opt_base.h
@@ -48,7 +48,7 @@ class LITE_API OptBase {
   void SetModelFile(const std::string &model_path);
   void SetParamFile(const std::string &param_path);
   void SetValidPlaces(const std::string &valid_places);
-  void SetLiteOut(const std::string &lite_out_name);
+  void SetOptimizeOut(const std::string &lite_out_name);
   void RecordModelInfo(bool record_strip_info = true);
   // set optimized_model type
   void SetModelType(std::string model_type);
@@ -61,7 +61,10 @@ class LITE_API OptBase {
                    const std::string &optimized_out_path = "");
   // fuctions of printing info
   // 1. help info
+  // 1.1 Print help info for opt python api
   void PrintHelpInfo();
+  // 1.2 Print help info for executable opt bin
+  void PrintExecutableBinHelpInfo();
   // 2. PrintOpsInfo
   void PrintOpsInfo(const std::set<std::string> &valid_ops = {});
   // print supported ops on target_types
diff --git a/lite/api/python/bin/paddle_lite_opt b/lite/api/python/bin/paddle_lite_opt
new file mode 100644
index 0000000000..0d506df370
--- /dev/null
+++ b/lite/api/python/bin/paddle_lite_opt
@@ -0,0 +1,91 @@
+#!/usr/bin/env python
+# Copyright @ 2020 Baidu. All rights reserved.
+""" python wrapper file for Paddle-Lite opt tool """
+from __future__ import print_function
+import paddlelite.lite as lite
+import argparse
+
+
+def main():
+    """ main function """
+    a = lite.Opt()
+    parser = argparse.ArgumentParser()
+    parser.add_argument("--model_dir", type=str, required=False,\
+        help="path of the model. This option will be ignored if model_file and param_file exist")
+    parser.add_argument("--model_file", type=str, required=False,\
+        help="model file path of the combined-param model.")
+    parser.add_argument("--param_file", type=str, required=False,\
+        help="param file path of the combined-param model.")
+    parser.add_argument("--optimize_out_type", type=str, required=False, default="naive_buffer",\
+        choices=['protobuf', 'naive_buffer'], \
+        help="store type of the output optimized model. protobuf/naive_buffer.")
+    parser.add_argument("--optimize_out", type=str, required=False,\
+        help="path of the output optimized model")
+    parser.add_argument("--valid_targets", type=str, required=False, default="arm",\
+        help="The targets this model is optimized for, should be one of (arm, opencl, x86), split by space.")
+
+    # arguments of help information
+    parser.add_argument("--print_supported_ops", type=str, default="false",\
+        help="{true, false}\
+              Print supported operators on the input target")
+    parser.add_argument("--print_all_ops", type=str, default="false",\
+        help="{true, false}\
+              Print all the valid operators of Paddle-Lite")
+    parser.add_argument("--print_model_ops", type=str, default="false",\
+        help="{true, false}\
+              Print operators in the input model")
+    parser.add_argument("--display_kernels", type=str, default="false",\
+        help="{true, false}\
+              Display kernel information")
+
+    # arguments of strip lib according to input model
+    parser.add_argument("--record_tailoring_info", type=str, default="false",\
+        help="{true, false}\
+              Record kernel and operator information of the optimized model \
+              for tailored compiling, the information is stored in the optimized \
+              model path as hidden files")
+    parser.add_argument("--model_set", type=str, required=False,\
+        help="path of the model set. This option will be used for \
+              tailoring")
+
+    args = parser.parse_args()
+    """ input opt params """
+    if args.model_dir is not None:
+        a.set_model_dir(args.model_dir)
+    if args.model_set is not None:
+        a.set_modelset_dir(args.model_set)
+    if args.model_file is not None:
+        a.set_model_file(args.model_file)
+    if args.param_file is not None:
+        a.set_param_file(args.param_file)
+    if args.optimize_out_type is not None:
+        a.set_model_type(args.optimize_out_type)
+    if args.optimize_out is not None:
+        a.set_optimize_out(args.optimize_out)
+    if args.valid_targets is not None:
+        a.set_valid_places(args.valid_targets)
+    if args.param_file is not None:
+        a.set_param_file(args.param_file)
+    if args.record_tailoring_info == "true":
+        a.record_model_info(True)
+    """ print ops info """
+    if args.print_all_ops == "true":
+        a.print_all_ops()
+        return 0
+    if args.print_supported_ops == "true":
+        a.print_supported_ops()
+        return 0
+    if args.display_kernels == "true":
+        a.display_kernels_info()
+        return 0
+    if args.print_model_ops == "true":
+        a.check_if_model_supported(True)
+        return 0
+    if ((args.model_dir is None) and (args.model_file is None or args.param_file is None) and (args.model_set is None)) or (args.optimize_out is None):
+        a.executablebin_help()
+        return 1
+    else:
+        a.run()
+        return 0
+if __name__ == "__main__":
+    main()
diff --git a/lite/api/python/pybind/pybind.cc b/lite/api/python/pybind/pybind.cc
index 104275e2e9..2aa265ecb6 100644
--- a/lite/api/python/pybind/pybind.cc
+++ b/lite/api/python/pybind/pybind.cc
@@ -62,15 +62,17 @@ void BindLiteOpt(py::module *m) {
       .def("set_model_file", &OptBase::SetModelFile)
       .def("set_param_file", &OptBase::SetParamFile)
       .def("set_valid_places", &OptBase::SetValidPlaces)
-      .def("set_lite_out", &OptBase::SetLiteOut)
+      .def("set_optimize_out", &OptBase::SetOptimizeOut)
       .def("set_model_type", &OptBase::SetModelType)
       .def("record_model_info", &OptBase::RecordModelInfo)
       .def("run", &OptBase::Run)
       .def("run_optimize", &OptBase::RunOptimize)
       .def("help", &OptBase::PrintHelpInfo)
+      .def("executablebin_help", &OptBase::PrintExecutableBinHelpInfo)
       .def("print_supported_ops", &OptBase::PrintSupportedOps)
       .def("display_kernels_info", &OptBase::DisplayKernelsInfo)
-      .def("print_all_ops", &OptBase::PrintAllOps);
+      .def("print_all_ops", &OptBase::PrintAllOps)
+      .def("check_if_model_supported", &OptBase::CheckIfModelSupported);
 }
 #endif
 static void BindLiteLightPredictor(py::module *m);
diff --git a/lite/api/python/setup.py.in b/lite/api/python/setup.py.in
index 596369f299..cf89a72332 100644
--- a/lite/api/python/setup.py.in
+++ b/lite/api/python/setup.py.in
@@ -41,6 +41,10 @@ for file in files:
         break
 LITE_PATH = INFERENCE_LITE_LIB_PATH + '/python/install/lite'
 PACKAGE_DATA = {'paddlelite': ['lite.so' if os.name!='nt' else 'lite.pyd']}
+
+# copy scripts of paddlelite
+shutil.copy('${PADDLE_SOURCE_DIR}/lite/api/python/bin/paddle_lite_opt', LITE_PATH)
+
 # put all thirdparty libraries in paddlelite.libs
 PACKAGE_DATA['paddlelite.libs'] = []
 LIB_PATH = INFERENCE_LITE_LIB_PATH + '/python/install/libs/'
@@ -55,7 +59,7 @@ if '${WITH_MKL}' == 'ON':
         PACKAGE_DATA['paddlelite.libs'] += ['msvcr120.dll']
 # link lite.so to paddlelite.libs
 if os.name != 'nt':
-    COMMAND = "patchelf --set-rpath '$ORIGIN/../libs/' " + LITE_PATH + "/lite.so"
+    COMMAND = "patchelf --set-rpath '$ORIGIN/libs/' " + LITE_PATH + "/lite.so"
     if os.system(COMMAND) != 0:
         raise Exception("patch third_party libs failed, command: %s" % COMMAND)
 
@@ -85,6 +89,7 @@ setup(
     name='paddlelite',
     version=PADDLELITE_VERSION,
     description='Paddle-Lite Library',
+    scripts=['lite/paddle_lite_opt'],
     packages=['paddlelite', 'paddlelite.libs'],
     package_dir=PACKAGE_DIR,
     package_data=PACKAGE_DATA,
diff --git a/lite/api/python/setup_mac.py.in b/lite/api/python/setup_mac.py.in
index c8dfe2cc5c..b4d53e8400 100644
--- a/lite/api/python/setup_mac.py.in
+++ b/lite/api/python/setup_mac.py.in
@@ -35,6 +35,8 @@ else:
 # core lib of paddlelite is stored as lite.so
 LITE_PATH = '${PADDLE_BINARY_DIR}/inference_lite_lib/python/install/lite'
 PACKAGE_DATA = {'paddlelite': ['lite.so']}
+# copy scripts of paddlelite
+shutil.copy('${PADDLE_SOURCE_DIR}/lite/api/python/bin/paddle_lite_opt', LITE_PATH)
 # put all thirdparty libraries in paddlelite.libs
 PACKAGE_DATA['paddlelite.libs'] = []
 LIB_PATH = '${PADDLE_BINARY_DIR}/inference_lite_lib/python/install/libs'
@@ -45,7 +47,7 @@ if '${WITH_MKL}' == 'ON':
     PACKAGE_DATA['paddlelite.libs'] += ['libmklml.dylib', 'libiomp5.dylib']
 
 # link lite.so to paddlelite.libs
-COMMAND = "install_name_tool -id \"@loader_path/../libs/\" ${PADDLE_BINARY_DIR}\
+COMMAND = "install_name_tool -id \"@loader_path/libs/\" ${PADDLE_BINARY_DIR}\
 /inference_lite_lib/python/install/lite/lite.so"
 if os.system(COMMAND) != 0:
     raise Exception("patch third_party libs failed, command: %s" % COMMAND)
@@ -66,6 +68,7 @@ setup(
     name='paddlelite',
     version=PADDLELITE_VERSION,
     description='Paddle-Lite Library',
+    scripts=['lite/paddle_lite_opt'],
     packages=['paddlelite', 'paddlelite.libs'],
     package_dir=PACKAGE_DIR,
     package_data=PACKAGE_DATA,
-- 
GitLab
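
For context, the sketch below shows roughly how the renamed pybind API added by this patch can be driven directly from Python, mirroring what the paddle_lite_opt wrapper script does. It is a minimal sketch, not part of the patch: it assumes a paddlelite wheel built from this change is installed, and the model and output paths are placeholders.

# Minimal usage sketch (hypothetical paths; assumes a wheel built from this patch).
import paddlelite.lite as lite

opt = lite.Opt()
opt.set_model_dir("./mobilenet_v1")          # placeholder: directory holding the Paddle model
opt.set_valid_places("arm")                  # same value accepted by --valid_targets
opt.set_model_type("naive_buffer")           # same value accepted by --optimize_out_type
opt.set_optimize_out("./mobilenet_v1_opt")   # renamed from set_lite_out in this patch
opt.run()                                    # writes the optimized model to the path set above

The paddle_lite_opt entry point installed by setup.py simply maps its command-line flags onto these same calls.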