提交 950b7382 编写于 作者: H huzhiqiang 提交者: GitHub

[Opt][Python][Framework] Add opt scripts for python installing package (#3612)

上级 2db5023d
......@@ -82,7 +82,7 @@ void OptBase::SetValidPlaces(const std::string& valid_places) {
"command argument 'valid_targets'";
}
void OptBase::SetLiteOut(const std::string& lite_out_name) {
void OptBase::SetOptimizeOut(const std::string& lite_out_name) {
lite_out_name_ = lite_out_name;
}
......@@ -116,7 +116,7 @@ void OptBase::RunOptimize(const std::string& model_dir_path,
SetModelFile(model_path);
SetParamFile(param_path);
SetValidPlaces(valid_places);
SetLiteOut(optimized_out_path);
SetOptimizeOut(optimized_out_path);
CheckIfModelSupported(false);
OpKernelInfoCollector::Global().SetKernel2path(kernel2path_map);
opt_config_.set_valid_places(valid_places_);
......@@ -248,6 +248,33 @@ void OptBase::PrintHelpInfo() {
"-----------------------------------------------------------\n";
std::cout << "opt version:" << opt_version << std::endl << help_info;
}
// Print usage/help text for the standalone executable `opt` binary.
// (PrintHelpInfo() prints the help text for the opt Python API instead.)
void OptBase::PrintExecutableBinHelpInfo() {
  const std::string opt_version = lite::version();
  // NOTE: fixed spelling "inputed" -> "input" in the user-facing message.
  const char help_info[] =
      "At least one argument should be input. Valid arguments are listed "
      "below:\n"
      " Arguments of model optimization:\n"
      " `--model_dir=<model_param_dir>`\n"
      " `--model_file=<model_path>`\n"
      " `--param_file=<param_path>`\n"
      " `--optimize_out_type=(protobuf|naive_buffer)`\n"
      " `--optimize_out=<output_optimize_model_dir>`\n"
      " `--valid_targets=(arm|opencl|x86|npu|xpu)`\n"
      " `--record_tailoring_info=(true|false)`\n"
      " Arguments of model checking and ops information:\n"
      " `--print_all_ops=true` Display all the valid operators of "
      "Paddle-Lite\n"
      " `--print_supported_ops=true "
      "--valid_targets=(arm|opencl|x86|npu|xpu)`"
      " Display valid operators of input targets\n"
      " `--print_model_ops=true --model_dir=<model_param_dir> "
      "--valid_targets=(arm|opencl|x86|npu|xpu)`"
      " Display operators in the input model\n";
  std::cout << "paddlelite opt version:" << opt_version << std::endl
            << help_info << std::endl;
}
// 2. Print supported info of input ops
void OptBase::PrintOpsInfo(const std::set<std::string>& valid_ops) {
std::vector<std::string> lite_supported_targets = {"kHost",
......
......@@ -48,7 +48,7 @@ class LITE_API OptBase {
void SetModelFile(const std::string &model_path);
void SetParamFile(const std::string &param_path);
void SetValidPlaces(const std::string &valid_places);
void SetLiteOut(const std::string &lite_out_name);
void SetOptimizeOut(const std::string &lite_out_name);
void RecordModelInfo(bool record_strip_info = true);
// set optimized_model type
void SetModelType(std::string model_type);
......@@ -61,7 +61,10 @@ class LITE_API OptBase {
const std::string &optimized_out_path = "");
// functions of printing info
// 1. help info
// 1.1 Print help info for opt python api
void PrintHelpInfo();
// 1.2 Print help info for executable opt bin
void PrintExecutableBinHelpInfo();
// 2. PrintOpsInfo
void PrintOpsInfo(const std::set<std::string> &valid_ops =
{}); // print supported ops on target_types
......
#!/usr/bin/env python
# Copyright @ 2020 Baidu. All rights reserved.
""" python wrapper file for Paddle-Lite opt tool """
from __future__ import print_function

import argparse
import sys

import paddlelite.lite as lite
def main():
    """Entry point of the paddle_lite_opt command-line tool.

    Builds the argument parser, forwards the user's options to the
    underlying ``lite.Opt()`` optimizer, and then either prints the
    requested ops/kernels information or runs model optimization.

    Returns:
        int: 0 on success, 1 when the required model / output
        arguments are missing.
    """
    opt = lite.Opt()
    parser = argparse.ArgumentParser()
    # arguments of model optimization
    parser.add_argument(
        "--model_dir", type=str, required=False,
        help="path of the model. This option will be ignored if "
             "model_file and param_file exist")
    parser.add_argument(
        "--model_file", type=str, required=False,
        help="model file path of the combined-param model.")
    parser.add_argument(
        "--param_file", type=str, required=False,
        help="param file path of the combined-param model.")
    parser.add_argument(
        "--optimize_out_type", type=str, required=False,
        default="naive_buffer", choices=['protobuf', 'naive_buffer'],
        help="store type of the output optimized model. "
             "protobuf/naive_buffer.")
    parser.add_argument(
        "--optimize_out", type=str, required=False,
        help="path of the output optimized model")
    parser.add_argument(
        "--valid_targets", type=str, required=False, default="arm",
        help="The targets this model optimized for, should be one of "
             "(arm, opencl, x86), split by space.")
    # arguments of help information
    parser.add_argument(
        "--print_supported_ops", type=str, default="false",
        help="{true, false} Print supported operators on the input target")
    parser.add_argument(
        "--print_all_ops", type=str, default="false",
        help="{true, false} Print all the valid operators of Paddle-Lite")
    parser.add_argument(
        "--print_model_ops", type=str, default="false",
        help="{true, false} Print operators in the input model")
    parser.add_argument(
        "--display_kernels", type=str, default="false",
        help="{true, false} Display kernel information")
    # arguments of strip lib according to input model
    parser.add_argument(
        "--record_tailoring_info", type=str, default="false",
        help="{true, false} Record kernels and operators information of "
             "the optimized model for tailoring compiling, information "
             "are stored into optimized model path as hidden files")
    parser.add_argument(
        "--model_set", type=str, required=False,
        help="path of the models set. This option will be used to "
             "specific tailoring")
    args = parser.parse_args()

    # forward the parsed options to the optimizer
    if args.model_dir is not None:
        opt.set_model_dir(args.model_dir)
    if args.model_set is not None:
        opt.set_modelset_dir(args.model_set)
    if args.model_file is not None:
        opt.set_model_file(args.model_file)
    if args.param_file is not None:
        opt.set_param_file(args.param_file)
    if args.optimize_out_type is not None:
        opt.set_model_type(args.optimize_out_type)
    if args.optimize_out is not None:
        opt.set_optimize_out(args.optimize_out)
    if args.valid_targets is not None:
        opt.set_valid_places(args.valid_targets)
    if args.record_tailoring_info == "true":
        opt.record_model_info(True)

    # information-only modes: print and exit early
    if args.print_all_ops == "true":
        opt.print_all_ops()
        return 0
    if args.print_supported_ops == "true":
        opt.print_supported_ops()
        return 0
    if args.display_kernels == "true":
        opt.display_kernels_info()
        return 0
    if args.print_model_ops == "true":
        opt.check_if_model_supported(True)
        return 0

    # optimization requires a model (or model set) and an output path
    if ((args.model_dir is None
         and (args.model_file is None or args.param_file is None)
         and args.model_set is None) or args.optimize_out is None):
        opt.executablebin_help()
        return 1
    opt.run()
    return 0
if __name__ == "__main__":
    # main() returns 0/1 deliberately; propagate it as the process
    # exit status so shell scripts can detect failures.
    sys.exit(main())
......@@ -62,15 +62,17 @@ void BindLiteOpt(py::module *m) {
.def("set_model_file", &OptBase::SetModelFile)
.def("set_param_file", &OptBase::SetParamFile)
.def("set_valid_places", &OptBase::SetValidPlaces)
.def("set_lite_out", &OptBase::SetLiteOut)
.def("set_optimize_out", &OptBase::SetOptimizeOut)
.def("set_model_type", &OptBase::SetModelType)
.def("record_model_info", &OptBase::RecordModelInfo)
.def("run", &OptBase::Run)
.def("run_optimize", &OptBase::RunOptimize)
.def("help", &OptBase::PrintHelpInfo)
.def("executablebin_help", &OptBase::PrintExecutableBinHelpInfo)
.def("print_supported_ops", &OptBase::PrintSupportedOps)
.def("display_kernels_info", &OptBase::DisplayKernelsInfo)
.def("print_all_ops", &OptBase::PrintAllOps);
.def("print_all_ops", &OptBase::PrintAllOps)
.def("check_if_model_supported", &OptBase::CheckIfModelSupported);
}
#endif
static void BindLiteLightPredictor(py::module *m);
......
......@@ -41,6 +41,10 @@ for file in files:
break
LITE_PATH = INFERENCE_LITE_LIB_PATH + '/python/install/lite'
PACKAGE_DATA = {'paddlelite': ['lite.so' if os.name!='nt' else 'lite.pyd']}
# copy scripts of paddlelite
shutil.copy('${PADDLE_SOURCE_DIR}/lite/api/python/bin/paddle_lite_opt', LITE_PATH)
# put all thirdparty libraries in paddlelite.libs
PACKAGE_DATA['paddlelite.libs'] = []
LIB_PATH = INFERENCE_LITE_LIB_PATH + '/python/install/libs/'
......@@ -55,7 +59,7 @@ if '${WITH_MKL}' == 'ON':
PACKAGE_DATA['paddlelite.libs'] += ['msvcr120.dll']
# link lite.so to paddlelite.libs
if os.name != 'nt':
COMMAND = "patchelf --set-rpath '$ORIGIN/../libs/' " + LITE_PATH + "/lite.so"
COMMAND = "patchelf --set-rpath '$ORIGIN/libs/' " + LITE_PATH + "/lite.so"
if os.system(COMMAND) != 0:
raise Exception("patch third_party libs failed, command: %s" % COMMAND)
......@@ -85,6 +89,7 @@ setup(
name='paddlelite',
version=PADDLELITE_VERSION,
description='Paddle-Lite Library',
scripts=['lite/paddle_lite_opt'],
packages=['paddlelite', 'paddlelite.libs'],
package_dir=PACKAGE_DIR,
package_data=PACKAGE_DATA,
......
......@@ -35,6 +35,8 @@ else:
# core lib of paddlelite is stored as lite.so
LITE_PATH = '${PADDLE_BINARY_DIR}/inference_lite_lib/python/install/lite'
PACKAGE_DATA = {'paddlelite': ['lite.so']}
# copy scripts of paddlelite
shutil.copy('${PADDLE_SOURCE_DIR}/lite/api/python/bin/paddle_lite_opt', LITE_PATH)
# put all thirdparty libraries in paddlelite.libs
PACKAGE_DATA['paddlelite.libs'] = []
LIB_PATH = '${PADDLE_BINARY_DIR}/inference_lite_lib/python/install/libs'
......@@ -45,7 +47,7 @@ if '${WITH_MKL}' == 'ON':
PACKAGE_DATA['paddlelite.libs'] += ['libmklml.dylib', 'libiomp5.dylib']
# link lite.so to paddlelite.libs
COMMAND = "install_name_tool -id \"@loader_path/../libs/\" ${PADDLE_BINARY_DIR}\
COMMAND = "install_name_tool -id \"@loader_path/libs/\" ${PADDLE_BINARY_DIR}\
/inference_lite_lib/python/install/lite/lite.so"
if os.system(COMMAND) != 0:
raise Exception("patch third_party libs failed, command: %s" % COMMAND)
......@@ -66,6 +68,7 @@ setup(
name='paddlelite',
version=PADDLELITE_VERSION,
description='Paddle-Lite Library',
scripts=['lite/paddle_lite_opt'],
packages=['paddlelite', 'paddlelite.libs'],
package_dir=PACKAGE_DIR,
package_data=PACKAGE_DATA,
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册