Unverified commit bbcdb92e · Authored by huzhiqiang · Committed by GitHub

[ Opt Fix ] Add comments about opt version and version check (#2889)

Parent 14abbc3c
......@@ -26,6 +26,7 @@
#include "lite/api/paddle_use_ops.h"
#include "lite/api/paddle_use_passes.h"
#include "lite/core/op_registry.h"
#include "lite/core/version.h"
#include "lite/model_parser/compatible_pb.h"
#include "lite/model_parser/pb/program_desc.h"
#include "lite/utils/cp_logging.h"
......@@ -239,6 +240,7 @@ void PrintOpsInfo(std::set<std::string> valid_ops = {}) {
/// Print help information
void PrintHelpInfo() {
// at least one argument should be provided
const std::string opt_version = lite::version();
const char help_info[] =
"At least one argument should be inputed. Valid arguments are listed "
"below:\n"
......@@ -260,7 +262,8 @@ void PrintHelpInfo() {
" `--print_model_ops=true --model_dir=<model_param_dir> "
"--valid_targets=(arm|opencl|x86|npu|xpu)`"
" Display operators in the input model\n";
std::cout << help_info << std::endl;
std::cout << "opt version:" << opt_version << std::endl
<< help_info << std::endl;
exit(1);
}
......
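With this change, running opt with no arguments reports the tool's own version ahead of the usage text, so it can be compared against the Paddle-Lite runtime that will load the model. An illustrative invocation (binary name and version value are placeholders; the actual string comes from lite::version()):

$ ./opt
opt version:v2.3.0
At least one argument should be provided. Valid arguments are listed below:
...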
......@@ -28,6 +28,9 @@ void RunModel(std::string model_dir) {
// 1. Set MobileConfig
MobileConfig config;
config.set_model_dir(model_dir);
// To load a model transformed by opt after release/v2.3.0, please use
// `set_model_from_file` as shown below.
// config.set_model_from_file(model_dir);
// 2. Create PaddlePredictor by MobileConfig
std::shared_ptr<PaddlePredictor> predictor =
......
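The comment added above points light-API users at the newer loading entry point. Below is a minimal sketch of loading an opt-generated .nb model with the recommended call; the header name and namespace follow the Paddle-Lite light-API demos, and the model path is illustrative only.

#include <iostream>
#include <memory>
#include "paddle_api.h"  // Paddle-Lite light-API header used by the demos

using namespace paddle::lite_api;  // NOLINT

int main() {
  // Recommended after release/v2.3.0: point the config at the single .nb
  // file produced by opt instead of an uncombined model directory.
  MobileConfig config;
  config.set_model_from_file("mobilenet_v1.nb");  // illustrative path

  // Create the predictor exactly as in the demo above.
  std::shared_ptr<PaddlePredictor> predictor =
      CreatePaddlePredictor<MobileConfig>(config);

  // Fill input tensors and call predictor->Run() as the rest of the demo does.
  std::cout << "model loaded" << std::endl;
  return 0;
}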
......@@ -696,6 +696,13 @@ void LoadModelNaive(const std::string &model_dir,
CHECK(scope);
cpp_prog->ClearBlocks();
LOG(WARNING)
<< "WARNING: MobileConfig::set_model_dir and "
"MobileConfig::set_model_buffer are deprecated APIs "
"and will be removed in latter release. \n"
" MobileConfig::set_model_from_file(const std::string& model_file)"
" and MobileConfig::set_model_from_buffer(const std::string& "
"model_buffer) are recommended.";
// Load model
const std::string prog_path = model_dir + "/__model__.nb";
naive_buffer::BinaryTable table;
......@@ -786,11 +793,23 @@ void LoadModelNaiveFromFile(const std::string &filename,
// (2)get opt version
char opt_version[16];
const uint64_t paddle_version_length = 16 * sizeof(char);
const uint64_t opt_version_length = 16 * sizeof(char);
ReadModelDataFromFile<char>(
opt_version, prog_path, &offset, paddle_version_length);
opt_version, prog_path, &offset, opt_version_length);
VLOG(4) << "Opt_version:" << opt_version;
// check version, opt's version should be consistent with current Paddle-Lite
// version.
const std::string paddle_version = version();
const std::string opt_version_str = opt_version;
if (paddle_version != opt_version_str) {
LOG(WARNING) << "warning: the version of opt that transformed this model "
"is not consistent with current Paddle-Lite version."
"\n version of opt:"
<< opt_version
<< "\n version of current Paddle-Lite:" << paddle_version;
}
// (3)get topo_size
uint64_t topo_size;
ReadModelDataFromFile<uint64_t>(
......
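For reference, the check above relies on the .nb file carrying the opt version as a fixed 16-byte char field in its header. The following is a standalone sketch of the same comparison under that assumption; the offset of the field (whatever step (1) of LoadModelNaiveFromFile has already consumed) is taken as a parameter because the hunk does not show it, and the function name is hypothetical.

#include <cstddef>
#include <cstdint>
#include <fstream>
#include <iostream>
#include <string>

// Sketch of the version check added above: read the 16-byte opt version
// field from a .nb file and compare it with the running Paddle-Lite version.
bool OptVersionMatches(const std::string& nb_path,
                       uint64_t opt_version_offset,
                       const std::string& current_lite_version) {
  std::ifstream file(nb_path, std::ios::binary);
  if (!file.is_open()) {
    std::cerr << "cannot open " << nb_path << std::endl;
    return false;
  }
  char opt_version[16] = {0};
  file.seekg(static_cast<std::streamoff>(opt_version_offset));
  file.read(opt_version, sizeof(opt_version));

  // The field is fixed-width; trim at the first '\0' if the version is shorter.
  std::size_t len = 0;
  while (len < sizeof(opt_version) && opt_version[len] != '\0') ++len;
  const std::string opt_version_str(opt_version, len);

  if (opt_version_str != current_lite_version) {
    std::cerr << "warning: this model was transformed by opt "
              << opt_version_str << ", but the current Paddle-Lite version is "
              << current_lite_version << std::endl;
    return false;
  }
  return true;
}

In LoadModelNaiveFromFile the mismatch only produces a LOG(WARNING) and loading continues; this sketch returns the result instead so a caller can decide what to do.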