// Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

/*
 * This file defines Opt and basic functions about model transformation.
 */

#ifndef PADDLE_LITE_OPT_H_  // NOLINT
#define PADDLE_LITE_OPT_H_
#include <algorithm>
#include <iomanip>
#include <set>
#include <string>
#include <vector>
// stores the map that records the source_file path of each kernel.
#include "kernel_src_map.h"  // NOLINT
#include "lite/api/cxx_api.h"
// version of Paddle-lite
#include "lite/core/version.h"
// model parser functions to pre-load model to verify if this model is supported
#include "lite/model_parser/compatible_pb.h"
#include "lite/model_parser/pb/program_desc.h"
#include "lite/utils/string.h"
// recorded all the ops supported by paddle-lite
#include "supported_kernel_op_info.h"  // NOLINT

namespace paddle {
namespace lite_api {

/// The PaddlePredictor defines the basic interfaces for different kinds of
/// predictors.
class LITE_API OptBase {
 public:
  OptBase() = default;
  void SetModelSetDir(const std::string &model_set_path);
47
  void SetModelDir(const std::string &model_dir_path);
48 49 50
  void SetModelFile(const std::string &model_path);
  void SetParamFile(const std::string &param_path);
  void SetValidPlaces(const std::string &valid_places);
51 52
  void SetLiteOut(const std::string &lite_out_name);
  void RecordModelInfo(bool record_strip_info = true);
53 54 55
  // set optimized_model type
  void SetModelType(std::string model_type);
  // transform and save the optimized model
56 57 58 59 60 61
  void Run();
  void RunOptimize(const std::string &model_dir_path = "",
                   const std::string &model_path = "",
                   const std::string &param_path = "",
                   const std::string &valid_places = "",
                   const std::string &optimized_out_path = "");
62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78
  // fuctions of printing info
  // 1. help info
  void PrintHelpInfo();
  // 2. PrintOpsInfo
  void PrintOpsInfo(const std::set<std::string> &valid_ops =
                        {});  // print supported ops on target_types
  void PrintAllOps();         // print all ops
  void PrintSupportedOps();   // print ops supported on valid_places_
  void DisplayKernelsInfo();  // Display kernel information
  // 3. Check if this model is supported
  void CheckIfModelSupported(bool print_ops_info = true);

 private:
  CxxConfig opt_config_;
  // valid places for the optimized_model
  std::vector<Place> valid_places_;
  // filename of the optimized_model
79
  std::string lite_out_name_;
80 81 82 83
  // type of the optimized_model, kNaiveBuffer default.
  LiteModelType model_type_{LiteModelType::kNaiveBuffer};
  // Dir path of a set of models, this should be combined with model
  std::string model_set_dir_;
84
  bool record_strip_info_{false};
85 86 87 88 89 90 91
  void RunOptimizeFromModelSet(bool record_strip_info = false);
};

}  // namespace lite_api
}  // namespace paddle

#endif  // NOLINT