optimizer.h
// Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#pragma once
#include <memory>
#include <string>
#include <vector>
#include "paddle/fluid/lite/core/mir/generate_program_pass.h"
#include "paddle/fluid/lite/core/mir/pass_manager.h"
#include "paddle/fluid/lite/core/mir/ssa_graph.h"
#include "paddle/fluid/lite/core/mir/static_kernel_pick_pass.h"
#include "paddle/fluid/lite/core/mir/type_target_transform_pass.h"
#include "paddle/fluid/lite/core/program.h"
#include "paddle/fluid/lite/core/types.h"
#include "paddle/fluid/lite/model_parser/model_parser.h"

namespace paddle {
namespace lite {

/*
 * lite::Optimizer optimizes a program. It uses the MIR passes to analyze the
 * program and exports an optimized program.
 */
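/*
 * A minimal usage sketch of the optimize-then-generate flow. The `program`
 * variable and the Place construction below are assumptions for illustration,
 * not guaranteed by this header:
 *
 *   lite::Optimizer optimizer;
 *   std::vector<Place> valid_places{Place{TARGET(kHost), PRECISION(kFloat)}};
 *   optimizer.Run(std::move(program), valid_places, core::KernelPickFactor());
 *   auto runtime_program = optimizer.GenRuntimeProgram();
 */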
class Optimizer {
 public:
  void Run(Program&& program, const std::vector<Place>& valid_places,
           core::KernelPickFactor kernel_pick_factor,
           const std::vector<std::string>& passes = {}) {
    program_ = &program;
    valid_places_ = valid_places;
    CHECK(!valid_places.empty()) << "At least one valid_place should be set";
    CHECK(!graph_) << "duplicate optimization found";
    graph_.reset(new mir::SSAGraph);
    graph_->Build(program, valid_places);
    SpecifyKernelPickTactic(kernel_pick_factor);
    InitTargetTypeTransformPass();

#ifndef LITE_WITH_LIGHT_WEIGHT_FRAMEWORK
    if (passes.empty()) {
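      // Default pipeline: fuse fc ops, statically pick kernels, infer variable
      // places, insert target-transform (io_copy) ops where targets differ,
      // pick kernels for the inserted io_copy ops, and assign runtime
      // contexts; the argument_type_display passes print type info for
      // debugging.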
      RunPasses(std::vector<std::string>{{
          "lite_fc_fuse_pass",              //
          "static_kernel_pick_pass",        //
          "variable_place_inference_pass",  //
          "argument_type_display_pass",     //
          "type_target_transform_pass",     //
          "argument_type_display_pass",     //
          "variable_place_inference_pass",  //
          "argument_type_display_pass",     //
          "io_copy_kernel_pick_pass",       //
          "variable_place_inference_pass",  //
          "runtime_context_assign_pass",    //
      }});
    } else {
      RunPasses(passes);
    }
#endif
    exec_scope_ = program.exec_scope();
  }

  void KernelPickPreferPlace(const Place& place) {
    auto* pass = mir::PassManager::Global().LookUp<mir::StaticKernelPickPass>(
        "static_kernel_pick_pass");
    CHECK(pass);
    pass->SetPreferPlace(place);
  }

  // Generate a new RuntimeProgram based on the optimized MIR graph.
  std::unique_ptr<RuntimeProgram> GenRuntimeProgram() {
    LOG(INFO) << "generate program";
    auto pass = mir::PassManager::Global().LookUp<mir::GenerateProgramPass>(
        "generate_program_pass");
    pass->Apply(graph_);
    auto program = pass->GenProgram();
    CHECK(exec_scope_);
    program->set_exec_scope(exec_scope_);
    return program;
  }

  void InitTargetTypeTransformPass() {
    auto* pass =
        mir::PassManager::Global().LookUp<mir::TypeTargetTransformPass>(
            "type_target_transform_pass");
    CHECK(pass);
    CHECK(!valid_places_.empty());
    LOG(INFO) << "valid_places.size " << valid_places_.size();
    pass->SetValidPlaces(valid_places_);
  }

  // Generate C++ code that combines the inference program, model, and weights.
  void GenCode(const std::string& code_dir);

  const mir::SSAGraph& ssa_graph() const {
    CHECK(graph_);
    return *graph_;
  }

  mir::SSAGraph* mutable_ssa_graph() {
    CHECK(graph_);
    return graph_.get();
  }

 protected:
  void SpecifyKernelPickTactic(core::KernelPickFactor factor);

  // Run the specified passes in order.
  void RunPasses(const std::vector<std::string>& passes) {
    for (auto& x : passes) {
      LOG(INFO) << "== Running pass " << x;
      auto* pass = mir::PassManager::Global().LookUp(x);
      CHECK(pass);
      pass->Apply(graph_);
    }
  }

 private:
  std::unique_ptr<mir::SSAGraph> graph_;
  std::vector<Place> valid_places_;
  lite::Scope* exec_scope_{};  // Not owned.
  Program* program_{};  // Not owned.
};

}  // namespace lite
}  // namespace paddle