optimizer.h
// Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#pragma once
#include <memory>
#include <string>
#include <vector>
#include "paddle/fluid/lite/core/mir/generate_program_pass.h"
#include "paddle/fluid/lite/core/mir/pass_manager.h"
#include "paddle/fluid/lite/core/mir/ssa_graph.h"
#include "paddle/fluid/lite/core/mir/static_kernel_pick_pass.h"
#include "paddle/fluid/lite/core/mir/type_target_transform_pass.h"
#include "paddle/fluid/lite/core/program.h"
#include "paddle/fluid/lite/core/types.h"
#include "paddle/fluid/lite/model_parser/model_parser.h"

namespace paddle {
namespace lite {

/*
 * lite::Optimizer optimizes a program. It utilizes the MIR passes to analyze
 * the program and exports an optimized program.
 */
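// A minimal usage sketch (illustrative only; the Program construction, the
// kARM place, the ConsiderTarget() builder and the RuntimeProgram::Run() call
// are assumptions made for the example, not guarantees of this header):
//
//   Program program = ...;  // built elsewhere from a loaded model
//   std::vector<Place> valid_places{Place{TARGET(kARM), PRECISION(kFloat)}};
//
//   core::KernelPickFactor factor;
//   factor.ConsiderTarget();  // assumed builder on KernelPickFactor
//
//   Optimizer optimizer;
//   optimizer.Run(std::move(program), valid_places, factor);
//   auto runtime_program = optimizer.GenRuntimeProgram();
//   runtime_program->Run();  // assuming RuntimeProgram exposes Run()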
class Optimizer {
 public:
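  // Optimize the given program for `valid_places`: build an SSA graph from it
  // and run the MIR passes (the default pipeline below, or `passes` if given).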
  void Run(Program&& program, const std::vector<Place>& valid_places,
           core::KernelPickFactor kernel_pick_factor,
           const std::vector<std::string>& passes = {}) {
    program_ = &program;
    valid_places_ = valid_places;
    CHECK(!valid_places.empty()) << "At least one valid_place should be set";
    CHECK(!graph_) << "duplicate optimization found";
    graph_.reset(new mir::SSAGraph);
    graph_->Build(program, valid_places);
    SpecifyKernelPickTactic(kernel_pick_factor);
    InitTargetTypeTransformPass();

    if (passes.empty()) {
      RunPasses(std::vector<std::string>{{
          "lite_conv_bn_fuse_pass",                   //
          "lite_conv_elementwise_add_act_fuse_pass",  //
          "lite_fc_fuse_pass",                        //
#ifndef LITE_WITH_LIGHT_WEIGHT_FRAMEWORK
          "static_kernel_pick_pass",        //
          "variable_place_inference_pass",  //
          "argument_type_display_pass",     //
          "type_target_transform_pass",     //
          "argument_type_display_pass",     //
          "variable_place_inference_pass",  //
          "argument_type_display_pass",     //
          "io_copy_kernel_pick_pass",       //
          "variable_place_inference_pass",  //
#endif
          "runtime_context_assign_pass",  //
      }});
    } else {
      RunPasses(passes);
    }
    exec_scope_ = program.exec_scope();
  }

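  // Bias static_kernel_pick_pass toward kernels registered for `place`. Call
  // this before Run() so the preference is in effect when that pass executes.
  // Illustrative only; the kARM place below is an assumption:
  //   optimizer.KernelPickPreferPlace(Place{TARGET(kARM), PRECISION(kFloat)});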
  void KernelPickPreferPlace(const Place& place) {
    auto* pass = mir::PassManager::Global().LookUp<mir::StaticKernelPickPass>(
        "static_kernel_pick_pass");
    CHECK(pass);
    pass->SetPreferPlace(place);
  }

  // Generate a new program based on the mir graph.
  std::unique_ptr<RuntimeProgram> GenRuntimeProgram() {
    LOG(INFO) << "generate program";
    auto pass = mir::PassManager::Global().LookUp<mir::GenerateProgramPass>(
        "generate_program_pass");
    pass->Apply(graph_);
    auto program = pass->GenProgram();
    CHECK(exec_scope_);
    program->set_exec_scope(exec_scope_);
    return program;
  }

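  // Forward the valid places to type_target_transform_pass before the passes
  // run, so the pass knows which targets are allowed when it resolves
  // target mismatches between connected kernels.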
  void InitTargetTypeTransformPass() {
    auto* pass =
        mir::PassManager::Global().LookUp<mir::TypeTargetTransformPass>(
            "type_target_transform_pass");
    CHECK(pass);
    CHECK(!valid_places_.empty());
    LOG(INFO) << "valid_places.size " << valid_places_.size();
    pass->SetValidPlaces(valid_places_);
  }

  // Generate C++ code which combines the inference program, model and weights.
  void GenCode(const std::string& code_dir);

  const mir::SSAGraph& ssa_graph() const {
    CHECK(graph_);
    return *graph_;
  }

  mir::SSAGraph* mutable_ssa_graph() {
    CHECK(graph_);
    return graph_.get();
  }

 protected:
  void SpecifyKernelPickTactic(core::KernelPickFactor factor);

  // Specify the passes and run them.
  void RunPasses(const std::vector<std::string>& passes) {
    for (auto& x : passes) {
      LOG(INFO) << "== Running pass " << x;
      auto* pass = mir::PassManager::Global().LookUp(x);
      CHECK(pass);
      pass->Apply(graph_);
    }
  }

 private:
  std::unique_ptr<mir::SSAGraph> graph_;
  std::vector<Place> valid_places_;
  lite::Scope* exec_scope_{};
  Program* program_{};
};

}  // namespace lite
}  // namespace paddle