// Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include "lite/core/mir/fusion/conv_conv_fuse_pass.h"
#include <list>
#include <memory>
#include <vector>
#include "lite/core/mir/fusion/conv_conv_fuser.h"
#include "lite/core/mir/graph_visualize_pass.h"
#include "lite/core/mir/pass_registry.h"

namespace paddle {
namespace lite {
namespace mir {

void ConvConvFusePass::Apply(const std::unique_ptr<SSAGraph>& graph) {
  // initialize fuser params
  std::vector<bool> conv_has_bias_cases{true, false};
  std::vector<std::string> conv_type_cases{"conv2d"};
  bool has_int8 = false;
  bool has_weight_quant = false;
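  // Inspect the valid places: record whether int8 precision is requested and
  // bail out early if any target other than ARM or Host is present.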
  for (auto& place : graph->valid_places()) {
    if (place.target == TARGET(kARM) || place.target == TARGET(kHost)) {
      if (place.precision == PRECISION(kInt8)) {
        has_int8 = true;
      }
    } else {
      VLOG(5) << "place.target: " << static_cast<int>(place.target);
      return;
    }
  }
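  // Scan all statement nodes: a "quantization_type" attribute on any op marks
  // the model as weight-quantized, which this pass does not handle.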
  const std::list<mir::Node>& nodes = graph->nodes();
  for (auto& node : nodes) {
    if (node.IsStmt()) {
      auto* op_info = (node.stmt())->op_info();
      if (op_info->HasAttr("quantization_type")) {
        has_weight_quant = true;
        break;
      }
    }
  }
  // only support arm-fp32
  if (has_int8 || has_weight_quant) {
    return;
  }
  // only support fp32 fusion
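  // Run the fuser once for every combination of bias flags and conv types
  // of the two adjacent convolutions.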
  for (auto conv_has_bias0 : conv_has_bias_cases) {
    for (auto conv_has_bias1 : conv_has_bias_cases) {
      for (auto conv_type0 : conv_type_cases) {
        for (auto conv_type1 : {"conv2d"}) {  // it must be a 1x1s1p0 conv
          VLOG(5) << "conv_has_bias0:" << conv_has_bias0
                  << " conv_type0:" << conv_type0;
          VLOG(5) << "conv_has_bias1:" << conv_has_bias1
                  << " conv_type1:" << conv_type1;
          fusion::ConvConvFuser fuser(
              conv_type0, conv_type1, conv_has_bias0, conv_has_bias1, graph);
          fuser(graph.get());
        }
      }
    }
  }
}

}  // namespace mir
}  // namespace lite
}  // namespace paddle

REGISTER_MIR_PASS(lite_conv_conv_fuse_pass, paddle::lite::mir::ConvConvFusePass)
    .BindTargets({TARGET(kARM)});