/* Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#include "operators/kernel/arm/convolution/conv_common.h"
#include "operators/math/winograd/winograd_transform.h"

namespace paddle_mobile {
namespace operators {

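// Pick the convolution implementation for the CPU backend and record the
// choice in param->ExecMode(). The decision depends on the filter data type
// (int8 vs. float), the filter size, stride, dilation, and whether the
// convolution is depthwise.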
void InitBaseConvKernel(ConvParam<CPU> *param) {
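  // Shape predicates: square 3x3 / 5x5 filters, and depthwise convolutions
  // (groups == input channels == output channels).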
  bool conv3x3 = param->Filter()->dims()[2] == param->Filter()->dims()[3] &&
                 param->Filter()->dims()[2] == 3;
  bool conv5x5 = param->Filter()->dims()[2] == param->Filter()->dims()[3] &&
                 param->Filter()->dims()[2] == 5;
  bool depth3x3 = conv3x3 && param->Groups() == param->Input()->dims()[1] &&
                  param->Input()->dims()[1] == param->Output()->dims()[1];

  bool depth5x5 = conv5x5 && param->Groups() == param->Input()->dims()[1] &&
                  param->Input()->dims()[1] == param->Output()->dims()[1];
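  // Quantized (int8) filters: on 32-bit ARM use the dedicated depthwise
  // kernels when the stride allows it; otherwise, and always on aarch64,
  // fall back to the int8 GEMM kernel.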
  if (param->Filter()->type() == typeid(int8_t)) {
#ifndef __aarch64__
    if (depth3x3 && param->Strides()[0] < 3 &&
        param->Strides()[0] == param->Strides()[1]) {
      param->ExecMode() = ConvParam<CPU>::EXEC_DEPTHWISE3x3_INT8;
    } else if (depth5x5 && param->Strides()[0] < 2 &&
               param->Strides()[0] == param->Strides()[1]) {
      param->ExecMode() = ConvParam<CPU>::EXEC_DEPTHWISE5x5_INT8;
    } else {
#endif  // __aarch64__
      param->ExecMode() = ConvParam<CPU>::EXEC_GEMM_INT8;
#ifndef __aarch64__
    }
#endif  // __aarch64__
  } else {
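    // Float filters: prefer the specialized depthwise kernels, then the
    // Winograd kernel for stride-1, dilation-1, non-depthwise 3x3
    // convolutions, and fall back to float GEMM otherwise.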
    if (depth3x3 && param->Strides()[0] == param->Strides()[1] &&
        param->Strides()[0] == 1) {
      param->ExecMode() = ConvParam<CPU>::EXEC_DEPTHWISE3x3S1_FLOAT;
    } else if (depth3x3 && param->Strides()[0] == param->Strides()[1] &&
               param->Strides()[0] == 2) {
      param->ExecMode() = ConvParam<CPU>::EXEC_DEPTHWISE3x3S2_FLOAT;
    } else if (depth5x5 && param->Strides()[0] == param->Strides()[1] &&
               param->Strides()[0] == 1) {
      param->ExecMode() = ConvParam<CPU>::EXEC_DEPTHWISE5x5_FLOAT;
    } else if (conv3x3 && !depth3x3 &&
               param->Strides()[0] == param->Strides()[1] &&
               param->Dilations()[0] == param->Dilations()[1] &&
               param->Strides()[0] == 1 && param->Dilations()[0] == 1
#if 0
               && param->Output()->dims()[1] >= 16 &&
               param->Input()->dims()[1] >= 16 &&
               param->Input()->dims()[2] <= 140  // referred from ncnn
#endif
    ) {
      param->ExecMode() = ConvParam<CPU>::EXEC_WINOGRAD3X3_FLOAT;
      // pre-transform the filter weights and cache them for the Winograd kernel
      param->transformed_filter_ = new framework::LoDTensor;
      operators::math::winograd_transform_weight<8, 3>(
          *param->Filter(), param->transformed_filter_);
    } else {
      param->ExecMode() = ConvParam<CPU>::EXEC_GEMM_FLOAT;
    }
  }
}

}  // namespace operators
}  // namespace paddle_mobile