/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#include "ConvBaseLayer.h"
#include "paddle/math/MathUtils.h"
#include "paddle/utils/Logging.h"
namespace paddle {

bool ConvBaseLayer::init(const LayerMap& layerMap,
                         const ParameterMap& parameterMap) {
  /* Initialize the basic parent class */
  Layer::init(layerMap, parameterMap);
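
  // Layer types other than those listed below are treated as deconvolution
  // (transposed convolution) layers.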
  isDeconv_ = (config_.type() == "exconv" || config_.type() == "cudnn_conv" ||
               config_.type() == "conv3d" || config_.type() == "deconv3d")
                  ? false
                  : true;

  /* Initialize the convolutional layer parameters */
  numFilters_ = config_.num_filters();
  sharedBiases_ = config_.shared_biases();
  for (auto& inputConfig : config_.inputs()) {
    const ConvConfig& conf = inputConfig.conv_conf();
    padding_.push_back(conf.padding());
    stride_.push_back(conf.stride());
    filterSize_.push_back(conf.filter_size());
    paddingY_.push_back(conf.padding_y());
    strideY_.push_back(conf.stride_y());
    filterSizeY_.push_back(conf.filter_size_y());
    channels_.push_back(conf.channels());
    imgSizeH_.push_back(conf.has_img_size_y() ? conf.img_size_y()
                                              : conf.img_size());
    imgSizeW_.push_back(conf.img_size());
    groups_.push_back(conf.groups());
    filterChannels_.push_back(conf.filter_channels());
    outputH_.push_back(conf.has_output_y() ? conf.output_y() : conf.output_x());
    outputW_.push_back(conf.output_x());

    paddingZ_.push_back(conf.padding_z());
    strideZ_.push_back(conf.stride_z());
    filterSizeZ_.push_back(conf.filter_size_z());
    imgSizeD_.push_back(conf.img_size_z());
    outputD_.push_back(conf.output_z());
    filterPixels_.push_back(filterSize_.back() * filterSizeY_.back() *
                            filterSizeZ_.back());
  }

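  // One weight per input: each weight matrix has filterPixels * filterChannels
  // rows, and its width is numFilters for a convolution or the input channel
  // count for a deconvolution.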
  CHECK(inputLayers_.size() == parameters_.size());
  for (size_t i = 0; i < inputLayers_.size(); i++) {
    size_t height, width;
    height = filterPixels_[i] * filterChannels_[i];
    width = (!isDeconv_) ? numFilters_ : channels_[i];

    // create a new weight
    CHECK_EQ(parameters_[i]->getSize(), width * height);
    Weight* w = new Weight(height, width, parameters_[i]);
    weights_.emplace_back(w);
  }

  /* Initialize biases_: one bias per filter when shared, otherwise one per
   * output element */
  if (biasParameter_.get()) {
    if (sharedBiases_) {
      CHECK_EQ((size_t)numFilters_, biasParameter_->getSize());
      biases_ =
          std::unique_ptr<Weight>(new Weight(1, numFilters_, biasParameter_));
    } else {
      biases_ =
          std::unique_ptr<Weight>(new Weight(1, getSize(), biasParameter_));
    }
  }

  // Use the Caffe convention when computing output sizes by default.
  caffeMode_ = true;

  return true;
}

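/* Recompute the per-input image sizes and the layer's output size from the
 * current input frame dimensions, falling back to the sizes recorded in the
 * config when a frame dimension is zero. */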
size_t ConvBaseLayer::calOutputSize() {
  auto clearAndReserve = [this](IntV* vec) {
    vec->clear();
    vec->reserve(this->inputLayers_.size());
  };
  clearAndReserve(&imgSizeH_);
  clearAndReserve(&imgSizeW_);
  clearAndReserve(&outputH_);
  clearAndReserve(&outputW_);
  size_t layerSize = 0;

  auto setLayerSize = [&](IntV& inH, IntV& inW, IntV& outH, IntV& outW) {
    for (size_t i = 0; i < inputLayers_.size(); i++) {
      inH.push_back(inputLayers_[i]->getOutput().getFrameHeight());
      inW.push_back(inputLayers_[i]->getOutput().getFrameWidth());
      const ConvConfig& conf = config_.inputs(i).conv_conf();
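      // For a deconvolution the input frame plays the role of the convolution
      // output, so imageSize() (the inverse mapping of outputSize()) gives the
      // spatial size of the result; ordinary convolutions use outputSize().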
      if (isDeconv_) {
        if (inH[i] == 0)
          inH[i] = conf.has_output_y() ? conf.output_y() : conf.output_x();
        if (inW[i] == 0) inW[i] = conf.output_x();
        outH.push_back(imageSize(
            inH[i], filterSizeY_[i], paddingY_[i], strideY_[i], caffeMode_));
        outW.push_back(imageSize(
            inW[i], filterSize_[i], padding_[i], stride_[i], caffeMode_));
      } else {
        if (inH[i] == 0)
          inH[i] = conf.has_img_size_y() ? conf.img_size_y() : conf.img_size();
        if (inW[i] == 0) inW[i] = conf.img_size();
        outH.push_back(outputSize(
            inH[i], filterSizeY_[i], paddingY_[i], strideY_[i], caffeMode_));
        outW.push_back(outputSize(
            inW[i], filterSize_[i], padding_[i], stride_[i], caffeMode_));
      }
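      // All inputs must produce the same output spatial size.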
      CHECK_EQ(outH[i], outH[0]);
      CHECK_EQ(outW[i], outW[0]);
    }
    getOutput().setFrameHeight(outH[0]);
    getOutput().setFrameWidth(outW[0]);
    layerSize = outH[0] * outW[0] * size_t(numFilters_);
  };

  setLayerSize(imgSizeH_, imgSizeW_, outputH_, outputW_);

  return layerSize;
}

}  // namespace paddle