/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#include "PadLayer.h"
#include "paddle/utils/Stat.h"

namespace paddle {

REGISTER_LAYER(pad, PadLayer);

bool PadLayer::init(const LayerMap& layerMap,
                    const ParameterMap& parameterMap) {
  /* Initialize the basic parent class */
  Layer::init(layerMap, parameterMap);

  CHECK_EQ(config_.inputs_size(), 1);
  auto& pad_conf = config_.inputs(0).pad_conf();
  auto& img_conf = pad_conf.image_conf();
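  // Input shape is NCHW; the batch dimension is left as 0 here and is
  // filled in at forward time once the batch size is known (see
  // setTensorDim below).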
  inDims_ = TensorShape(
      {0,
       img_conf.channels(),
       img_conf.has_img_size_y() ? img_conf.img_size_y() : img_conf.img_size(),
       img_conf.img_size()});

  CHECK_EQ(2, pad_conf.pad_c_size());
  CHECK_EQ(2, pad_conf.pad_h_size());
  CHECK_EQ(2, pad_conf.pad_w_size());
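  // Each pad_* spec carries two values: the number of elements prepended
  // (index 0) and appended (index 1) along channel, height, and width.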
  padc_.push_back(pad_conf.pad_c(0));
  padc_.push_back(pad_conf.pad_c(1));
  padh_.push_back(pad_conf.pad_h(0));
  padh_.push_back(pad_conf.pad_h(1));
  padw_.push_back(pad_conf.pad_w(0));
  padw_.push_back(pad_conf.pad_w(1));

  outDims_ = TensorShape(4);
  setOutDims(0);

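  // Bind the device-independent "Pad" function; the begin/end padding
  // offsets of each dimension are baked into its configuration.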
  createFunction(forward_,
                 "Pad",
                 FuncConfig()
                     .set("cstart", padc_[0])
                     .set("cend", padc_[1])
                     .set("hstart", padh_[0])
                     .set("hend", padh_[1])
                     .set("wstart", padw_[0])
                     .set("wend", padw_[1]));
  createFunction(backward_,
                 "PadGrad",
                 FuncConfig()
                     .set("cstart", padc_[0])
                     .set("cend", padc_[1])
                     .set("hstart", padh_[0])
                     .set("hend", padh_[1])
                     .set("wstart", padw_[0])
                     .set("wend", padw_[1]));

  return true;
}

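// The output shape is the input shape enlarged by the configured padding on
// both sides of the channel, height, and width dimensions. For example,
// inDims_ = {N, 3, 32, 32} with pad_c = {1, 1}, pad_h = {2, 2} and
// pad_w = {2, 2} yields outDims_ = {N, 5, 36, 36}.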
void PadLayer::setOutDims(const size_t batchSize) {
  outDims_.reshape({batchSize,
                    inDims_[1] + padc_[0] + padc_[1],
                    inDims_[2] + padh_[0] + padh_[1],
                    inDims_[3] + padw_[0] + padw_[1]});
}

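// Refresh the input shape from the current batch: frame height and width
// may change between batches, so they are re-read from the input layer.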
void PadLayer::setTensorDim(const size_t batchSize) {
  CHECK_EQ(static_cast<int>(inputLayers_.size()), 1);
  inDims_.setDim(0, batchSize);
  int h = inputLayers_[0]->getOutput().getFrameHeight();
  if (h != 0) inDims_.setDim(2, h);
  int w = inputLayers_[0]->getOutput().getFrameWidth();
  if (w != 0) inDims_.setDim(3, w);
  setOutDims(batchSize);
}

void PadLayer::forward(PassType passType) {
  Layer::forward(passType);
  MatrixPtr input = inputLayers_[0]->getOutputValue();
  size_t batchSize = input->getHeight();
  setTensorDim(batchSize);
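  // The output matrix stores one flattened (C * H * W) row per sample.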
  int size = outDims_[1] * outDims_[2] * outDims_[3];
  resetOutput(batchSize, size);
  MatrixPtr outV = getOutputValue();
  REGISTER_TIMER_INFO("PadForward", getName().c_str());

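  // Run the Pad function: the input tensor is written into the interior of
  // the larger output tensor (ASSIGN_TO overwrites the output buffer).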
  BufferArgs inputs;
  BufferArgs outputs;
  inputs.addArg(*getInputValue(0), inDims_);
  outputs.addArg(*getOutputValue(), outDims_, ASSIGN_TO);
  forward_[0]->calc(inputs, outputs);
}

void PadLayer::backward(const UpdateCallback& callback) {
  (void)callback;
  REGISTER_TIMER_INFO("PadBackward", getName().c_str());

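  // ADD_TO accumulates into the existing input gradient, so contributions
  // from other output branches are summed rather than overwritten.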
  BufferArgs inputs;
  BufferArgs outputs;
  inputs.addArg(*getOutputGrad(), outDims_);
  outputs.addArg(*getInputGrad(0), inDims_, ADD_TO);
  backward_[0]->calc(inputs, outputs);
}
}  // namespace paddle