SequenceLastInstanceLayer.cpp
/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#include "paddle/utils/Logging.h"

#include "SequencePoolLayer.h"
#include "paddle/math/Matrix.h"
#include "paddle/utils/Stat.h"

namespace paddle {

/**
 * A layer for extracting the last instance of the input sequence.
 * Input: a sequence
 * If SequenceLevel = kNonseq:
 *   Output: a sequence containing only the last instance of the input sequence
 * If SequenceLevel = kSeq:
 *   Check that the input sequence contains sub-sequences
 *   Output: a sequence containing only the last instance of each sub-sequence
 *           of the input sequence
 *
 * The config file api is last_seq and first_seq.
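 *
 * For example, with SequenceLevel = kNonseq and select_first unset (last_seq),
 * an input sequence whose rows are [r0, r1, r2] yields a single output row
 * equal to r2; with select_first set (first_seq) the output row is r0.
 * An illustrative v1 Python config call (prev_layer is a placeholder name for
 * any layer that outputs a sequence) would look like:
 *   seq = last_seq(input=prev_layer)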
 */

class SequenceLastInstanceLayer : public SequencePoolLayer {
protected:
  MatrixPtr tmpSrc_;
  MatrixPtr tmpDest_;

public:
  explicit SequenceLastInstanceLayer(const LayerConfig& config)
      : SequencePoolLayer(config) {}

  bool init(const LayerMap& layerMap,
            const ParameterMap& parameterMap) override;

  void forward(PassType passType) override;
  void backward(const UpdateCallback& callback = nullptr) override;
};

// Register this layer under the config type name "seqlastins".
REGISTER_LAYER(seqlastins, SequenceLastInstanceLayer);

bool SequenceLastInstanceLayer::init(const LayerMap& layerMap,
                                     const ParameterMap& parameterMap) {
  SequencePoolLayer::init(layerMap, parameterMap);

  // tmpSrc_ and tmpDest_ are placeholder matrices with no storage of their
  // own; subMatrix() repoints them at single rows during forward/backward so
  // no per-row allocation is needed.
  tmpSrc_ =
      Matrix::create(nullptr, /* height= */ 1, 1, /* trans= */ false, useGpu_);
  tmpDest_ =
      Matrix::create(nullptr, /* height= */ 1, 1, /* trans= */ false, useGpu_);

  return true;
}

void SequenceLastInstanceLayer::forward(PassType passType) {
  SequencePoolLayer::forward(passType);

  const int* starts = startPositions_->getData(false);
  MatrixPtr inputValue = getInputValue(0);
  MatrixPtr outputValue = getOutputValue();

  {
    AsyncGpuBlock asyncGpuBlock;
    REGISTER_TIMER_INFO("SequenceLastInstanceLayerForward", getName().c_str());

    for (size_t seqId = 0; seqId < newBatchSize_; ++seqId) {
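      // Each sequence [starts[seqId], starts[seqId + 1]) contributes one row:
      // the first instance when select_first is set (first_seq), otherwise
      // the last instance (last_seq).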
      int insId =
          config_.select_first() ? starts[seqId] : starts[seqId + 1] - 1;

      outputValue->subMatrix(seqId, 1, tmpDest_)
          ->assign(*(inputValue->subMatrix(insId, 1, tmpSrc_)));
    }
  }

  if (biases_.get() != NULL) {
    outputValue->addBias(*(biases_->getW()), 1);
  }

  /* activation; should be set to 'linear' in most cases */
  forwardActivation();
}

void SequenceLastInstanceLayer::backward(const UpdateCallback& callback) {
  SequencePoolLayer::backward(callback);

  MatrixPtr inputGrad = getInputGrad(0);
  MatrixPtr outputGrad = getOutputGrad();
  const int* starts = startPositions_->getData(false);
  size_t numSequences = startPositions_->getSize() - 1;

  if (inputGrad) {
    AsyncGpuBlock asyncGpuBlock;
    REGISTER_TIMER_INFO("SequenceLastInstanceLayerBackward", getName().c_str());

    for (size_t seqId = 0; seqId < numSequences; ++seqId) {
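      // Route the gradient of each output row back to the single input row
      // that was selected in forward().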
      int insId =
          config_.select_first() ? starts[seqId] : starts[seqId + 1] - 1;

      inputGrad->subMatrix(insId, 1, tmpDest_)
          ->add(*(outputGrad->subMatrix(seqId, 1, tmpSrc_)));
    }
  }
}

}  // namespace paddle