/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#pragma once

#include "Layer.h"
#include "paddle/legacy/math/Matrix.h"
#include "paddle/legacy/utils/ThreadLocal.h"

namespace paddle {

/**
 * This layer just simply add all input layers together, then activate
 * the sum inputs. Each input of this layer should be the same size,
 * which is also the output size of this layer.
 * \f[
 *   y=f(\sum_{i}x_i + b)
 * \f]
 * where \f$y\f$ is output, \f$x\f$ is input, \f$b\f$ is bias, and \f$f\f$ is
 * activation function.
 *
 * The config file api is addto_layer.
 */
class AddtoLayer : public Layer {
W
Wu Yi 已提交
36
 protected:
Z
zhangjinchao01 已提交
37 38
  std::unique_ptr<Weight> biases_;

W
Wu Yi 已提交
39
 public:
Z
zhangjinchao01 已提交
40 41 42 43
  explicit AddtoLayer(const LayerConfig& config) : Layer(config) {}

  ~AddtoLayer() {}

44 45
  /**
   * Intialization of AddtoLayer.
Z
zhangjinchao01 已提交
46
   */
Y
Yu Yang 已提交
47 48
  bool init(const LayerMap& layerMap,
            const ParameterMap& parameterMap) override;
Z
zhangjinchao01 已提交
49

50
  /**
Z
zhangjinchao01 已提交
51
   * Forward propagation.
52
   * @note There is no weight matrix for each input,
Z
zhangjinchao01 已提交
53 54
   *       because it just a simple add operation.
   */
Y
Yu Yang 已提交
55
  void forward(PassType passType) override;
Z
zhangjinchao01 已提交
56

57 58
  /**
   * Backward propagation.
Z
zhangjinchao01 已提交
59
   */
Y
Yu Yang 已提交
60
  void backward(const UpdateCallback& callback = nullptr) override;
Z
zhangjinchao01 已提交
61 62 63
};

}  // namespace paddle