/* Copyright (c) 2016 Baidu, Inc. All Rights Reserve.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#pragma once

#include "Layer.h"
#include "paddle/math/Matrix.h"
#include "paddle/utils/ThreadLocal.h"

namespace paddle {

/**
 * This layer just simply add all input layers together, then activate
 * the sum inputs. Each input of this layer should be the same size,
Z
zhangjinchao01 已提交
26 27 28 29
 * which is also the output size of this layer.
 * \f[
 *   y=f(\sum_{i}x_i + b)
 * \f]
30 31 32
 * where \f$y\f$ is output, \f$x\f$ is input, \f$b\f$ is bias, and \f$f\f$ is
 * activation function.
 *
Z
zhangjinchao01 已提交
33 34 35 36 37 38 39 40 41 42 43
 * The config file api is addto_layer.
 */
class AddtoLayer : public Layer {
protected:
  std::unique_ptr<Weight> biases_;

public:
  explicit AddtoLayer(const LayerConfig& config) : Layer(config) {}

  ~AddtoLayer() {}

44 45
  /**
   * Intialization of AddtoLayer.
Z
zhangjinchao01 已提交
46 47 48
   */
  bool init(const LayerMap& layerMap, const ParameterMap& parameterMap);

49
  /**
Z
zhangjinchao01 已提交
50
   * Forward propagation.
51
   * @note There is no weight matrix for each input,
Z
zhangjinchao01 已提交
52 53 54 55
   *       because it just a simple add operation.
   */
  void forward(PassType passType);

56 57
  /**
   * Backward propagation.
Z
zhangjinchao01 已提交
58 59 60 61 62
   */
  void backward(const UpdateCallback& callback = nullptr);
};

}  // namespace paddle