/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#include "Projection.h"
#include "paddle/utils/Stat.h"

namespace paddle {

/**
 * @brief TransposedFullMatrixProjection performs full matrix multiplication:
 * out.row[i] += in.row[i] * weight.transpose
 *
 * The config file api is trans_full_matrix_projection.
 */
class TransposedFullMatrixProjection : public Projection {
 public:
  TransposedFullMatrixProjection(const ProjectionConfig& config,
                                 ParameterPtr parameter,
                                 bool useGpu);
  virtual void forward();
  virtual void backward(const UpdateCallback& callback);

 protected:
  std::unique_ptr<Weight> weight_;
};

REGISTER_PROJECTION(trans_fc, TransposedFullMatrixProjection);

TransposedFullMatrixProjection::TransposedFullMatrixProjection(
    const ProjectionConfig& config, ParameterPtr parameter, bool useGpu)
    : Projection(config, parameter, useGpu) {
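  // The weight is stored with shape (output_size x input_size);
  // forward() multiplies the input by its transpose.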
  weight_.reset(
      new Weight(config.output_size(), config.input_size(), parameter));
}

void TransposedFullMatrixProjection::forward() {
  REGISTER_TIMER_INFO("FwMulTimer", getName().c_str());
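  // out.value += in.value * W^T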
  out_->value->mul(*(in_->value), *(weight_->getW()->getTranspose()), 1, 1);
}

void TransposedFullMatrixProjection::backward(const UpdateCallback& callback) {
  bool syncFlag = hl_get_sync_flag();

  /* Calculate the W-gradient for the current layer */
  if (weight_->getWGrad()) {
    REGISTER_TIMER_INFO("GradMulTimer", getName().c_str());
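    // dW += out.grad^T * in.value  (gradient of out = in * W^T w.r.t. W)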
    weight_->getWGrad()->mul(
        *(out_->grad->getTranspose()), *(in_->value), 1, 1);
  }

  // If callback does not change value, backprop error asynchronously so that
  // we can do the callback concurrently.
  // This is still a little bit dangerous since theoretically for
  // SyncMultiGpuMachine it is possible that the value copyback can still
  // happen at the same time as the error backprop where the value is being
  // used.
  hl_set_sync_flag(false);

  /* Calculate the input layers error */
  if (in_->grad) {
    REGISTER_TIMER_INFO("BpMulTimer", getName().c_str());
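    // in.grad += out.grad * W  (propagate the error back to the input layer)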
    in_->grad->mul(*(out_->grad), *(weight_->getW()), 1, 1);
  }

  hl_set_sync_flag(syncFlag);
  parameter_->incUpdate(callback);
}

}  // namespace paddle