/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#pragma once

#include "paddle/math/MathFunctions.h"
#include "paddle/parameter/Parameter.h"
#include "paddle/parameter/Weight.h"

namespace paddle {
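
/**
 * MKLPackedWeight keeps an MKL cblas-packed copy of a layer's weight so that
 * repeated GEMMs against the same weight can reuse the packed buffer instead
 * of repacking it inside every sgemm call.
 *
 * Illustrative usage (a sketch only; the weight, input and output matrices
 * below are assumptions, not defined in this header):
 *
 *   MKLPackedWeight packedWeight(weight, false);
 *   packedWeight.pack();                        // repack after weight updates
 *   packedWeight.gemm_compute(input, output);   // output = input * W + output
 */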

class MKLPackedWeight {
 protected:
  /// Pointer to the weight data.
  real *weight_;
  /// Pointer to the cblas-packed buffer of the weight used by the packed GEMM.
  real *packedWeight_;
  size_t height_;
  size_t width_;
  bool transW_;

 public:
  explicit MKLPackedWeight(MatrixPtr weight, bool transW = false) {
    packedWeight_ = nullptr;
    weight_ = weight->getData();
    height_ = weight->getHeight();
    width_ = weight->getWidth();
    transW_ = transW;
  }

  ~MKLPackedWeight() { free_(); }

  /// (Re)pack the current weight data into the MKL internal packed format.
  void pack() { pack_(weight_); }

  /// Compute dst = src * W + dst (or dst = src * W^T + dst when transW_ is
  /// true) using the packed weight; pack() must have been called beforehand.
  void gemm_compute(const MatrixPtr src, MatrixPtr dst) {
    cblas_sgemm_compute(CblasRowMajor,
                        CblasNoTrans,
                        CblasPacked,
                        src->getHeight(),
                        transW_ ? height_ : width_,
                        transW_ ? width_ : height_,
                        src->getData(),
                        src->getWidth(),
                        packedWeight_,
                        width_,
                        1.0,
                        dst->getData(),
                        dst->getWidth());
  }

 protected:
  /// Allocate the packed buffer on first use and pack src into it.
  void pack_(real *src) {
    if (!packedWeight_) {
      packedWeight_ = cblas_sgemm_alloc(CblasBMatrix, 1, width_, height_);
    }
    cblas_sgemm_pack(CblasRowMajor,
                     CblasBMatrix,
                     transW_ ? CblasTrans : CblasNoTrans,
                     1,
                     transW_ ? height_ : width_,
                     transW_ ? width_ : height_,
                     1.0,
                     src,
                     width_,
                     packedWeight_);
  }

  /// Release the packed buffer allocated by cblas_sgemm_alloc.
  void free_() {
    if (packedWeight_) {
      cblas_sgemm_free(packedWeight_);
    }
  }
};

}  // namespace paddle