/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#include "ParameterUpdaterHook.h"

#include <algorithm>
#include <atomic>
#include <cmath>
#include <fstream>
#include <mutex>
#include <thread>
#include <unordered_map>

#include "paddle/math/Vector.h"
#include "paddle/parameter/Parameter.h"
#include "paddle/utils/Flags.h"
#include "paddle/utils/Util.h"

using std::pair;
using std::vector;

namespace paddle {

/**
 * The static pruning hook
 *
 * Static means user load a mask map before training started. This map will
 * define which link/weight between neural is disabled.
 */
class StaticPruningHook : public IParameterUpdaterHook {
public:
  /**
   * The Mask Map Header.
   * The map file started with this header.
   *
   * In Version 0, reset file will be:
   *  contains header.size bit, each bit means such weight is enabled or not.
   *    if bit is 1, then such weight is enabled.
   *  at end, the file will round to byte, and the low bits of end byte will be
   *  filled by zero.
   *
   */
  struct StaticMaskHeader {
    uint32_t version;
    size_t size;
  } __attribute__((__packed__));

  explicit StaticPruningHook(const std::string& mask_filename) : initCount_(0) {
    bool ok = this->loadMaskFile(mask_filename);
    if (!ok) {
      LOG(WARNING) << "Fail to load mask file " << mask_filename
                   << " in current directory, searching in init_model_path";
      std::string combineMaskFilename =
          path::join(FLAGS_init_model_path, mask_filename);
      CHECK(this->loadMaskFile(combineMaskFilename))
          << "Cannot load " << mask_filename << " in ./" << mask_filename
          << " and " << combineMaskFilename;
    }
    VLOG(3) << mask_filename << " mask size = " << this->mask_.size();
  }

  void update(Parameter* para) {
    updateThreadChecker_.check();
    auto& vec = para->getBuf(PARAMETER_GRADIENT);
    if (vec) {
      vec->dotMul(*maskVec_);
    }
  }

  void init(Parameter* para) {
    size_t initCount = this->initCount_.fetch_add(1);
    CHECK_EQ(initCount, 0UL) << "Currently the StaticPruningHook must invoke "
                                "in same ParamterUpdater";
    VLOG(3) << "Initialize Parameter " << para;
    SetDevice device(para->getDeviceId());

    auto maskVec = Vector::create(this->mask_.size(), false);
    {  // Initialize maskVec with float mask vector
      real* dataPtr = maskVec->getData();
      size_t i = 0;
      for (bool m : mask_) {
        dataPtr[i++] = m ? 1.0 : 0.0;
      }
    }

    // Currently just use a mask vector for hack.
    // @TODO(yuyang18): Implemented the mask operation in vector.
    if (para->useGpu()) {
      maskVec_ = Vector::create(this->mask_.size(), para->useGpu());
      maskVec_->copyFrom(*maskVec);
    } else {
      maskVec_ = maskVec;
    }

    auto& vec = para->getBuf(PARAMETER_VALUE);
    vec->dotMul(*maskVec_);
  }

private:
  bool loadMaskFile(const std::string& mask_filename) {
    std::ifstream fin;
    fin.open(mask_filename);
    if (fin.is_open()) {
      StaticMaskHeader header;
      fin.read(reinterpret_cast<char*>(&header), sizeof(StaticMaskHeader));
      CHECK_EQ(header.version, 0UL);
      mask_.resize(header.size);
      uint8_t buf;
      for (size_t i = 0; i < header.size; ++i, buf <<= 1) {
        if (i % 8 == 0) {
          fin.read(reinterpret_cast<char*>(&buf), sizeof(uint8_t));
        }
        mask_[i] = buf & 0x80;
      }
      fin.close();
      return true;
    } else {
      return false;
    }
  }

  SameThreadChecker updateThreadChecker_;
  std::atomic<size_t> initCount_;
  VectorPtr maskVec_;
  std::vector<bool> mask_;
};

class DynamicPruningHook : public IParameterUpdaterHook {
public:
  explicit DynamicPruningHook(const ParameterUpdaterHookConfig& hookConfig)
      : initCount_(0) {
    sparsityRatio_ = hookConfig.sparsity_ratio();
  }

  static bool sortPairAscend(const pair<real, size_t>& pair1,
                             const pair<real, size_t>& pair2) {
    return pair1.first > pair2.first;
  }

  void update(Parameter* para) {
    updateThreadChecker_.check();
    auto& vec = para->getBuf(PARAMETER_GRADIENT);
    if (vec) {
      vec->dotMul(*maskVec_);
    }
  }

  void generateMask(Parameter* para) {
    VectorPtr vec = para->getBuf(PARAMETER_VALUE);
    maskTemp_ = Vector::create(para->getSize(), false);
    maskTemp_->zeroMem();
    real* dataPtr = maskTemp_->getData();

    VectorPtr vecCpu = Vector::create(para->getSize(), false);
    vecCpu->copyFrom(*vec);
    vector<pair<real, size_t>> param;

    for (size_t i = 0; i < para->getSize(); i++)
      param.push_back(std::make_pair(fabs(vecCpu->getData()[i]), i));
    std::sort(param.begin(), param.end(), sortPairAscend);

    for (size_t i = 0; i < para->getSize() * sparsityRatio_; i++)
      dataPtr[param[i].second] = 1.0;
  }

  void init(Parameter* para) {
    generateMask(para);
    size_t initCount = this->initCount_.fetch_add(1);
    CHECK_EQ(initCount, 0UL) << "Currently the DynamicPruningHook must invoke "
                                "in same ParamterUpdater";
    VLOG(3) << "Initialize Parameter " << para;
    SetDevice device(para->getDeviceId());

    // Currently just use a mask vector for hack.
    // @TODO(yuyang18): Implemented the mask operation in vector.
    if (para->useGpu()) {
      maskVec_ = Vector::create(para->getSize(), para->useGpu());
      maskVec_->copyFrom(*maskTemp_);
    } else {
      maskVec_ = maskTemp_;
    }

    auto& vec = para->getBuf(PARAMETER_VALUE);
    vec->dotMul(*maskVec_);
  }

private:
  SameThreadChecker updateThreadChecker_;
  std::atomic<size_t> initCount_;
  VectorPtr maskVec_;
  VectorPtr maskTemp_;
  real sparsityRatio_;
};

// Defaulted out of line: states intent more directly than an empty body and
// keeps these definitions (and any vtable anchoring they provide) in this
// translation unit.
IParameterUpdaterHook::IParameterUpdaterHook() = default;

IParameterUpdaterHook::~IParameterUpdaterHook() = default;

/**
 * Hasher for (string, int) keys used by g_hookCache_.
 *
 * Deliberately independent of the PServer's ParameterBlock hasher to avoid
 * a dependency between the two.
 *
 * May be extracted to Util.h to unify the hashers.
 */
class StringIntPairHasher {
public:
  size_t operator()(const std::pair<std::string, int>& key) const {
    const size_t nameHash = strHasher_(key.first);
    return intHasher_(nameHash + key.second);
  }

private:
  std::hash<std::string> strHasher_;
  std::hash<int> intHasher_;
};

/**
 * Global cache of updater hooks, keyed by (parameter name, hook index), so
 * that repeated create() calls for the same key return the same instance.
 * NOTE(review): WeakKVCache presumably holds weak references — a hook is
 * shared while any updater still owns it and recreated afterwards; confirm
 * against WeakKVCache's declaration.
 */
static WeakKVCache<std::pair<std::string, int>,
                   IParameterUpdaterHook,
                   StringIntPairHasher> g_hookCache_;

/**
 * The actual factory method for ParameterUpdaterHook instances.
 * Aborts (LOG(FATAL)) on an unknown hook type or a missing required field.
 */
static IParameterUpdaterHook* createImpl(
    const ParameterUpdaterHookConfig& config) {
  auto& type = config.type();

  if (type == "pruning_static") {
    if (config.has_purning_mask_filename()) {
      return new StaticPruningHook(config.purning_mask_filename());
    }
    LOG(FATAL) << "There must be mask_filename parameter for " << type
               << " Hook";
  } else if (type == "pruning") {
    if (config.has_sparsity_ratio()) {
      return new DynamicPruningHook(config);
    }
    LOG(FATAL) << "There must be sparsity_ratio parameter for " << type
               << " Hook";
  }

  LOG(FATAL) << "Unknown Hook type:  " << type;
  return nullptr;  // unreachable: LOG(FATAL) aborts
}

std::shared_ptr<IParameterUpdaterHook> IParameterUpdaterHook::create(
    const ParameterConfig& paramConfig, int idx) {
  std::pair<std::string, int> key = {paramConfig.name(), idx};
  return g_hookCache_.get(
      key, [&] { return createImpl(paramConfig.update_hooks(idx)); });
}

}  // namespace paddle