diff --git a/paddle/parameter/ParameterUpdaterHook.cpp b/paddle/parameter/ParameterUpdaterHook.cpp
index ba2cb37fa2cecf9f04a1e52819d4e09ab6aacb19..968803fc0f0b81cfdea269423d99b8282b7fcc38 100644
--- a/paddle/parameter/ParameterUpdaterHook.cpp
+++ b/paddle/parameter/ParameterUpdaterHook.cpp
@@ -71,10 +71,9 @@ public:
     for (size_t i = 0; i < para->getSize(); i++)
       param.push_back(std::make_pair(fabs(paraCpuCopy->getData()[i]), i));
 
-    std::partial_sort(param.begin(), param.begin() + nonZeroNum, param.end(),
-                      sortPairAscend);
-    for (size_t i = 0; i < nonZeroNum; i++)
-      maskTempData[param[i].second] = 1.0;
+    std::partial_sort(
+        param.begin(), param.begin() + nonZeroNum, param.end(), sortPairAscend);
+    for (size_t i = 0; i < nonZeroNum; i++) maskTempData[param[i].second] = 1.0;
 
     // Currently just use a mask vector for hack.
     if (para->useGpu()) {
@@ -127,14 +126,16 @@ private:
   std::hash<int> intHasher_;
 };
 
-static WeakKVCache<std::pair<std::string, int>, IParameterUpdaterHook,
-                   StringIntPairHasher> g_hookCache_;
+static WeakKVCache<std::pair<std::string, int>,
+                   IParameterUpdaterHook,
+                   StringIntPairHasher>
+    g_hookCache_;
 
 /**
  * ParameterUpdaterHook actually factory method.
  */
-static IParameterUpdaterHook *
-createImpl(const ParameterUpdaterHookConfig &config) {
+static IParameterUpdaterHook *createImpl(
+    const ParameterUpdaterHookConfig &config) {
   auto &type = config.type();
   if (type == "pruning") {
     return new StaticPruningHook(config);
@@ -144,11 +145,11 @@ createImpl(const ParameterUpdaterHookConfig &config) {
   return nullptr;
 }
 
-std::shared_ptr<IParameterUpdaterHook>
-IParameterUpdaterHook::create(const ParameterConfig &paramConfig, int idx) {
+std::shared_ptr<IParameterUpdaterHook> IParameterUpdaterHook::create(
+    const ParameterConfig &paramConfig, int idx) {
   std::pair<std::string, int> key = {paramConfig.name(), idx};
   return g_hookCache_.get(
       key, [&] { return createImpl(paramConfig.update_hooks(idx)); });
 }
 
-} // namespace paddle
+}  // namespace paddle