Commit 4d4593b9 authored by Yibing Liu

code cleanup

Parent 2e4c0bd2
......@@ -305,12 +305,13 @@ void AdamaxParameterOptimizer::update(const VectorPtr vecs[],
void OptimizerWithGradientClipping::update(const VectorPtr vecs[],
const ParameterConfig& config,
size_t sparseId) const {
-  // globalGradientClipping(vecs, config, FLAGS_log_clipping);
real global_thres_ = optConfig_.gradient_clipping_threshold();
real local_thres_ = config.gradient_clipping_threshold();
real threshold;
std::string field;
+  // Get the minimum of local and global threshold
+  // as the real threshold for clipping
if (global_thres_ > 0.0f && local_thres_ > 0.0f) {
threshold = global_thres_ < local_thres_ ? global_thres_ : local_thres_;
field = global_thres_ < local_thres_ ? "global" : "local";
......
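For context, the threshold-selection rule in the hunk above can be read in isolation as follows. This is a minimal standalone sketch, not PaddlePaddle's actual API: plain std::vector stands in for VectorPtr, both helper names (clipByNorm, effectiveThreshold) are invented, and the fall-through branch for the case where only one threshold is positive is an assumption, since the hunk is truncated at that point.

#include <algorithm>
#include <cmath>
#include <vector>

// Hypothetical helper: clip a gradient vector by its L2 norm so that
// ||grad|| <= threshold, leaving it untouched otherwise.
void clipByNorm(std::vector<float>& grad, float threshold) {
  float sumSq = 0.0f;
  for (float g : grad) sumSq += g * g;
  float norm = std::sqrt(sumSq);
  if (norm > threshold) {
    float scale = threshold / norm;
    for (float& g : grad) g *= scale;
  }
}

// Hypothetical helper mirroring the selection logic in update(): when both
// the optimizer-wide (global) and per-parameter (local) thresholds are
// positive, the smaller one wins; otherwise the positive one (if any) is
// used, with a non-positive result meaning "clipping disabled".
float effectiveThreshold(float globalThres, float localThres) {
  if (globalThres > 0.0f && localThres > 0.0f) {
    return std::min(globalThres, localThres);
  }
  return globalThres > 0.0f ? globalThres : localThres;
}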
......@@ -170,9 +170,6 @@ public:
real getLearningRate() const { return learningRate_; }
-  // real getGradientClippingThreshold() const {return
-  // gradientClippingThreshold_;}
virtual void setNoDecay() { applyDecay_ = false; }
static ParameterOptimizer* create(const OptimizationConfig& optConfig,
......@@ -206,11 +203,6 @@ protected:
*/
real learningRate_;
-  /**
-   * global threshold for gradient clipping,
-   * init value is opt_config.gradient_clipping_thresholod
-   */
std::unique_ptr<LearningRateScheduler> learningRateScheduler_;
int64_t pass_; // current training pass (starting from 0)
bool firstTime_;
......