// adadelta_optimizer.h — declaration of the Adadelta parameter optimizer.
#pragma once

#include "parameter_optimizer.h"

namespace paddle {
namespace optimizer {

D
dzhwinter 已提交
8
class AdadeltaOptimizer : public ParameterOptimizer {
9
public:
D
dzhwinter 已提交
10 11 12
  AdadeltaOptimizer(
      Tensor *parameter, LrPolicy *lr, double rho, double epsilon, double decay)
      : ParameterOptimizer(parameter, lr),
13 14 15 16 17
        accum_gradient_(nullptr),
        accum_delta_(nullptr),
        update_delta_(nullptr),
        rho_(rho),
        epsilon_(epsilon),
D
dzhwinter 已提交
18 19 20 21 22 23 24 25 26
        decay_(decay) {
    size_t size = p->size();
    if (accum_gradient_) delete accum_gradient_;
    accum_gradient_ = new Tensor(size);
    if (accum_delta_) delete accum_delta_;
    accum_delta_ = new Tensor(size);
    if (update_delta_) delete update_delta_;
    update_delta_ = new Tensor(size);
  }
27
  ~AdadeltaOptimizer() {
D
dzhwinter 已提交
28 29 30
    if (accum_gradient_) delete accum_gradient_;
    if (accum_delta_) delete accum_delta_;
    if (update_delta_) delete update_delta_;
31
  }
D
dzhwinter 已提交
32
  void Update(const Tensor *gradient);
D
dzhwinter 已提交
33 34
  const char *SerializeState(int *state_len);
  void DeSerializeState(const std::string &state);
35 36

private:
D
dzhwinter 已提交
37 38 39 40 41 42
  Tensor *accum_gradient_;
  Tensor *accum_delta_;
  Tensor *update_delta_;
  double rho_;
  double epsilon_;
  double decay_;
43 44 45 46
};

}  // namespace optimizer
}  // namespace paddle