adadelta_optimizer.h
#pragma once

#include "parameter_optimizer.h"

namespace paddle {
namespace optimizer {

class AdadeltaOptimizer : public ParameterOptimizer {
public:
  // The three accumulators are allocated to match the parameter size; they
  // hold the per-element running averages that Adadelta maintains.
  AdadeltaOptimizer(
      Tensor *parameter, LrPolicy *lr, double rho, double epsilon, double decay)
      : ParameterOptimizer(parameter, lr),
        accum_gradient_(new Tensor(parameter->size())),
        accum_delta_(new Tensor(parameter->size())),
        update_delta_(new Tensor(parameter->size())),
        rho_(rho),
        epsilon_(epsilon),
        decay_(decay) {}
  ~AdadeltaOptimizer() {
    if (accum_gradient_) delete accum_gradient_;
    if (accum_delta_) delete accum_delta_;
    if (update_delta_) delete update_delta_;
  }
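  // Update() is expected to apply the per-element Adadelta rule
  // (Zeiler, 2012); a rough sketch, assuming decay_ acts as an extra
  // weight-decay term on top of the original algorithm:
  //
  //   accum_gradient = rho * accum_gradient + (1 - rho) * grad * grad
  //   update_delta   = sqrt((accum_delta + epsilon) /
  //                         (accum_gradient + epsilon)) * grad
  //   accum_delta    = rho * accum_delta + (1 - rho) * update_delta^2
  //   parameter     -= lr * (update_delta + decay * parameter)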
  void Update(const Tensor *gradient);

  // Serialize/restore the optimizer's internal state (accumulators and
  // hyperparameters) for checkpointing.
  const char *SerializeState(int *state_len);
  void DeserializeState(const std::string &state);

private:
  Tensor *accum_gradient_;  // running average of squared gradients
  Tensor *accum_delta_;     // running average of squared updates
  Tensor *update_delta_;    // update computed for the current step
  double rho_;              // decay rate of the running averages
  double epsilon_;          // small constant for numerical stability
  double decay_;            // weight-decay coefficient
};

}  // namespace optimizer
}  // namespace paddle