adam_optimizer.h
#pragma once

#include <string>

#include "parameter_optimizer.h"

namespace paddle {
namespace optimizer {

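// Adam optimizer (Kingma & Ba, "Adam: A Method for Stochastic
// Optimization"). It keeps exponential moving averages of the gradient
// (momentum) and of the squared gradient (velocity) and uses them to
// compute per-parameter adaptive step sizes.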
class AdamOptimizer : public ParameterOptimizer {
public:
  AdamOptimizer(Tensor *parameter,
                LrPolicy *lr,
                double beta_1,
                double beta_2,
                double epsilon,
                double decay)
      : ParameterOptimizer(parameter, lr),
        beta_1_(beta_1),
        beta_2_(beta_2),
        epsilon_(epsilon),
        decay_(decay) {
    size_t size = parameter->size();
    momentums_ = new Tensor(size);
    velocitys_ = new Tensor(size);
  }
  ~AdamOptimizer() {
    if (momentums_) delete momentums_;
    if (velocitys_) delete velocitys_;
  }
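  // Performs one Adam step: updates the momentum and velocity accumulators
  // from `gradient`, then adjusts the wrapped parameter with a
  // bias-corrected, per-element step size.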
  void Update(const Tensor *gradient);
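  // Checkpointing hooks: SerializeState is expected to pack the optimizer
  // state (including the momentum and velocity tensors) into a buffer and
  // report its length through `state_len`; DeserializeState restores it.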
  const char *SerializeState(int *state_len);
  void DeserializeState(const std::string &state);

private:
  Tensor *momentums_;  // first-moment estimate (running mean of gradients)
  Tensor *velocitys_;  // second-moment estimate (running mean of squared gradients)
  double beta_1_;   // decay rate for the first-moment estimate
  double beta_2_;   // decay rate for the second-moment estimate
  double epsilon_;  // small constant for numerical stability
  double decay_;    // weight-decay (L2 regularization) coefficient
};

}  // namespace optimizer
}  // namespace paddle
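
For reference, below is a minimal sketch of what the Update step in the corresponding adam_optimizer.cc could look like. It is an assumption-laden illustration, not the actual implementation: it presumes the members inherited from ParameterOptimizer are named parameter_, lr_policy_, and num_sample_passed_, that LrPolicy exposes a LearningRate(step) method, that Tensor provides element access via operator[] (const and non-const), and that decay_ acts as an L2 weight-decay term.

#include <cmath>

#include "adam_optimizer.h"

namespace paddle {
namespace optimizer {

void AdamOptimizer::Update(const Tensor *gradient) {
  num_sample_passed_ += 1;  // assumed step counter from ParameterOptimizer
  double lr = lr_policy_->LearningRate(num_sample_passed_);
  // Fold the bias corrections of both moment estimates into the step size,
  // as in the Adam paper: lr_t = lr * sqrt(1 - beta2^t) / (1 - beta1^t).
  double coef1 = 1.0 - std::pow(beta_1_, num_sample_passed_);
  double coef2 = 1.0 - std::pow(beta_2_, num_sample_passed_);
  lr *= std::sqrt(coef2) / coef1;
  Tensor &param = *parameter_;
  const Tensor &grad = *gradient;
  Tensor &m = *momentums_;
  Tensor &v = *velocitys_;
  for (size_t i = 0; i < param.size(); ++i) {
    // Exponential moving averages of the gradient and squared gradient.
    m[i] = beta_1_ * m[i] + (1.0 - beta_1_) * grad[i];
    v[i] = beta_2_ * v[i] + (1.0 - beta_2_) * grad[i] * grad[i];
    // Parameter step plus an (assumed) L2 weight-decay term.
    param[i] -= lr * (m[i] / (std::sqrt(v[i]) + epsilon_) + decay_ * param[i]);
  }
}

}  // namespace optimizer
}  // namespace paddle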