adagrad_optimizer.h

#pragma once

#include "parameter_optimizer.h"

namespace paddle {
namespace optimizer {

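// Adagrad keeps a per-element accumulator of squared gradients and scales each
// step by the inverse square root of that accumulator. A sketch of the intended
// rule, assuming the standard Adagrad update (the actual definition is expected
// to live in the accompanying implementation file; decay_ presumably adds a
// weight-decay term on top of this):
//
//   accum_gradient_ += gradient * gradient                        (element-wise)
//   parameter       -= lr * gradient / (sqrt(accum_gradient_) + epsilon_)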
class AdagradOptimizer : public ParameterOptimizer {
public:
  AdagradOptimizer(Tensor *parameter,
                   LrPolicy *lr,
                   double epsilon,
                   double decay)
      : ParameterOptimizer(parameter, lr), epsilon_(epsilon), decay_(decay) {
    // Allocate the per-element accumulator of squared gradients, sized to
    // match the parameter tensor.
    size_t size = parameter->size();
    accum_gradient_ = new Tensor(size);
  }
  ~AdagradOptimizer() {
    if (accum_gradient_) delete accum_gradient_;
  }
  // Applies a single Adagrad step for the given gradient.
  void Update(const Tensor *gradient);
  // Serialize / restore the optimizer state for checkpointing.
  const char *SerializeState(int *state_len);
  void DeserializeState(const std::string &state);

private:
  Tensor *accum_gradient_;  // running per-element sum of squared gradients
  double epsilon_;          // small constant for numerical stability
  double decay_;            // decay (regularization) coefficient
};
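
// A hypothetical usage sketch (param, lr_policy, and gradient stand for objects
// created elsewhere; the concrete LrPolicy type is an assumption):
//
//   Tensor *param = new Tensor(1024);
//   LrPolicy *lr_policy = /* some concrete LrPolicy implementation */;
//   AdagradOptimizer opt(param, lr_policy, /*epsilon=*/1e-6, /*decay=*/0.0);
//   opt.Update(gradient);  // gradient: Tensor* with the same size as param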

}  // namespace optimizer
}  // namespace paddle