#ifndef PADDLE_ADAGRAD_OPTIMIZER_H_
#define PADDLE_ADAGRAD_OPTIMIZER_H_

#include "parameter_optimizer.h"

namespace paddle {
namespace optimizer {

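// Adagrad optimizer: keeps a per-element running sum of squared gradients and
// scales each step by 1 / sqrt(accumulated_sum + epsilon), so parameters with
// a history of large gradients receive smaller updates. The decay coefficient
// is presumably applied as an additional weight-decay term in the .cc
// implementation.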
template <class T>
class AdagradOptimizer : public ParameterOptimizer<T> {
public:
  using ParameterOptimizer<T>::parameter_;
  using ParameterOptimizer<T>::num_sample_passed;
  using ParameterOptimizer<T>::lr_policy;
  AdagradOptimizer(double epsilon, double decay, BaseLr *lr)
      : ParameterOptimizer<T>(lr),
        accum_gradient(nullptr), epsilon(epsilon), decay(decay) {}
  ~AdagradOptimizer() {
    if (accum_gradient) delete accum_gradient;
  }
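  // update() is defined in the corresponding .cc file. A minimal sketch of the
  // expected per-element Adagrad step (assuming plain gradient descent and that
  // decay acts as an L2 weight-decay term) is:
  //
  //   accum_gradient[i] += gradient[i] * gradient[i];
  //   parameter_[i] -= learning_rate *
  //       (gradient[i] / std::sqrt(accum_gradient[i] + epsilon) +
  //        decay * parameter_[i]);
  //
  // where learning_rate is obtained from lr_policy and num_sample_passed.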
  void update(const Tensor<T> &gradient);
  // Registers the parameter tensor this optimizer updates.
  void set_weight(const Tensor<T> *p);
  // Returns a raw pointer to the current parameter values.
  T *get_weight() const;

private:
  // Per-element running sum of squared gradients.
  Tensor<T> *accum_gradient;
  // Small constant added under the square root for numerical stability.
  double epsilon;
  // Decay coefficient used in the update (see the sketch above update()).
  double decay;
};

}  // namespace optimizer
}  // namespace paddle

#endif  // PADDLE_ADAGRAD_OPTIMIZER_H_