parameter_optimizer.h
#ifndef PADDLE_PARAMETER_OPTIMIZER_H_
#define PADDLE_PARAMETER_OPTIMIZER_H_

#include <glog/logging.h>
#include <functional>
#include <string>
#include "OptimizerConfig.pb.h"
#include "Tensor.h"
#include "lr_policy.h"

namespace paddle {
namespace optimizer {

template <class T>
class ParameterOptimizer {
public:
  /**
   * @brief update hook, for algorithms that need to traverse the parameter
   * more than once.
   */
  explicit ParameterOptimizer(const OptimizerConfig &config) : config_(config) {}

  static ParameterOptimizer *create(const ::std::string &config_proto);
  virtual void update(const Tensor<T> &gradient) = 0;
  virtual void destroy() = 0;
  virtual T *get_weight() const;
  virtual void set_weight(const Tensor<T> *parameter);
  // Serialize the optimizer config proto at runtime for saving a checkpoint.
  virtual char *get_config_proto();
  virtual ~ParameterOptimizer() { delete parameter_; }

private:
  bool config_valid(const ::std::string &config) const;
  OptimizerConfig config_;
  // parameter being optimized; owned and released by this optimizer
  Tensor<T> *parameter_;

  // learning rate policy
  BaseLr *lr_policy;
  // number of samples processed so far
  uint64_t num_sample_passed;

  ParameterOptimizer(const ParameterOptimizer &) = delete;
  ParameterOptimizer &operator=(const ParameterOptimizer &) = delete;
  /**
   * @brief indicates whether L1 / L2 regularization is used
   */
};

}  // namespace optimizer
}  // namespace paddle

#endif  // PADDLE_PARAMETER_OPTIMIZER_H_
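
// Usage sketch (illustrative, not part of the original header): a minimal
// example of how a caller might drive this interface. The config string,
// tensor construction, and function name example_step are assumptions; only
// the create/set_weight/update/get_config_proto calls come from the class
// declared above.
//
//   #include "parameter_optimizer.h"
//
//   void example_step(const std::string &config_proto,
//                     paddle::optimizer::Tensor<float> *weights,
//                     const paddle::optimizer::Tensor<float> &gradient) {
//     using paddle::optimizer::ParameterOptimizer;
//     // Factory call: parses the serialized OptimizerConfig proto and
//     // returns a concrete optimizer implementation.
//     ParameterOptimizer<float> *opt =
//         ParameterOptimizer<float>::create(config_proto);
//     opt->set_weight(weights);                  // hand over the parameter tensor
//     opt->update(gradient);                     // apply one update step
//     char *snapshot = opt->get_config_proto();  // config proto for checkpointing
//     (void)snapshot;
//     delete opt;  // ~ParameterOptimizer() also deletes the parameter tensor
//   }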