From 0fc42012a0073a0614e2c0b2634ab9ef7e337916 Mon Sep 17 00:00:00 2001 From: dzhwinter Date: Sat, 10 Jun 2017 01:58:50 +0800 Subject: [PATCH] "update interface" --- paddle/optimizer/adadelta_optimizer.h | 3 --- paddle/optimizer/adagrad_optimizer.h | 2 ++ paddle/optimizer/adam_optimizer.cc | 7 ------- paddle/optimizer/adam_optimizer.h | 9 +++++---- paddle/optimizer/serialization.h | 2 +- 5 files changed, 8 insertions(+), 15 deletions(-) diff --git a/paddle/optimizer/adadelta_optimizer.h b/paddle/optimizer/adadelta_optimizer.h index 31f29f86752..e0f544a90e5 100644 --- a/paddle/optimizer/adadelta_optimizer.h +++ b/paddle/optimizer/adadelta_optimizer.h @@ -10,9 +10,6 @@ public: AdadeltaOptimizer( Tensor *parameter, LrPolicy *lr, double rho, double epsilon, double decay) : ParameterOptimizer(parameter, lr), - accum_gradient_(nullptr), - accum_delta_(nullptr), - update_delta_(nullptr), rho_(rho), epsilon_(epsilon), decay_(decay) { diff --git a/paddle/optimizer/adagrad_optimizer.h b/paddle/optimizer/adagrad_optimizer.h index 14a32cb683a..ebc0fe2acc6 100644 --- a/paddle/optimizer/adagrad_optimizer.h +++ b/paddle/optimizer/adagrad_optimizer.h @@ -20,6 +20,8 @@ public: if (accum_gradient_) delete accum_gradient_; } void Update(const Tensor *gradient); + const char *SerializeState(int *state_len); + void DeSerializeState(const std::string &state); private: Tensor *accum_gradient_; diff --git a/paddle/optimizer/adam_optimizer.cc b/paddle/optimizer/adam_optimizer.cc index 20cf4ef5a11..974039cf6dc 100644 --- a/paddle/optimizer/adam_optimizer.cc +++ b/paddle/optimizer/adam_optimizer.cc @@ -4,13 +4,6 @@ namespace paddle { namespace optimizer { -void AdamOptimizer::set_weight(Tensor *p) { - parameter_ = p; - size_t size = p->size(); - momentums_ = new Tensor(size); - velocitys_ = new Tensor(size); -} - void AdamOptimizer::Update(const Tensor *gradient) { num_sample_passed_ += 1; double learning_rate = lr_policy_->LearningRate(num_sample_passed_); diff --git 
a/paddle/optimizer/adam_optimizer.h b/paddle/optimizer/adam_optimizer.h index cb211c6d888..b8be2ca2227 100644 --- a/paddle/optimizer/adam_optimizer.h +++ b/paddle/optimizer/adam_optimizer.h @@ -13,18 +13,19 @@ public: double epsilon, double decay) : ParameterOptimizer(parameter, lr), - momentums_(nullptr), - velocitys_(nullptr), beta_1_(beta_1), beta_2_(beta_2), epsilon_(epsilon), - decay_(decay) {} + decay_(decay) { + size_t size = parameter->size(); + momentums_ = new Tensor(size); + velocitys_ = new Tensor(size); + } ~AdamOptimizer() { if (momentums_) delete momentums_; if (velocitys_) delete velocitys_; } void Update(const Tensor *gradient); - void set_weight(Tensor *p); private: Tensor *momentums_; diff --git a/paddle/optimizer/serialization.h b/paddle/optimizer/serialization.h index a330dd96e93..18088991754 100644 --- a/paddle/optimizer/serialization.h +++ b/paddle/optimizer/serialization.h @@ -17,7 +17,7 @@ unsigned CalStateSize(const HEAD& head, const TAIL&... tail) { if (std::is_fundamental::value) { return sizeof head + CalStateSize(tail...); } else { - return sizeof(head[0] * head->size()) + CalStateSize(tail...); + return sizeof(head[0]) * head->size() + CalStateSize(tail...); } } -- GitLab