From 8b6b0da062080a0392709bcb8a85800b49249bad Mon Sep 17 00:00:00 2001
From: minqiyang
Date: Mon, 24 Dec 2018 14:15:08 +0800
Subject: [PATCH] Use adam_update

test=develop
---
 paddle/fluid/operators/optimizers/adam_op.h | 12 +-----------
 1 file changed, 1 insertion(+), 11 deletions(-)

diff --git a/paddle/fluid/operators/optimizers/adam_op.h b/paddle/fluid/operators/optimizers/adam_op.h
index b47307f5a..1138bb740 100644
--- a/paddle/fluid/operators/optimizers/adam_op.h
+++ b/paddle/fluid/operators/optimizers/adam_op.h
@@ -315,18 +315,8 @@ struct SparseAdamFunctor {
     for (size_t i = 0U, j = 0U; i != row_count; ++i) {
       if (i == *(rows_ + j)) {
         for (size_t k = 0U; k != row_numel_; ++k) {
-          T mom1 = moment1_[i * row_numel_ + k];
-          T mom2 = moment2_[i * row_numel_ + k];
-          T p = param_[i * row_numel_ + k];
           T g = grad_[j * row_numel_ + k];
-          mom1 = beta1_ * mom1 + (1 - beta1_) * g;
-          mom2 = beta2_ * mom2 + (1 - beta2_) * g * g;
-
-          p -= lr * (mom1 / (sqrt(mom2) + epsilon_));
-          // Write back to global memory
-          moment1_out_[i * row_numel_ + k] = mom1;
-          moment2_out_[i * row_numel_ + k] = mom2;
-          param_out_[i * row_numel_ + k] = p;
+          adam_update(i * row_numel_ + k, g);
         }
         ++j;
       } else {
--
GitLab
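
For context, the body this patch deletes is the per-element dense Adam step, which the new adam_update call is expected to encapsulate so the row-matching loop no longer carries the update math inline. Below is a minimal sketch of what such a helper could look like, reconstructed from the deleted lines; the struct name AdamUpdateSketch, the member layout, and the assumption that the caller precomputes the bias-corrected learning rate lr_ are illustrative, not the actual PaddlePaddle implementation.

#include <cmath>
#include <cstddef>

// Hypothetical sketch of an adam_update helper matching the deleted inline
// code. Member names mirror those in the removed lines; lr_ is assumed to be
// the already bias-corrected learning rate computed before the row loop.
template <typename T>
struct AdamUpdateSketch {
  T beta1_, beta2_, epsilon_, lr_;
  const T* moment1_;
  const T* moment2_;
  const T* param_;
  T* moment1_out_;
  T* moment2_out_;
  T* param_out_;

  // One Adam step at flattened index i for gradient element g: update the
  // biased first and second moment estimates, then step the parameter.
  inline void adam_update(size_t i, T g) const {
    T mom1 = beta1_ * moment1_[i] + (1 - beta1_) * g;
    T mom2 = beta2_ * moment2_[i] + (1 - beta2_) * g * g;
    T p = param_[i] - lr_ * (mom1 / (std::sqrt(mom2) + epsilon_));
    // Write back to global memory, as the deleted inline code did.
    moment1_out_[i] = mom1;
    moment2_out_[i] = mom2;
    param_out_[i] = p;
  }
};

Factoring the update into a single helper means the sparse row loop and any other call site (for instance, a per-index GPU functor) can share one implementation of the Adam math instead of duplicating it.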