From 3dc29b390537cca68f43f21f44c2c2fde84fa297 Mon Sep 17 00:00:00 2001
From: Qiao Longfei
Date: Thu, 13 Dec 2018 22:02:55 +0800
Subject: [PATCH] change sparse_update to adam_update

---
 paddle/fluid/operators/optimizers/adam_op.h | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/paddle/fluid/operators/optimizers/adam_op.h b/paddle/fluid/operators/optimizers/adam_op.h
index ca5454ef0..25e23c5f9 100644
--- a/paddle/fluid/operators/optimizers/adam_op.h
+++ b/paddle/fluid/operators/optimizers/adam_op.h
@@ -202,7 +202,7 @@ struct SparseAdamFunctor {
         row_count_(row_count),
         sparse_mode_(sparse_mode) {}
 
-  inline HOSTDEVICE void sparse_update(size_t i, T g) const {
+  inline HOSTDEVICE void adam_update(size_t i, T g) const {
     // The following code is the same as dense
     T mom1 = moment1_[i];
     T mom2 = moment2_[i];
@@ -228,7 +228,7 @@ struct SparseAdamFunctor {
     auto row_idx =
         math::BinarySearch<int64_t>(rows_, row_count_, i / row_numel_);
     T g = row_idx >= 0 ? grad_[row_idx * row_numel_ + i % row_numel_] : 0;
-    sparse_update(i, g);
+    adam_update(i, g);
   }
 };
 
@@ -364,7 +364,7 @@ class AdamOpKernel : public framework::OpKernel<T> {
         for (size_t offset = 0; offset < row_numel; ++offset) {
           size_t i = rows[row_index] * row_numel + offset;
           T g = grad_data[row_index * row_numel + offset];
-          functor.sparse_update(i, g);
+          functor.adam_update(i, g);
         }
       }
     } else {
-- 
GitLab
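
Context for the rename: the hunks show only the first lines of the renamed method, but the patch's own comment notes the body "is the same as dense", i.e. it performs the standard per-element Adam step; only the gradient gathering (the BinarySearch row lookup, or the direct loop in AdamOpKernel) is sparse-specific, which is why adam_update is the more accurate name. Below is a standalone C++ sketch of that update under standard Adam semantics (Kingma & Ba, 2015); adam_update_sketch and its parameter list are illustrative and are not the PaddlePaddle functor's actual signature.

#include <cmath>
#include <cstddef>

// Sketch (not the PaddlePaddle source) of the per-element Adam step that
// adam_update applies for element i with gradient g. beta1/beta2 are the
// moment decay rates, epsilon the divide-by-zero guard, beta1_pow/beta2_pow
// the accumulated powers beta1^t and beta2^t used for bias correction.
template <typename T>
void adam_update_sketch(std::size_t i, T g, T beta1, T beta2, T epsilon,
                        T lr, T beta1_pow, T beta2_pow, T* moment1,
                        T* moment2, T* param) {
  T mom1 = moment1[i];
  T mom2 = moment2[i];
  T p = param[i];

  // Fold the bias correction for step t into the learning rate.
  lr *= std::sqrt(1 - beta2_pow) / (1 - beta1_pow);

  // Exponential moving averages of the gradient and its square.
  mom1 = beta1 * mom1 + (1 - beta1) * g;
  mom2 = beta2 * mom2 + (1 - beta2) * g * g;

  // Parameter step; epsilon keeps the denominator away from zero.
  p -= lr * (mom1 / (std::sqrt(mom2) + epsilon));

  // Write the updated moments and parameter back.
  moment1[i] = mom1;
  moment2[i] = mom2;
  param[i] = p;
}

Because this step is identical for the dense and sparse paths, both call sites in the patch (the row-lookup operator() and the explicit row/offset loop in AdamOpKernel) can share the one renamed method.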