From dfe85fb358d2b022ee4b4a73212e3d864b10ce4b Mon Sep 17 00:00:00 2001
From: Qiao Longfei
Date: Fri, 28 Dec 2018 19:02:28 +0800
Subject: [PATCH] fix build

---
 paddle/fluid/operators/optimizers/adam_op.h | 12 +++++++-----
 1 file changed, 7 insertions(+), 5 deletions(-)

diff --git a/paddle/fluid/operators/optimizers/adam_op.h b/paddle/fluid/operators/optimizers/adam_op.h
index dda4ffb9087..61b9384f842 100644
--- a/paddle/fluid/operators/optimizers/adam_op.h
+++ b/paddle/fluid/operators/optimizers/adam_op.h
@@ -431,17 +431,19 @@ class AdamOpKernel : public framework::OpKernel<T> {
       } else {
         // merge duplicated rows if any.
         // The rows of grad_merge have been sorted inside MergeAdd functor
+        framework::SelectedRows* grad_merge_var;
         scatter::MergeAdd<DeviceContext, T> merge_func;
         if (platform::is_cpu_place(ctx.GetPlace())) {
-          grad_merge_ptr = &cpu_grad_merge;
+          grad_merge_var = &cpu_grad_merge;
         } else {
           // FIXME(qiao): GPU also need to fix this
-          auto* grad_merge_var = const_cast<framework::Scope&>(ctx.scope())
-                                     .Var()
-                                     ->GetMutable<framework::SelectedRows>();
+          grad_merge_var = const_cast<framework::Scope&>(ctx.scope())
+                               .Var()
+                               ->GetMutable<framework::SelectedRows>();
         }
         merge_func(ctx.template device_context<DeviceContext>(), grad,
-                   grad_merge_ptr, true);
+                   grad_merge_var, true);
+        grad_merge_ptr = grad_merge_var;
       }
 
       auto& grad_merge = *grad_merge_ptr;
--
GitLab
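
The standalone sketch below (not part of the patch) illustrates the pattern this change applies, assuming the build break came from passing the const-qualified grad_merge_ptr to MergeAdd, whose output parameter is mutable: a single non-const pointer is declared before the CPU/GPU branches, assigned in each branch, handed to the merge functor, and only then stored into the const pointer used by the rest of the kernel. All types and the merge function here are simplified stand-ins, not the real Paddle classes.

// Simplified stand-in for framework::SelectedRows and scatter::MergeAdd,
// used only to demonstrate the pointer-hoisting pattern from the patch.
#include <cstdio>
#include <vector>

struct SelectedRows {
  std::vector<long> rows;
  std::vector<float> values;
};

// Like the MergeAdd functor, this writes into a mutable output, so it
// cannot accept a const SelectedRows*.
void merge_add(const SelectedRows& in, SelectedRows* out) {
  out->rows = in.rows;      // a real implementation would merge duplicate rows
  out->values = in.values;
}

int main() {
  SelectedRows grad{{0, 2, 2}, {1.f, 2.f, 3.f}};
  bool on_cpu = true;                      // stands in for platform::is_cpu_place(...)

  SelectedRows cpu_grad_merge;
  SelectedRows gpu_scope_buffer;           // stands in for the scope-owned Var()
  const SelectedRows* grad_merge_ptr = nullptr;

  // Pattern from the patch: one mutable pointer, assigned in both branches.
  SelectedRows* grad_merge_var = nullptr;
  if (on_cpu) {
    grad_merge_var = &cpu_grad_merge;
  } else {
    grad_merge_var = &gpu_scope_buffer;
  }
  merge_add(grad, grad_merge_var);   // OK: mutable pointer matches the signature
  grad_merge_ptr = grad_merge_var;   // downstream code only needs const access

  std::printf("merged %zu rows\n", grad_merge_ptr->rows.size());
  return 0;
}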