From 076dcb9b45a1a8dd385eee58aa42da042798cada Mon Sep 17 00:00:00 2001
From: yangyaming
Date: Fri, 8 Sep 2017 17:21:17 +0800
Subject: [PATCH] Simplify the initialization for weights.

---
 paddle/operators/smooth_l1_loss_op.h | 15 ++++-----------
 1 file changed, 4 insertions(+), 11 deletions(-)

diff --git a/paddle/operators/smooth_l1_loss_op.h b/paddle/operators/smooth_l1_loss_op.h
index 218fb4c5a5..8af831ae35 100644
--- a/paddle/operators/smooth_l1_loss_op.h
+++ b/paddle/operators/smooth_l1_loss_op.h
@@ -141,22 +141,12 @@ class SmoothL1LossGradKernel : public framework::OpKernel {
     diff.device(place) = EigenVector<T>::Flatten(*in2).unaryExpr(
         SmoothL1LossBackward<T>(sigma2));
 
-    auto* out0 = context.Output<Tensor>(framework::GradVarName("X"));
-    auto* out1 = context.Output<Tensor>(framework::GradVarName("Y"));
-
     // compute weights
     Tensor paddle_weights;
     paddle_weights.mutable_data<T>(mat_dims, context.GetPlace());
     auto weights = EigenMatrix<T>::From(paddle_weights);
     // initialize to 1.0
-    if (platform::is_cpu_place(context.GetPlace())) {
-      weights.setConstant(static_cast<T>(1.0));
-    } else {
-      Tensor paddle_cpu_weights;
-      paddle_cpu_weights.mutable_data<T>(mat_dims, platform::CPUPlace());
-      EigenMatrix<T>::From(paddle_cpu_weights).setConstant(static_cast<T>(1.0));
-      paddle_weights.CopyFrom<T>(paddle_cpu_weights, context.GetPlace());
-    }
+    weights.device(place) = weights.constant(static_cast<T>(1.0));
     if (has_weight) {
       auto inside_weight = EigenMatrix<T>::From(*in0, mat_dims);
       auto outside_weight = EigenMatrix<T>::From(*in1, mat_dims);
@@ -170,6 +160,9 @@ class SmoothL1LossGradKernel : public framework::OpKernel {
             Eigen::array<int, 2>({{1, static_cast<int>(cols)}})) *
         weights * diff_mat_view;
 
+    auto* out0 = context.Output<Tensor>(framework::GradVarName("X"));
+    auto* out1 = context.Output<Tensor>(framework::GradVarName("Y"));
+
     if (out0) {
       out0->mutable_data<T>(context.GetPlace());
       auto x_grad = EigenMatrix<T>::From(*out0, mat_dims);
--
GitLab
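
The patch drops the CPU-staging path (fill a host-side tensor with 1.0, then copy it to the kernel's place) and instead assigns an Eigen constant expression through .device(place), so the fill is evaluated directly on whatever device the kernel runs on. Below is a minimal standalone sketch of that Eigen idiom only; Eigen::Tensor and Eigen::DefaultDevice are stand-ins for the EigenMatrix view and execution device used in the kernel and are not PaddlePaddle types.

// Sketch of the idiom used by the patch: assigning tensor.constant(v)
// through .device(dev) lets Eigen evaluate the fill on the bound device,
// so no host-side staging tensor or explicit copy is needed.
#include <unsupported/Eigen/CXX11/Tensor>
#include <iostream>

int main() {
  Eigen::Tensor<float, 2> weights(3, 4);         // uninitialized buffer
  Eigen::DefaultDevice dev;                      // in a GPU kernel this would be an Eigen::GpuDevice
  weights.device(dev) = weights.constant(1.0f);  // fill with 1.0 on that device
  std::cout << weights << "\n";                  // prints a 3x4 block of ones
  return 0;
}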