diff --git a/paddle/operators/smooth_l1_loss_op.h b/paddle/operators/smooth_l1_loss_op.h
index 218fb4c5a5200fb49d65857d0fa66748a9d92b59..8af831ae3577ccf79ff20cc65df7f9652cd88e6e 100644
--- a/paddle/operators/smooth_l1_loss_op.h
+++ b/paddle/operators/smooth_l1_loss_op.h
@@ -141,22 +141,12 @@ class SmoothL1LossGradKernel : public framework::OpKernel {
     diff.device(place) = EigenVector<T>::Flatten(*in2).unaryExpr(
         SmoothL1LossBackward<T>(sigma2));
 
-    auto* out0 = context.Output<Tensor>(framework::GradVarName("X"));
-    auto* out1 = context.Output<Tensor>(framework::GradVarName("Y"));
-
     // compute weights
     Tensor paddle_weights;
     paddle_weights.mutable_data<T>(mat_dims, context.GetPlace());
     auto weights = EigenMatrix<T>::From(paddle_weights);
     // initialize to 1.0
-    if (platform::is_cpu_place(context.GetPlace())) {
-      weights.setConstant(static_cast<T>(1.0));
-    } else {
-      Tensor paddle_cpu_weights;
-      paddle_cpu_weights.mutable_data<T>(mat_dims, platform::CPUPlace());
-      EigenMatrix<T>::From(paddle_cpu_weights).setConstant(static_cast<T>(1.0));
-      paddle_weights.CopyFrom<T>(paddle_cpu_weights, context.GetPlace());
-    }
+    weights.device(place) = weights.constant(static_cast<T>(1.0));
     if (has_weight) {
       auto inside_weight = EigenMatrix<T>::From(*in0, mat_dims);
       auto outside_weight = EigenMatrix<T>::From(*in1, mat_dims);
@@ -170,6 +160,9 @@ class SmoothL1LossGradKernel : public framework::OpKernel {
             Eigen::array<int, 2>({{1, static_cast<int>(cols)}})) *
         weights * diff_mat_view;
 
+    auto* out0 = context.Output<Tensor>(framework::GradVarName("X"));
+    auto* out1 = context.Output<Tensor>(framework::GradVarName("Y"));
+
     if (out0) {
       out0->mutable_data<T>(context.GetPlace());
       auto x_grad = EigenMatrix<T>::From(*out0, mat_dims);
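
The core of the patch is replacing the place-dependent branch (CPU `setConstant`, or a host-side staging tensor plus `CopyFrom` for GPU) with a single Eigen expression, `weights.device(place) = weights.constant(...)`, which evaluates the fill on whichever device `place` wraps. Below is a minimal standalone sketch of that Eigen idiom, not part of the patch itself; it assumes plain Eigen with `Eigen::DefaultDevice` standing in for the kernel's `place`, and the tensor name and shape are illustrative.

// fill_sketch.cc -- device-side constant fill with Eigen's tensor module
#include <unsupported/Eigen/CXX11/Tensor>

#include <iostream>

int main() {
  Eigen::Tensor<float, 2> weights(2, 3);
  // In the kernel this would be the CPU or GPU Eigen device behind `place`.
  Eigen::DefaultDevice dev;

  // Evaluate the constant fill on the device itself: no host-side staging
  // tensor and no explicit host-to-device copy are needed.
  weights.device(dev) = weights.constant(1.0f);

  std::cout << weights << "\n";  // prints a 2x3 matrix of ones
  return 0;
}

Because the assignment goes through `.device(...)`, the same line works unchanged for a `GpuDevice` backed by a CUDA stream, which is presumably why the diff can also drop the `platform::is_cpu_place` branch.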