diff --git a/paddle/operators/smooth_l1_loss_op.cc b/paddle/operators/smooth_l1_loss_op.cc
index e9a38474174978d268c4ed20909f5382a84e31de..d2c6d955a72d46ddbb2b5533378e2aca4f2cc929 100644
--- a/paddle/operators/smooth_l1_loss_op.cc
+++ b/paddle/operators/smooth_l1_loss_op.cc
@@ -111,7 +111,8 @@ class SmoothL1LossGradOp : public framework::OperatorWithKernel {
 
 namespace ops = paddle::operators;
 REGISTER_OP(smooth_l1_loss, ops::SmoothL1LossOp,
-            ops::SmoothL1LossOpMaker, ops::SmoothL1LossGradOp);
+            ops::SmoothL1LossOpMaker, smooth_l1_loss_grad,
+            ops::SmoothL1LossGradOp);
 REGISTER_OP_CPU_KERNEL(
     smooth_l1_loss, ops::SmoothL1LossKernel<paddle::platform::CPUPlace, float>);
 REGISTER_OP_CPU_KERNEL(
diff --git a/paddle/operators/smooth_l1_loss_op.h b/paddle/operators/smooth_l1_loss_op.h
index bb823a56a324a1c0295684c0a7bfc3e6dc0ebdd2..218fb4c5a5200fb49d65857d0fa66748a9d92b59 100644
--- a/paddle/operators/smooth_l1_loss_op.h
+++ b/paddle/operators/smooth_l1_loss_op.h
@@ -59,7 +59,7 @@ class SmoothL1LossKernel : public framework::OpKernel {
     out1->mutable_data<T>(context.GetPlace());
     auto place = context.GetEigenDevice<Place>();
 
-    auto sigma = static_cast<T>(context.op_.GetAttr<AttrType>("sigma"));
+    auto sigma = static_cast<T>(context.op().Attr<AttrType>("sigma"));
     T sigma2 = sigma * sigma;
     bool has_weight = (in2 != nullptr) && (in3 != nullptr);
 
@@ -122,7 +122,7 @@ class SmoothL1LossGradKernel : public framework::OpKernel {
     auto* in1 = context.Input<Tensor>("OutsideWeight");
     auto* in2 = context.Input<Tensor>("diff");
     auto* og = context.Input<Tensor>(framework::GradVarName("Out"));
-    auto sigma = static_cast<T>(context.op_.GetAttr<AttrType>("sigma"));
+    auto sigma = static_cast<T>(context.op().Attr<AttrType>("sigma"));
     T sigma2 = sigma * sigma;
     bool has_weight = (in0 != nullptr) && (in1 != nullptr);
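
Note on the changes above (a reading aid, not part of the patch): the .cc hunk updates the registration call site for the new REGISTER_OP signature, which now takes the gradient op's type name (smooth_l1_loss_grad) between the maker class and the grad-op class. The two .h hunks switch attribute reads from the public op_ member to the op() accessor, with GetAttr renamed to Attr. A minimal before/after sketch of the kernel-side change, assuming the usual T/AttrType template parameters on these kernels:

    // Old style: reach directly into the public op_ member.
    auto sigma = static_cast<T>(context.op_.GetAttr<AttrType>("sigma"));

    // New style: op_ is encapsulated behind op(), and GetAttr is renamed Attr.
    auto sigma = static_cast<T>(context.op().Attr<AttrType>("sigma"));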