diff --git a/paddle/operators/mul_op.cc b/paddle/operators/mul_op.cc
index 89e0375a7a043730685c4c0883ac672bdd688159..cd74c8b976d18ffecd50077cc81e1fce56bea155 100644
--- a/paddle/operators/mul_op.cc
+++ b/paddle/operators/mul_op.cc
@@ -67,7 +67,7 @@ protected:
 }  // namespace paddle
 
 REGISTER_OP(mul, paddle::operators::MulOp, paddle::operators::MulOpMaker);
-REGISTER_GRADIENT_OP(mul, paddle::operators::MulOpGrad);
+REGISTER_GRADIENT_OP(mul, mul_grad, paddle::operators::MulOpGrad);
 REGISTER_OP_CPU_KERNEL(
     mul, paddle::operators::MulKernel<paddle::platform::CPUPlace, float>);
diff --git a/paddle/operators/sigmoid_op.cc b/paddle/operators/sigmoid_op.cc
index 7dc58bbb10007545cd281ae7da359e4c2b32fae0..bf63af28b003daad0ab8c223e71a561437ee663a 100644
--- a/paddle/operators/sigmoid_op.cc
+++ b/paddle/operators/sigmoid_op.cc
@@ -56,7 +56,7 @@ protected:
 
 REGISTER_OP(sigmoid, paddle::operators::SigmoidOp,
             paddle::operators::SigmoidOpMaker);
-REGISTER_GRADIENT_OP(sigmoid, paddle::operators::SigmoidOpGrad);
+REGISTER_GRADIENT_OP(sigmoid, sigmoid_grad, paddle::operators::SigmoidOpGrad);
 
 REGISTER_OP_CPU_KERNEL(
     sigmoid,
diff --git a/paddle/operators/softmax_op.cc b/paddle/operators/softmax_op.cc
index 1d10a415d0208e1edb881eacad951a07fcbb8b5c..82f72fa19f690bebdff01629e75d17eecd6ada74 100644
--- a/paddle/operators/softmax_op.cc
+++ b/paddle/operators/softmax_op.cc
@@ -59,6 +59,6 @@ protected:
 namespace ops = paddle::operators;
 
 REGISTER_OP(softmax, ops::SoftmaxOp, ops::SoftmaxOpMaker);
-REGISTER_GRADIENT_OP(softmax, paddle::operators::SoftmaxOpGrad);
+REGISTER_GRADIENT_OP(softmax, softmax_grad, paddle::operators::SoftmaxOpGrad);
 REGISTER_OP_CPU_KERNEL(softmax,
                        ops::SoftmaxKernel<paddle::platform::CPUPlace, float>);