Commit 61cb4f2f authored by dzhwinter

"fix ci"

Parent 425a1e76
@@ -13,16 +13,18 @@ See the License for the specific language governing permissions and
 limitations under the License. */
 
 #include "paddle/fluid/operators/activation_op.h"
+#include <string>
 #include "paddle/fluid/operators/mkldnn_activation_op.h"
 
 namespace paddle {
 namespace operators {
 
 #define REGISTER_ACTIVATION_OP_MAKER(OP_NAME, OP_COMMENT)                   \
-  class OP_NAME##OpMaker : public framework::OpProtoAndCheckerMaker {       \
+  class OP_NAME##OpMaker                                                    \
+      : public ::paddle::framework::OpProtoAndCheckerMaker {                \
   public:                                                                   \
    OP_NAME##OpMaker(OpProto *proto, OpAttrChecker *op_checker)              \
-        : framework::OpProtoAndCheckerMaker(proto, op_checker) {            \
+        : ::paddle::framework::OpProtoAndCheckerMaker(proto, op_checker) {  \
     AddInput("X", "Input of " #OP_NAME "operator");                         \
     AddOutput("Out", "Output of" #OP_NAME "operator");                      \
     AddAttr<bool>("use_mkldnn",                                             \
@@ -30,26 +32,28 @@ namespace operators {
           .SetDefault(false);                                                \
     AddComment(#OP_COMMENT);                                                 \
   }                                                                          \
-  }
+  };
 
 #define REGISTER_ACTIVATION_OP_GRAD_MAKER(OP_NAME, KERNEL_TYPE)              \
-  class OP_NAME##GradMaker : public framework::SingleGradOpDescMaker {       \
+  class OP_NAME##GradMaker                                                    \
+      : public ::paddle::framework::SingleGradOpDescMaker {                   \
   public:                                                                     \
-    using framework::SingleGradOpDescMaker::SingleGradOpDescMaker;            \
+    using ::paddle::framework::SingleGradOpDescMaker::SingleGradOpDescMaker;  \
                                                                               \
   protected:                                                                  \
-    std::unique_ptr<framework::OpDesc> Apply() const override {               \
-      auto *op = new framework::OpDesc();                                     \
+    std::unique_ptr<::paddle::framework::OpDesc> Apply() const override {     \
+      auto *op = new ::paddle::framework::OpDesc();                           \
       op->SetType(#KERNEL_TYPE "_grad");                                      \
       op->SetInput("Out", Output("Out"));                                     \
-      op->SetInput(framework::GradVarName("Out"), OutputGrad("Out"));         \
+      op->SetInput(::paddle::framework::GradVarName("Out"),                   \
+                   OutputGrad("Out"));                                        \
                                                                               \
       op->SetAttrMap(Attrs());                                                \
                                                                               \
-      op->SetOutput(framework::GradVarName("X"), InputGrad("X"));             \
-      return std::unique_ptr<framework::OpDesc>(op);                          \
+      op->SetOutput(::paddle::framework::GradVarName("X"), InputGrad("X"));   \
+      return std::unique_ptr<::paddle::framework::OpDesc>(op);                \
     }                                                                         \
-  }
+  };
 
 class ActivationOp : public framework::OperatorWithKernel {
  public:
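For reference, this is roughly what REGISTER_ACTIVATION_OP_GRAD_MAKER(Sigmoid, sigmoid) expands to after this hunk (whitespace added by hand; it compiles against the Paddle fluid headers this file includes, not as a standalone snippet):

class SigmoidGradMaker : public ::paddle::framework::SingleGradOpDescMaker {
 public:
  using ::paddle::framework::SingleGradOpDescMaker::SingleGradOpDescMaker;

 protected:
  std::unique_ptr<::paddle::framework::OpDesc> Apply() const override {
    auto *op = new ::paddle::framework::OpDesc();
    op->SetType("sigmoid_grad");  // from #KERNEL_TYPE "_grad"
    // Only the forward output and its gradient are wired in; the forward
    // input X is not an input of the generated grad op.
    op->SetInput("Out", Output("Out"));
    op->SetInput(::paddle::framework::GradVarName("Out"), OutputGrad("Out"));
    op->SetAttrMap(Attrs());
    op->SetOutput(::paddle::framework::GradVarName("X"), InputGrad("X"));
    return std::unique_ptr<::paddle::framework::OpDesc>(op);
  }
};

As the NOTE added further down explains, this is what lets these gradients run in place: the executor learns that X is unused, rather than assuming every forward input is needed by the gradient operator.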
@@ -449,70 +453,67 @@ REGISTER_ACTIVATION_OP_MAKER(Square, SquareDoc);
 REGISTER_ACTIVATION_OP_MAKER(Softplus, SoftplusDoc);
 REGISTER_ACTIVATION_OP_MAKER(Softsign, SoftsignDoc);
 
+// NOTE(*) only gradient can be inplaced need to register its gradient maker,
+// To tell the executor which input variable is used. By default, every Input
+// variable
+// is used in gradient operator.
+// The operator name written in lowercase intentionally.
 REGISTER_ACTIVATION_OP_GRAD_MAKER(Sigmoid, sigmoid);
-REGISTER_ACTIVATION_OP_GRAD_MAKER(Exp, exp);
 REGISTER_ACTIVATION_OP_GRAD_MAKER(Relu, relu);
+REGISTER_ACTIVATION_OP_GRAD_MAKER(Exp, exp);
 REGISTER_ACTIVATION_OP_GRAD_MAKER(Tanh, tanh);
-REGISTER_ACTIVATION_OP_GRAD_MAKER(Sqrt, sqrt);
 REGISTER_ACTIVATION_OP_GRAD_MAKER(Ceil, ceil);
 REGISTER_ACTIVATION_OP_GRAD_MAKER(Floor, floor);
-REGISTER_ACTIVATION_OP_GRAD_MAKER(Reciprocal, reciprocal);
-REGISTER_ACTIVATION_OP_GRAD_MAKER(Relu6, relu6);
+REGISTER_ACTIVATION_OP_GRAD_MAKER(Sqrt, sqrt);
 REGISTER_ACTIVATION_OP_GRAD_MAKER(SoftRelu, soft_relu);
+REGISTER_ACTIVATION_OP_GRAD_MAKER(Relu6, relu6);
+REGISTER_ACTIVATION_OP_GRAD_MAKER(Reciprocal, reciprocal);
 REGISTER_ACTIVATION_OP_GRAD_MAKER(HardSigmoid, hard_sigmoid);
 
 }  // namespace operators
 }  // namespace paddle
 
 namespace ops = paddle::operators;
 
-#define REGISTER_INPLACE_ACTIVATION_OP(act_type, op_name)                \
-  REGISTER_OPERATOR(act_type, ops::ActivationOp, ops::op_name##OpMaker,  \
-                    ops::op_name##GradMaker);                            \
-  REGISTER_OPERATOR(act_type##grad, ops::ActivationOpGrad)
-
-#define REGISTER_ACTIVATION_OP(act_type, op_name)                 \
-  REGISTER_OP(act_type, ops::ActivationOp, ops::op_name##OpMaker, \
-              act_type##_grad, ops::ActivationOpGrad);
+void DummyFunctor() {}
 
 #define FOR_EACH_INPLACE_OP_FUNCTOR(__macro) \
-  __macro(sigmoid, Sigmoid);                 \
-  __macro(relu, Relu);                       \
-  __macro(exp, Exp);                         \
-  __macro(tanh, Tanh);                       \
-  __macro(ceil, Ceil);                       \
-  __macro(floor, Floor);                     \
-  __macro(sqrt, Sqrt);                       \
-  __macro(soft_relu, SoftRelu);              \
-  __macro(relu6, Relu6);                     \
-  __macro(reciprocal, Reciprocal);           \
-  __macro(hard_sigmoid, HardSigmoid);
+  __macro(Sigmoid, sigmoid);                 \
+  __macro(Relu, relu);                       \
+  __macro(Exp, exp);                         \
+  __macro(Tanh, tanh);                       \
+  __macro(Ceil, ceil);                       \
+  __macro(Floor, floor);                     \
+  __macro(Sqrt, sqrt);                       \
+  __macro(SoftRelu, soft_relu);              \
+  __macro(Relu6, relu6);                     \
+  __macro(Reciprocal, reciprocal);           \
+  __macro(HardSigmoid, hard_sigmoid);
 
 #define FOR_EACH_OP_FUNCTOR(__macro)          \
-  __macro(logsigmoid, LogSigmoid);            \
-  __macro(softshrink, SoftShrink);            \
-  __macro(abs, Abs);                          \
-  __macro(cos, Cos);                          \
-  __macro(sin, Sin);                          \
-  __macro(round, Round);                      \
-  __macro(log, Log);                          \
-  __macro(square, Square);                    \
-  __macro(brelu, BRelu);                      \
-  __macro(pow, Pow);                          \
-  __macro(stanh, STanh);                      \
-  __macro(softplus, Softplus);                \
-  __macro(softsign, Softsign);                \
-  __macro(leaky_relu, LeakyRelu);             \
-  __macro(tanh_shrink, TanhShrink);           \
-  __macro(elu, ELU);                          \
-  __macro(hard_shrink, HardShrink);           \
-  __macro(swish, Swish);                      \
-  __macro(thresholded_relu, ThresholdedRelu);
+  __macro(LogSigmoid, logsigmoid);            \
+  __macro(SoftShrink, softshrink);            \
+  __macro(Abs, abs);                          \
+  __macro(Cos, cos);                          \
+  __macro(Sin, sin);                          \
+  __macro(Round, round);                      \
+  __macro(Log, log);                          \
+  __macro(Square, square);                    \
+  __macro(BRelu, brelu);                      \
+  __macro(Pow, pow);                          \
+  __macro(STanh, stanh);                      \
+  __macro(Softplus, softplus);                \
+  __macro(Softsign, softsign);                \
+  __macro(LeakyRelu, leaky_relu);             \
+  __macro(TanhShrink, tanh_shrink);           \
+  __macro(ELU, elu);                          \
+  __macro(HardShrink, hard_shrink);           \
+  __macro(Swish, swish);                      \
+  __macro(ThresholdedRelu, thresholded_relu);
 
+#define REGISTER_INPLACE_ACTIVATION_OP(OP_NAME, KERNEL_TYPE)        \
+  REGISTER_OPERATOR(KERNEL_TYPE, ::paddle::operators::ActivationOp, \
+                    ::paddle::operators::OP_NAME##OpMaker,          \
+                    ::paddle::operators::OP_NAME##GradMaker);       \
+  REGISTER_OPERATOR(KERNEL_TYPE##_grad, ::paddle::operators::ActivationOpGrad)
+
+#define REGISTER_ACTIVATION_OP(OP_NAME, KERNEL_TYPE)                 \
+  REGISTER_OP(KERNEL_TYPE, ops::ActivationOp, ops::OP_NAME##OpMaker, \
+              KERNEL_TYPE##_grad, ops::ActivationOpGrad);
+
 #define REGISTER_ACTIVATION_CPU_KERNEL(act_type, functor, grad_functor) \
   REGISTER_OP_CPU_KERNEL(                                               \
......
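One detail worth noting: the FOR_EACH_* lists now pass (OpName, kernel_type), matching the parameter order of the rewritten REGISTER_INPLACE_ACTIVATION_OP(OP_NAME, KERNEL_TYPE). The invocation that ties them together is outside the visible hunks, so the following is an assumed usage consistent with these signatures, not code shown in the diff:

// Assumed invocation (not shown in this diff):
//   FOR_EACH_INPLACE_OP_FUNCTOR(REGISTER_INPLACE_ACTIVATION_OP);
// The first list entry, __macro(Sigmoid, sigmoid), then produces:
REGISTER_OPERATOR(sigmoid, ::paddle::operators::ActivationOp,
                  ::paddle::operators::SigmoidOpMaker,
                  ::paddle::operators::SigmoidGradMaker);
REGISTER_OPERATOR(sigmoid_grad, ::paddle::operators::ActivationOpGrad);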