/* Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */
Y
Yi Wang 已提交
15
#include "paddle/fluid/operators/activation_op.h"
K
Krzysztof Binias 已提交
16
#include "paddle/fluid/operators/mkldnn_activation_op.h"
Q
qijun 已提交
17 18 19 20

namespace paddle {
namespace operators {

// Generates an OpProtoAndCheckerMaker subclass named <OP_NAME>OpMaker for a
// unary activation operator: one input "X", one output "Out", plus the shared
// "use_mkldnn" attribute.  OP_COMMENT is the doc-string constant to attach.
//
// Fixes: the input/output descriptions previously read e.g.
// "Input of Sigmoidoperator" (missing spaces around #OP_NAME), and
// AddComment(#OP_COMMENT) stringized the *identifier* — attaching the literal
// text "SigmoidDoc" instead of the documentation it names.
#define REGISTER_ACTIVATION_OP_MAKER(OP_NAME, OP_COMMENT)               \
  class OP_NAME##OpMaker : public framework::OpProtoAndCheckerMaker {   \
   public:                                                              \
    OP_NAME##OpMaker(OpProto *proto, OpAttrChecker *op_checker)         \
        : framework::OpProtoAndCheckerMaker(proto, op_checker) {        \
      AddInput("X", "Input of " #OP_NAME " operator");                  \
      AddOutput("Out", "Output of " #OP_NAME " operator");              \
      AddAttr<bool>("use_mkldnn",                                       \
                    "(bool, default false) Only used in mkldnn kernel") \
          .SetDefault(false);                                           \
      AddComment(OP_COMMENT);                                           \
    }                                                                   \
  }

// Generates a SingleGradOpDescMaker subclass named <OP_NAME>GradMaker for
// activations whose gradient is computed from "Out" (not "X"): the grad op
// takes Out and dOut, copies the attribute map, and produces dX.
//
// Fix: the generated class declared no constructor and did not inherit one,
// so it could not be instantiated — inherit the base-class constructors.
#define REGISTER_ACTIVATION_OP_GRAD_MAKER(OP_NAME)                     \
  class OP_NAME##GradMaker : public framework::SingleGradOpDescMaker { \
   public:                                                             \
    using framework::SingleGradOpDescMaker::SingleGradOpDescMaker;     \
                                                                       \
   protected:                                                          \
    std::unique_ptr<framework::OpDesc> Apply() const override {        \
      auto *op = new framework::OpDesc();                              \
      op->SetType(#OP_NAME "_grad");                                   \
      op->SetInput("Out", Input("Out"));                               \
      op->SetInput(framework::GradVarName("Out"), OutputGrad("Out"));  \
                                                                       \
      op->SetAttrMap(Attrs());                                         \
                                                                       \
      op->SetOutput(framework::GradVarName("X"), InputGrad("X"));      \
      return std::unique_ptr<framework::OpDesc>(op);                   \
    }                                                                  \
  }

// Forward operator shared by all activation ops in this file.  It only does
// shape inference; the elementwise math is supplied by the functor chosen at
// kernel-registration time (see REGISTER_ACTIVATION_CPU_KERNEL below).
class ActivationOp : public framework::OperatorWithKernel {
 public:
  using framework::OperatorWithKernel::OperatorWithKernel;

  // Out has exactly the shape of X; LoD is shared so sequence information
  // survives the elementwise transform.
  void InferShape(framework::InferShapeContext *ctx) const override {
    ctx->SetOutputDim("Out", ctx->GetInputDim("X"));
    ctx->ShareLoD("X", /*->*/ "Out");
  }
};

// Backward operator shared by all activation ops.  Activations are
// elementwise, so the input gradient has the same shape as the output.
class ActivationOpGrad : public framework::OperatorWithKernel {
 public:
  using framework::OperatorWithKernel::OperatorWithKernel;

  void InferShape(framework::InferShapeContext *ctx) const override {
    ctx->SetOutputDim(framework::GradVarName("X"), ctx->GetInputDim("Out"));
  }
};

// Documentation strings for the simple (attribute-free) activation operators,
// attached through REGISTER_ACTIVATION_OP_MAKER below.  Reconstructed: the
// raw-string literals had extraction residue interleaved into them.
constexpr char SigmoidDoc[] = R"DOC(
Sigmoid Activation Operator

$$out = \frac{1}{1 + e^{-x}}$$

)DOC";

constexpr char LogSigmoidDoc[] = R"DOC(
Logsigmoid Activation Operator

$$out = \log \frac{1}{1 + e^{-x}}$$

)DOC";

constexpr char ExpDoc[] = R"DOC(
Exp Activation Operator.

$out = e^x$

)DOC";

constexpr char ReluDoc[] = R"DOC(
Relu Activation Operator.

$out = \max(x, 0)$

)DOC";

constexpr char TanhDoc[] = R"DOC(
Tanh Activation Operator.

$$out = \frac{e^{x} - e^{-x}}{e^{x} + e^{-x}}$$

)DOC";

constexpr char TanhShrinkDoc[] = R"DOC(
TanhShrink Activation Operator.

$$out = x - \frac{e^{x} - e^{-x}}{e^{x} + e^{-x}}$$

)DOC";

constexpr char SqrtDoc[] = R"DOC(
Sqrt Activation Operator.

$out = \sqrt{x}$

)DOC";

constexpr char AbsDoc[] = R"DOC(
Abs Activation Operator.

$out = |x|$

)DOC";

constexpr char CeilDoc[] = R"DOC(
Ceil Activation Operator.

$out = ceil(x)$

)DOC";

constexpr char FloorDoc[] = R"DOC(
Floor Activation Operator.

$out = floor(x)$

)DOC";

constexpr char CosDoc[] = R"DOC(
Cosine Activation Operator.

$out = cos(x)$

)DOC";

constexpr char SinDoc[] = R"DOC(
Sine Activation Operator.

$out = sin(x)$

)DOC";

constexpr char RoundDoc[] = R"DOC(
Round Activation Operator.

$out = [x]$

)DOC";

constexpr char ReciprocalDoc[] = R"DOC(
Reciprocal Activation Operator.

$$out = \frac{1}{x}$$

)DOC";

constexpr char LogDoc[] = R"DOC(
Log Activation Operator.

$out = \ln(x)$

Natural logarithm of x.

)DOC";

constexpr char SquareDoc[] = R"DOC(
Square Activation Operator.

$out = x^2$

)DOC";

constexpr char SoftplusDoc[] = R"DOC(
Softplus Activation Operator.

$out = \ln(1 + e^{x})$

)DOC";

constexpr char SoftsignDoc[] = R"DOC(
Softsign Activation Operator.

$$out = \frac{x}{1 + |x|}$$

)DOC";

// Proto maker for leaky_relu: like Relu but with a small configurable slope
// on the negative part of the input.
class LeakyReluOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  LeakyReluOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : framework::OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of LeakyRelu operator");
    AddOutput("Out", "Output of LeakyRelu operator");
    // Slope applied when x < 0 (alpha < 1 keeps the usual "leak" behavior).
    AddAttr<float>("alpha", "The small negative slope").SetDefault(0.02f);
    AddComment(R"DOC(
LeakyRelu Activation Operator.

$out = \max(x, \alpha * x)$

)DOC");
  }
};

// Proto maker for softshrink: shrinks the input toward zero by lambda and
// zeroes out everything inside [-lambda, lambda].
class SoftShrinkOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  SoftShrinkOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : framework::OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of Softshrink operator");
    AddOutput("Out", "Output of Softshrink operator");
    // lambda is the half-width of the dead zone around zero.
    AddAttr<float>("lambda", "non-negative offset").SetDefault(0.5f);
    AddComment(R"DOC(
Softshrink Activation Operator.

$$
out = \begin{cases} 
    x - \lambda, \text{if } x > \lambda \\
    x + \lambda, \text{if } x < -\lambda \\
    0,  \text{otherwise}
    \end{cases}
$$

)DOC");
  }
};

// Proto maker for hard_shrink: passes the input through unchanged outside
// [-threshold, threshold] and zeroes it inside.
// NOTE(review): the doc formula writes the attribute as \lambda while the
// attribute itself is named "threshold" — consider unifying the notation.
class HardShrinkOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  HardShrinkOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : framework::OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of HardShrink operator");
    AddOutput("Out", "Output of HardShrink operator");
    AddAttr<float>("threshold", "The value of threshold for HardShrink")
        .SetDefault(0.5f);
    AddComment(R"DOC(
HardShrink Activation Operator.

$$
out = \begin{cases} 
    x, \text{if } x > \lambda \\
    x, \text{if } x < -\lambda \\
    0,  \text{otherwise}
    \end{cases}
$$

)DOC");
  }
};

260 261
class BReluOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
262 263
  BReluOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : framework::OpProtoAndCheckerMaker(proto, op_checker) {
264
    AddInput("X", "Input of BRelu operator");
F
fengjiayi 已提交
265
    AddOutput("Out", "Output of BRelu operator");
266 267 268 269
    AddAttr<float>("t_min", "The min marginal value of BRelu")
        .SetDefault(static_cast<float>(0));
    AddAttr<float>("t_max", "The max marginal value of BRelu")
        .SetDefault(static_cast<float>(24));
K
Kexin Zhao 已提交
270
    AddComment(R"DOC(
K
kexinzhao 已提交
271
BRelu Activation Operator.
K
Kexin Zhao 已提交
272

F
fengjiayi 已提交
273
$out = \max(\min(x, t_{min}), t_{max})$
K
Kexin Zhao 已提交
274 275

)DOC");
276 277 278 279 280
  }
};

// Proto maker for soft_relu: softplus applied to the input clipped into
// [-threshold, threshold].
class SoftReluOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  SoftReluOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : framework::OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of SoftRelu operator");
    AddOutput("Out", "Output of SoftRelu operator");
    AddAttr<float>("threshold", "The threshold value of SoftRelu")
        .SetDefault(40.0f);
    // Fixed doc formula: it previously clipped with +threshold on both sides
    // (\max(\min(x, threshold), threshold)) and had unbalanced parentheses;
    // the lower clip bound is -threshold.
    AddComment(R"DOC(
SoftRelu Activation Operator.

$out = \ln(1 + \exp(\max(\min(x, threshold), -threshold)))$

)DOC");
  }
};

// Proto maker for elu (Exponential Linear Unit), following
// https://arxiv.org/abs/1511.07289.
class ELUOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  ELUOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : framework::OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of ELU operator");
    AddOutput("Out", "Output of ELU operator");
    // Scale of the exponential branch for negative inputs.
    AddAttr<float>("alpha", "The alpha value of ELU").SetDefault(1.0f);
    AddComment(R"DOC(
ELU Activation Operator.

Applies the following element-wise computation on the input according to
https://arxiv.org/abs/1511.07289.

$out = \max(0, x) + \min(0, \alpha * (e^x - 1))$

)DOC");
  }
};

// Proto maker for relu6: relu capped at an upper bound (6 by default).
class Relu6OpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  Relu6OpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : framework::OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of Relu6 operator");
    AddOutput("Out", "Output of Relu6 operator");
    AddAttr<float>("threshold", "The threshold value of Relu6")
        .SetDefault(6.0f);
    AddComment(R"DOC(
Relu6 Activation Operator.

$out = \min(\max(0, x), 6)$

)DOC");
  }
};

// Proto maker for pow: raises the input elementwise to a fixed exponent.
class PowOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  PowOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : framework::OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of Pow operator");
    AddOutput("Out", "Output of Pow operator");
    // factor = 1 makes the op an identity by default.
    AddAttr<float>("factor", "The exponential factor of Pow").SetDefault(1.0f);
    AddComment(R"DOC(
Pow Activation Operator.

$out = x^{factor}$

)DOC");
  }
};

// Proto maker for stanh (scaled tanh): b * tanh(a * x).  The defaults
// (a = 2/3, b = 1.7159) are the classic LeCun scaled-tanh constants.
class STanhOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  STanhOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : framework::OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of STanh operator");
    AddOutput("Out", "Output of STanh operator");
    AddAttr<float>("scale_a", "The scale parameter of a for the input")
        .SetDefault(2.0f / 3.0f);
    AddAttr<float>("scale_b", "The scale parameter of b for the input")
        .SetDefault(1.7159f);
    AddComment(R"DOC(
STanh Activation Operator.

$$out = b * \frac{e^{a * x} - e^{-a * x}}{e^{a * x} + e^{-a * x}}$$

)DOC");
  }
};

// Proto maker for thresholded_relu: passes the input through only where it
// exceeds the threshold, zero elsewhere.
class ThresholdedReluOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  ThresholdedReluOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : framework::OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of ThresholdedRelu operator");
    AddOutput("Out", "Output of ThresholdedRelu operator");
    AddAttr<float>("threshold", "The threshold location of activation")
        .SetDefault(1.0f);
    AddComment(R"DOC(
ThresholdedRelu Activation Operator.

$$
out = \begin{cases} 
    x, \text{if } x > threshold \\
    0,  \text{otherwise}
    \end{cases}
$$

)DOC");
  }
};

389 390
class HardSigmoidOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
391 392
  HardSigmoidOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : framework::OpProtoAndCheckerMaker(proto, op_checker) {
393
    AddInput("X", "Input of HardSigmoid operator");
F
fengjiayi 已提交
394
    AddOutput("Out", "Output of HardSigmoid operator");
395 396 397 398
    AddAttr<float>("slope", "Slope for linear approximation of sigmoid")
        .SetDefault(0.2f);
    AddAttr<float>("offset", "Offset for linear approximation of sigmoid")
        .SetDefault(0.5f);
399
    AddComment(R"DOC(
K
kexinzhao 已提交
400
HardSigmoid Activation Operator.
401

K
Kexin Zhao 已提交
402 403
Segment-wise linear approximation of sigmoid(https://arxiv.org/abs/1603.00391), 
which is much faster than sigmoid.
404

F
fengjiayi 已提交
405
$out = \max(0, \min(1, slope * x + shift))$
406 407

The slope should be positive. The offset can be either positive or negative.
K
Kexin Zhao 已提交
408
The default slope and shift are set according to the above reference.
409 410
It is recommended to use the defaults for this activation.

K
Kexin Zhao 已提交
411
)DOC");
412 413 414
  }
};

// Proto maker for swish: x * sigmoid(beta * x).
class SwishOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  SwishOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : framework::OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of Swish operator");
    AddOutput("Out", "Output of Swish operator");
    AddAttr<float>("beta", "Constant beta of swish operator").SetDefault(1.0f);
    AddComment(R"DOC(
Swish Activation Operator.

$$out = \frac{x}{1 + e^{- \beta x}}$$

)DOC");
  }
};

// Instantiate the proto makers for every attribute-free activation.
REGISTER_ACTIVATION_OP_MAKER(Sigmoid, SigmoidDoc);
REGISTER_ACTIVATION_OP_MAKER(LogSigmoid, LogSigmoidDoc);
REGISTER_ACTIVATION_OP_MAKER(Exp, ExpDoc);
REGISTER_ACTIVATION_OP_MAKER(Relu, ReluDoc);
REGISTER_ACTIVATION_OP_MAKER(Tanh, TanhDoc);
REGISTER_ACTIVATION_OP_MAKER(TanhShrink, TanhShrinkDoc);
REGISTER_ACTIVATION_OP_MAKER(Sqrt, SqrtDoc);
REGISTER_ACTIVATION_OP_MAKER(Abs, AbsDoc);
REGISTER_ACTIVATION_OP_MAKER(Ceil, CeilDoc);
REGISTER_ACTIVATION_OP_MAKER(Floor, FloorDoc);
REGISTER_ACTIVATION_OP_MAKER(Cos, CosDoc);
REGISTER_ACTIVATION_OP_MAKER(Sin, SinDoc);
REGISTER_ACTIVATION_OP_MAKER(Round, RoundDoc);
REGISTER_ACTIVATION_OP_MAKER(Reciprocal, ReciprocalDoc);
REGISTER_ACTIVATION_OP_MAKER(Log, LogDoc);
REGISTER_ACTIVATION_OP_MAKER(Square, SquareDoc);
REGISTER_ACTIVATION_OP_MAKER(Softplus, SoftplusDoc);
REGISTER_ACTIVATION_OP_MAKER(Softsign, SoftsignDoc);

// NOTE(*): Gradient makers are registered only for the activations whose
// gradient can be computed in place (from "Out" rather than "X").  The maker
// tells the executor which input variables the grad op actually uses; by
// default every input variable is assumed to be used by the gradient
// operator.  The names are intentionally lowercase so they match the
// registered op type strings.
REGISTER_ACTIVATION_OP_GRAD_MAKER(sigmoid);
REGISTER_ACTIVATION_OP_GRAD_MAKER(exp);
REGISTER_ACTIVATION_OP_GRAD_MAKER(relu);
REGISTER_ACTIVATION_OP_GRAD_MAKER(tanh);
REGISTER_ACTIVATION_OP_GRAD_MAKER(sqrt);
REGISTER_ACTIVATION_OP_GRAD_MAKER(ceil);
REGISTER_ACTIVATION_OP_GRAD_MAKER(floor);
REGISTER_ACTIVATION_OP_GRAD_MAKER(reciprocal);
REGISTER_ACTIVATION_OP_GRAD_MAKER(relu6);
REGISTER_ACTIVATION_OP_GRAD_MAKER(soft_relu);
REGISTER_ACTIVATION_OP_GRAD_MAKER(hard_sigmoid);
Q
qijun 已提交
466 467 468 469
}  // namespace operators
}  // namespace paddle

namespace ops = paddle::operators;
470

// Registers the forward op <act_type> (ActivationOp + its maker) together
// with the paired backward op <act_type>_grad (ActivationOpGrad).
#define REGISTER_ACTIVATION_OP(act_type, op_name)                 \
  REGISTER_OP(act_type, ops::ActivationOp, ops::op_name##OpMaker, \
              act_type##_grad, ops::ActivationOpGrad);

// Expands __macro(op_type_string, MakerNamePrefix) once per activation op.
#define FOR_EACH_OP_FUNCTOR(__macro)  \
  __macro(sigmoid, Sigmoid);          \
  __macro(logsigmoid, LogSigmoid);    \
  __macro(exp, Exp);                  \
  __macro(relu, Relu);                \
  __macro(tanh, Tanh);                \
  __macro(softshrink, SoftShrink);    \
  __macro(sqrt, Sqrt);                \
  __macro(abs, Abs);                  \
  __macro(ceil, Ceil);                \
  __macro(floor, Floor);              \
  __macro(cos, Cos);                  \
  __macro(sin, Sin);                  \
  __macro(round, Round);              \
  __macro(reciprocal, Reciprocal);    \
  __macro(log, Log);                  \
  __macro(square, Square);            \
  __macro(brelu, BRelu);              \
  __macro(soft_relu, SoftRelu);       \
  __macro(pow, Pow);                  \
  __macro(stanh, STanh);              \
  __macro(softplus, Softplus);        \
  __macro(softsign, Softsign);        \
  __macro(relu6, Relu6);              \
  __macro(leaky_relu, LeakyRelu);     \
  __macro(tanh_shrink, TanhShrink);   \
  __macro(elu, ELU);                  \
  __macro(hard_shrink, HardShrink);   \
  __macro(hard_sigmoid, HardSigmoid); \
  __macro(swish, Swish);              \
  __macro(thresholded_relu, ThresholdedRelu);

// Registers float and double CPU kernels for <act_type> and <act_type>_grad
// using the given forward/backward functor templates.
#define REGISTER_ACTIVATION_CPU_KERNEL(act_type, functor, grad_functor)   \
  REGISTER_OP_CPU_KERNEL(                                                 \
      act_type, ops::ActivationKernel<paddle::platform::CPUDeviceContext, \
                                      ops::functor<float>>,               \
      ops::ActivationKernel<paddle::platform::CPUDeviceContext,           \
                            ops::functor<double>>);                       \
  REGISTER_OP_CPU_KERNEL(                                                 \
      act_type##_grad,                                                    \
      ops::ActivationGradKernel<paddle::platform::CPUDeviceContext,       \
                                ops::grad_functor<float>>,                \
                                ops::grad_functor<double>>);

// FOR_EACH_KERNEL_FUNCTOR is not defined in this file — presumably it comes
// from activation_op.h (TODO confirm); it maps each op to its functor pair.
FOR_EACH_OP_FUNCTOR(REGISTER_ACTIVATION_OP);
FOR_EACH_KERNEL_FUNCTOR(REGISTER_ACTIVATION_CPU_KERNEL);