/* Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */
Q
qijun 已提交
14

Y
Yi Wang 已提交
15
#include "paddle/fluid/operators/activation_op.h"
D
dzhwinter 已提交
16
#include <string>
K
Krzysztof Binias 已提交
17
#include "paddle/fluid/operators/mkldnn_activation_op.h"
Q
qijun 已提交
18 19 20 21

namespace paddle {
namespace operators {

Y
Yu Yang 已提交
22 23 24 25 26 27 28 29 30 31 32 33
// Generates an OpProtoAndCheckerMaker subclass named <OP_NAME>OpMaker for a
// simple unary activation: input "X", output "Out", plus the shared
// "use_mkldnn" attribute.
//
// Fixes vs. the previous version:
//  - "Input of " #OP_NAME " operator" now has spaces on both sides of the
//    stringized name (was "Input of Sigmoidoperator" / "Output ofSigmoid...").
//  - AddComment(OP_COMMENT) passes the doc constant's *text*; the old
//    AddComment(#OP_COMMENT) stringized the identifier, so every op's
//    comment was literally e.g. "SigmoidDoc".
#define REGISTER_ACTIVATION_OP_MAKER(OP_NAME, OP_COMMENT)               \
  class OP_NAME##OpMaker                                                \
      : public ::paddle::framework::OpProtoAndCheckerMaker {            \
   public:                                                              \
    void Make() override {                                              \
      AddInput("X", "Input of " #OP_NAME " operator");                  \
      AddOutput("Out", "Output of " #OP_NAME " operator");              \
      AddAttr<bool>("use_mkldnn",                                       \
                    "(bool, default false) Only used in mkldnn kernel") \
          .SetDefault(false);                                           \
      AddComment(OP_COMMENT);                                           \
    }                                                                   \
  }
D
dzhwinter 已提交
35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54

// Generates a SingleGradOpDescMaker subclass named <OP_NAME>GradMaker.
// The produced <KERNEL_TYPE>_grad op reads the forward output "Out" and its
// gradient (not the forward input "X") and writes the gradient of "X" —
// suitable for activations whose derivative is expressible via the output.
#define REGISTER_ACTIVATION_OP_GRAD_MAKER(OP_NAME, KERNEL_TYPE)              \
  class OP_NAME##GradMaker                                                   \
      : public ::paddle::framework::SingleGradOpDescMaker {                  \
   public:                                                                   \
    using ::paddle::framework::SingleGradOpDescMaker::SingleGradOpDescMaker; \
                                                                             \
   protected:                                                                \
    std::unique_ptr<::paddle::framework::OpDesc> Apply() const override {    \
      auto *grad_op = new ::paddle::framework::OpDesc();                     \
      grad_op->SetType(#KERNEL_TYPE "_grad");                                \
      grad_op->SetInput("Out", Output("Out"));                               \
      grad_op->SetInput(::paddle::framework::GradVarName("Out"),             \
                        OutputGrad("Out"));                                  \
      grad_op->SetAttrMap(Attrs());                                          \
      grad_op->SetOutput(::paddle::framework::GradVarName("X"),              \
                         InputGrad("X"));                                    \
      return std::unique_ptr<::paddle::framework::OpDesc>(grad_op);          \
    }                                                                        \
  }
D
dzhwinter 已提交
56

Q
qijun 已提交
57 58 59 60
class ActivationOp : public framework::OperatorWithKernel {
 public:
  using framework::OperatorWithKernel::OperatorWithKernel;

61
  void InferShape(framework::InferShapeContext *ctx) const override {
F
fengjiayi 已提交
62 63
    ctx->SetOutputDim("Out", ctx->GetInputDim("X"));
    ctx->ShareLoD("X", /*->*/ "Out");
Q
qijun 已提交
64
  }
Q
qijun 已提交
65 66
};

Q
qijun 已提交
67 68 69 70
class ActivationOpGrad : public framework::OperatorWithKernel {
 public:
  using framework::OperatorWithKernel::OperatorWithKernel;

71
  void InferShape(framework::InferShapeContext *ctx) const override {
F
fengjiayi 已提交
72
    ctx->SetOutputDim(framework::GradVarName("X"), ctx->GetInputDim("Out"));
Q
qijun 已提交
73 74 75
  }
};

Q
qiaolongfei 已提交
76
// Doc strings consumed by the REGISTER_ACTIVATION_OP_MAKER invocations near
// the bottom of this file.  Marked unused to silence warnings in builds where
// a constant is not referenced directly.
// (Raw-string contents reconstructed: the previous text was corrupted by
// interleaved metadata and did not compile.)
__attribute__((unused)) constexpr char SigmoidDoc[] = R"DOC(
Sigmoid Activation Operator

$$out = \frac{1}{1 + e^{-x}}$$

)DOC";

__attribute__((unused)) constexpr char LogSigmoidDoc[] = R"DOC(
Logsigmoid Activation Operator

$$out = \log \frac{1}{1 + e^{-x}}$$

)DOC";

__attribute__((unused)) constexpr char ExpDoc[] = R"DOC(
Exp Activation Operator.

$out = e^x$

)DOC";

__attribute__((unused)) constexpr char ReluDoc[] = R"DOC(
Relu Activation Operator.

$out = \max(x, 0)$

)DOC";

__attribute__((unused)) constexpr char TanhDoc[] = R"DOC(
Tanh Activation Operator.

$$out = \frac{e^{x} - e^{-x}}{e^{x} + e^{-x}}$$

)DOC";

__attribute__((unused)) constexpr char TanhShrinkDoc[] = R"DOC(
TanhShrink Activation Operator.

$$out = x - \frac{e^{x} - e^{-x}}{e^{x} + e^{-x}}$$

)DOC";

__attribute__((unused)) constexpr char SqrtDoc[] = R"DOC(
Sqrt Activation Operator.

$out = \sqrt{x}$

)DOC";

__attribute__((unused)) constexpr char AbsDoc[] = R"DOC(
Abs Activation Operator.

$out = |x|$

)DOC";

__attribute__((unused)) constexpr char CeilDoc[] = R"DOC(
Ceil Activation Operator.

$out = ceil(x)$

)DOC";

__attribute__((unused)) constexpr char FloorDoc[] = R"DOC(
Floor Activation Operator.

$out = floor(x)$

)DOC";

__attribute__((unused)) constexpr char CosDoc[] = R"DOC(
Cosine Activation Operator.

$out = cos(x)$

)DOC";

__attribute__((unused)) constexpr char SinDoc[] = R"DOC(
Sine Activation Operator.

$out = sin(x)$

)DOC";

__attribute__((unused)) constexpr char RoundDoc[] = R"DOC(
Round Activation Operator.

$out = [x]$

)DOC";

__attribute__((unused)) constexpr char ReciprocalDoc[] = R"DOC(
Reciprocal Activation Operator.

$$out = \frac{1}{x}$$

)DOC";

__attribute__((unused)) constexpr char LogDoc[] = R"DOC(
Log Activation Operator.

$out = \ln(x)$

Natural logarithm of x.

)DOC";

__attribute__((unused)) constexpr char SquareDoc[] = R"DOC(
Square Activation Operator.

$out = x^2$

)DOC";

__attribute__((unused)) constexpr char SoftplusDoc[] = R"DOC(
Softplus Activation Operator.

$out = \ln(1 + e^{x})$

)DOC";

__attribute__((unused)) constexpr char SoftsignDoc[] = R"DOC(
Softsign Activation Operator.

$$out = \frac{x}{1 + |x|}$$

)DOC";

class LeakyReluOpMaker : public framework::OpProtoAndCheckerMaker {
205
 public:
Y
Yu Yang 已提交
206
  void Make() override {
D
dzhwinter 已提交
207 208 209
    AddInput("X", "Input of LeakyRelu operator");
    AddOutput("Out", "Output of LeakyRelu operator");
    AddAttr<float>("alpha", "The small negative slope").SetDefault(0.02f);
K
Kexin Zhao 已提交
210
    AddComment(R"DOC(
D
dzhwinter 已提交
211
LeakyRelu Activation Operator.
K
Kexin Zhao 已提交
212

D
dzhwinter 已提交
213
$out = \max(x, \alpha * x)$
K
Kexin Zhao 已提交
214 215

)DOC");
216 217 218
  }
};

D
dzhwinter 已提交
219
class SoftShrinkOpMaker : public framework::OpProtoAndCheckerMaker {
K
kexinzhao 已提交
220
 public:
Y
Yu Yang 已提交
221
  void Make() override {
D
dzhwinter 已提交
222 223 224
    AddInput("X", "Input of Softshrink operator");
    AddOutput("Out", "Output of Softshrink operator");
    AddAttr<float>("lambda", "non-negative offset").SetDefault(0.5f);
K
Kexin Zhao 已提交
225
    AddComment(R"DOC(
D
dzhwinter 已提交
226
Softshrink Activation Operator.
K
Kexin Zhao 已提交
227

D
dzhwinter 已提交
228 229 230 231 232 233 234
$$
out = \begin{cases} 
    x - \lambda, \text{if } x > \lambda \\
    x + \lambda, \text{if } x < -\lambda \\
    0,  \text{otherwise}
    \end{cases}
$$
K
Kexin Zhao 已提交
235 236

)DOC");
K
kexinzhao 已提交
237 238 239
  }
};

D
dzhwinter 已提交
240
class HardShrinkOpMaker : public framework::OpProtoAndCheckerMaker {
241
 public:
Y
Yu Yang 已提交
242
  void Make() override {
D
dzhwinter 已提交
243 244 245 246
    AddInput("X", "Input of HardShrink operator");
    AddOutput("Out", "Output of HardShrink operator");
    AddAttr<float>("threshold", "The value of threshold for HardShrink")
        .SetDefault(0.5f);
K
Kexin Zhao 已提交
247
    AddComment(R"DOC(
D
dzhwinter 已提交
248
HardShrink Activation Operator.
K
Kexin Zhao 已提交
249

D
dzhwinter 已提交
250 251 252 253 254 255 256
$$
out = \begin{cases} 
    x, \text{if } x > \lambda \\
    x, \text{if } x < -\lambda \\
    0,  \text{otherwise}
    \end{cases}
$$
K
Kexin Zhao 已提交
257 258

)DOC");
259 260 261
  }
};

262 263
// Proto maker for brelu (bounded relu): clamps the input into [t_min, t_max].
class BReluOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  void Make() override {
    AddInput("X", "Input of BRelu operator");
    AddOutput("Out", "Output of BRelu operator");
    AddAttr<float>("t_min", "The min marginal value of BRelu")
        .SetDefault(static_cast<float>(0));
    AddAttr<float>("t_max", "The max marginal value of BRelu")
        .SetDefault(static_cast<float>(24));
    // Doc fix: the clamp is min(max(x, t_min), t_max).  The previous formula
    // \max(\min(x, t_{min}), t_{max}) collapses to t_max for every input.
    AddComment(R"DOC(
BRelu Activation Operator.

$out = \min(\max(x, t_{min}), t_{max})$

)DOC");
  }
};

// Proto maker for soft_relu: softplus applied to the input clipped into
// [-threshold, threshold].
class SoftReluOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  void Make() override {
    AddInput("X", "Input of SoftRelu operator");
    AddOutput("Out", "Output of SoftRelu operator");
    AddAttr<float>("threshold", "The threshold value of SoftRelu")
        .SetDefault(40.0f);
    // Doc fix: the inner clip is to [-threshold, threshold].  The previous
    // formula \max(\min(x, threshold), threshold) always evaluates to
    // threshold, and it was missing a closing parenthesis for \ln.
    AddComment(R"DOC(
SoftRelu Activation Operator.

$out = \ln(1 + \exp(\max(\min(x, threshold), -threshold)))$

)DOC");
  }
};

296 297
// Proto maker for elu (exponential linear unit).
class ELUOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  void Make() override {
    AddInput("X", "Input of ELU operator");
    AddOutput("Out", "Output of ELU operator");
    // alpha scales the saturated (negative) branch.
    AddAttr<float>("alpha", "The alpha value of ELU").SetDefault(1.0f);
    AddComment(R"DOC(
ELU Activation Operator.

Applies the following element-wise computation on the input according to
https://arxiv.org/abs/1511.07289.

$out = \max(0, x) + \min(0, \alpha * (e^x - 1))$

)DOC");
  }
};

314 315
// Proto maker for relu6: relu capped at an upper bound (6 by default).
class Relu6OpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  void Make() override {
    AddInput("X", "Input of Relu6 operator");
    AddOutput("Out", "Output of Relu6 operator");
    AddAttr<float>("threshold", "The threshold value of Relu6")
        .SetDefault(6.0f);
    AddComment(R"DOC(
Relu6 Activation Operator.

$out = \min(\max(0, x), 6)$

)DOC");
  }
};

330 331
// Proto maker for pow: element-wise power with a fixed exponent attribute.
class PowOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  void Make() override {
    AddInput("X", "Input of Pow operator");
    AddOutput("Out", "Output of Pow operator");
    AddAttr<float>("factor", "The exponential factor of Pow").SetDefault(1.0f);
    AddComment(R"DOC(
Pow Activation Operator.

$out = x^{factor}$

)DOC");
  }
};

// Proto maker for stanh (scaled tanh): b * tanh(a * x).
class STanhOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  void Make() override {
    AddInput("X", "Input of STanh operator");
    AddOutput("Out", "Output of STanh operator");
    // Defaults follow LeCun's recommended scaled tanh constants.
    AddAttr<float>("scale_a", "The scale parameter of a for the input")
        .SetDefault(2.0f / 3.0f);
    AddAttr<float>("scale_b", "The scale parameter of b for the input")
        .SetDefault(1.7159f);
    AddComment(R"DOC(
STanh Activation Operator.

$$out = b * \frac{e^{a * x} - e^{-a * x}}{e^{a * x} + e^{-a * x}}$$

)DOC");
  }
};

363 364
// Proto maker for thresholded_relu: identity above the threshold, zero below.
class ThresholdedReluOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  void Make() override {
    AddInput("X", "Input of ThresholdedRelu operator");
    AddOutput("Out", "Output of ThresholdedRelu operator");
    AddAttr<float>("threshold", "The threshold location of activation")
        .SetDefault(1.0f);
    AddComment(R"DOC(
ThresholdedRelu Activation Operator.

$$
out = \begin{cases} 
    x, \text{if } x > threshold \\
    0,  \text{otherwise}
    \end{cases}
$$

)DOC");
  }
};

384 385
// Proto maker for hard_sigmoid: a segment-wise linear, cheap approximation of
// sigmoid parameterized by "slope" and "offset".
class HardSigmoidOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  void Make() override {
    AddInput("X", "Input of HardSigmoid operator");
    AddOutput("Out", "Output of HardSigmoid operator");
    AddAttr<float>("slope", "Slope for linear approximation of sigmoid")
        .SetDefault(0.2f);
    AddAttr<float>("offset", "Offset for linear approximation of sigmoid")
        .SetDefault(0.5f);
    // Doc fix: the attribute is named "offset"; the comment previously called
    // it "shift", which matches no attribute of this op.
    AddComment(R"DOC(
HardSigmoid Activation Operator.

Segment-wise linear approximation of sigmoid(https://arxiv.org/abs/1603.00391), 
which is much faster than sigmoid.

$out = \max(0, \min(1, slope * x + offset))$

The slope should be positive. The offset can be either positive or negative.
The default slope and offset are set according to the above reference.
It is recommended to use the defaults for this activation.

)DOC");
  }
};

A
Abhinav Arora 已提交
409 410
// Proto maker for swish: x * sigmoid(beta * x).
class SwishOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  void Make() override {
    AddInput("X", "Input of Swish operator");
    AddOutput("Out", "Output of Swish operator");
    AddAttr<float>("beta", "Constant beta of swish operator").SetDefault(1.0f);
    AddComment(R"DOC(
Swish Activation Operator.

$$out = \frac{x}{1 + e^{- \beta x}}$$

)DOC");
  }
};

D
dzhwinter 已提交
424 425 426 427 428 429 430 431 432 433 434 435 436 437 438 439 440 441 442
// Instantiate the macro-generated OpMaker classes for all simple unary
// activations, pairing each op with its doc-string constant above.
REGISTER_ACTIVATION_OP_MAKER(Sigmoid, SigmoidDoc);
REGISTER_ACTIVATION_OP_MAKER(LogSigmoid, LogSigmoidDoc);
REGISTER_ACTIVATION_OP_MAKER(Exp, ExpDoc);
REGISTER_ACTIVATION_OP_MAKER(Relu, ReluDoc);
REGISTER_ACTIVATION_OP_MAKER(Tanh, TanhDoc);
REGISTER_ACTIVATION_OP_MAKER(TanhShrink, TanhShrinkDoc);
REGISTER_ACTIVATION_OP_MAKER(Sqrt, SqrtDoc);
REGISTER_ACTIVATION_OP_MAKER(Abs, AbsDoc);
REGISTER_ACTIVATION_OP_MAKER(Ceil, CeilDoc);
REGISTER_ACTIVATION_OP_MAKER(Floor, FloorDoc);
REGISTER_ACTIVATION_OP_MAKER(Cos, CosDoc);
REGISTER_ACTIVATION_OP_MAKER(Sin, SinDoc);
REGISTER_ACTIVATION_OP_MAKER(Round, RoundDoc);
REGISTER_ACTIVATION_OP_MAKER(Reciprocal, ReciprocalDoc);
REGISTER_ACTIVATION_OP_MAKER(Log, LogDoc);
REGISTER_ACTIVATION_OP_MAKER(Square, SquareDoc);
REGISTER_ACTIVATION_OP_MAKER(Softplus, SoftplusDoc);
REGISTER_ACTIVATION_OP_MAKER(Softsign, SoftsignDoc);

D
dzhwinter 已提交
443 444
// Instantiate hand-written grad makers for the activations whose backward
// pass needs only the forward output "Out" (see the macro above); the
// remaining activations fall back to DefaultGradOpDescMaker.
REGISTER_ACTIVATION_OP_GRAD_MAKER(Sigmoid, sigmoid);
REGISTER_ACTIVATION_OP_GRAD_MAKER(Relu, relu);
REGISTER_ACTIVATION_OP_GRAD_MAKER(Exp, exp);
REGISTER_ACTIVATION_OP_GRAD_MAKER(Tanh, tanh);
REGISTER_ACTIVATION_OP_GRAD_MAKER(Ceil, ceil);
REGISTER_ACTIVATION_OP_GRAD_MAKER(Floor, floor);
REGISTER_ACTIVATION_OP_GRAD_MAKER(Sqrt, sqrt);
REGISTER_ACTIVATION_OP_GRAD_MAKER(SoftRelu, soft_relu);
REGISTER_ACTIVATION_OP_GRAD_MAKER(Relu6, relu6);
REGISTER_ACTIVATION_OP_GRAD_MAKER(Reciprocal, reciprocal);
REGISTER_ACTIVATION_OP_GRAD_MAKER(HardSigmoid, hard_sigmoid);
Q
qijun 已提交
454 455 456 457
}  // namespace operators
}  // namespace paddle

namespace ops = paddle::operators;
458

D
dzhwinter 已提交
459
// Activations registered with the custom (output-only) grad maker; their
// kernel list matches the GRAD_MAKER instantiations above.
#define FOR_EACH_INPLACE_OP_FUNCTOR(__macro) \
  __macro(Sigmoid, sigmoid);                 \
  __macro(Relu, relu);                       \
  __macro(Exp, exp);                         \
  __macro(Tanh, tanh);                       \
  __macro(Ceil, ceil);                       \
  __macro(Floor, floor);                     \
  __macro(Sqrt, sqrt);                       \
  __macro(SoftRelu, soft_relu);              \
  __macro(Relu6, relu6);                     \
  __macro(Reciprocal, reciprocal);           \
  __macro(HardSigmoid, hard_sigmoid);
D
dzhwinter 已提交
471 472

// Activations registered with the framework's default grad-op maker.
#define FOR_EACH_OP_FUNCTOR(__macro) \
  __macro(LogSigmoid, logsigmoid);   \
  __macro(SoftShrink, softshrink);   \
  __macro(Abs, abs);                 \
  __macro(Cos, cos);                 \
  __macro(Sin, sin);                 \
  __macro(Round, round);             \
  __macro(Log, log);                 \
  __macro(Square, square);           \
  __macro(BRelu, brelu);             \
  __macro(Pow, pow);                 \
  __macro(STanh, stanh);             \
  __macro(Softplus, softplus);       \
  __macro(Softsign, softsign);       \
  __macro(LeakyRelu, leaky_relu);    \
  __macro(TanhShrink, tanh_shrink);  \
  __macro(ELU, elu);                 \
  __macro(HardShrink, hard_shrink);  \
  __macro(Swish, swish);             \
  __macro(ThresholdedRelu, thresholded_relu);

// Registers an activation plus its grad op, wiring in the hand-written
// OP_NAME##GradMaker (generated above) — used for ops whose backward needs
// only "Out", which enables in-place forward execution.
#define REGISTER_INPLACE_ACTIVATION_OP(OP_NAME, KERNEL_TYPE)        \
  REGISTER_OPERATOR(KERNEL_TYPE, ::paddle::operators::ActivationOp, \
                    ::paddle::operators::OP_NAME##OpMaker,          \
                    ::paddle::operators::OP_NAME##GradMaker);       \
  REGISTER_OPERATOR(KERNEL_TYPE##_grad, ::paddle::operators::ActivationOpGrad)

D
dzhwinter 已提交
499 500 501 502 503
// Registers an activation plus its grad op using the framework's
// DefaultGradOpDescMaker instead of a hand-written grad maker.
#define REGISTER_ACTIVATION_OP(OP_NAME, KERNEL_TYPE)                    \
  REGISTER_OPERATOR(KERNEL_TYPE, ::paddle::operators::ActivationOp,     \
                    ::paddle::operators::OP_NAME##OpMaker,              \
                    ::paddle::framework::DefaultGradOpDescMaker<true>); \
  REGISTER_OPERATOR(KERNEL_TYPE##_grad, ::paddle::operators::ActivationOpGrad)
A
Abhinav Arora 已提交
504

Q
QI JUN 已提交
505 506 507 508 509 510 511 512 513 514 515
// Registers float and double CPU kernels for one activation and its gradient.
#define REGISTER_ACTIVATION_CPU_KERNEL(act_type, functor, grad_functor)   \
  REGISTER_OP_CPU_KERNEL(                                                 \
      act_type, ops::ActivationKernel<paddle::platform::CPUDeviceContext, \
                                      ops::functor<float>>,               \
      ops::ActivationKernel<paddle::platform::CPUDeviceContext,           \
                            ops::functor<double>>);                       \
  REGISTER_OP_CPU_KERNEL(                                                 \
      act_type##_grad,                                                    \
      ops::ActivationGradKernel<paddle::platform::CPUDeviceContext,       \
                                ops::grad_functor<float>>,                \
      ops::ActivationGradKernel<paddle::platform::CPUDeviceContext,       \
                                ops::grad_functor<double>>);
517

D
dzhwinter 已提交
518
// Expand the registration lists: op + grad-op registrations, then the CPU
// kernel registrations (FOR_EACH_KERNEL_FUNCTOR comes from activation_op.h).
FOR_EACH_OP_FUNCTOR(REGISTER_ACTIVATION_OP);
FOR_EACH_INPLACE_OP_FUNCTOR(REGISTER_INPLACE_ACTIVATION_OP);
FOR_EACH_KERNEL_FUNCTOR(REGISTER_ACTIVATION_CPU_KERNEL);