/* Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#include "paddle/fluid/operators/activation_op.h"
#include <string>
#include "paddle/fluid/operators/mkldnn_activation_op.h"
#include "paddle/fluid/platform/port.h"

namespace paddle {
namespace operators {

using paddle::framework::Tensor;

#define REGISTER_ACTIVATION_OP_MAKER(OP_NAME, OP_COMMENT)                \
  class OP_NAME##OpMaker                                                 \
      : public ::paddle::framework::OpProtoAndCheckerMaker {             \
   public:                                                               \
    void Make() override {                                               \
      AddInput("X", "Input of " #OP_NAME " operator");                   \
      AddOutput("Out", "Output of " #OP_NAME " operator");               \
      AddAttr<bool>("use_mkldnn",                                        \
                    "(bool, default false) Only used in mkldnn kernel")  \
          .SetDefault(false);                                            \
      AddAttr<bool>(                                                     \
          "is_test",                                                     \
          "(bool, default false) Set to true for inference only, false " \
          "for training. Some layers may run faster when this is true.") \
          .SetDefault(false);                                            \
      AddComment(#OP_COMMENT);                                           \
    }                                                                    \
  }
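
// Illustrative note: REGISTER_ACTIVATION_OP_MAKER(Relu, ReluDoc), for
// example, expands to a ReluOpMaker whose Make() declares the "X" input and
// "Out" output, adds the "use_mkldnn" and "is_test" boolean attributes (both
// defaulting to false), and attaches ReluDoc as the operator comment.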

#define REGISTER_ACTIVATION_OP_GRAD_MAKER(OP_NAME, KERNEL_TYPE)              \
  class OP_NAME##GradMaker                                                   \
      : public ::paddle::framework::SingleGradOpDescMaker {                  \
   public:                                                                   \
    using ::paddle::framework::SingleGradOpDescMaker::SingleGradOpDescMaker; \
                                                                             \
   protected:                                                                \
    std::unique_ptr<::paddle::framework::OpDesc> Apply() const override {    \
      auto* op = new ::paddle::framework::OpDesc();                          \
      op->SetType(#KERNEL_TYPE "_grad");                                     \
      op->SetInput("Out", Output("Out"));                                    \
      op->SetInput(::paddle::framework::GradVarName("Out"),                  \
                   OutputGrad("Out"));                                       \
                                                                             \
      op->SetAttrMap(Attrs());                                               \
                                                                             \
      op->SetOutput(::paddle::framework::GradVarName("X"), InputGrad("X"));  \
      return std::unique_ptr<::paddle::framework::OpDesc>(op);               \
    }                                                                        \
  }
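
// Illustrative note: REGISTER_ACTIVATION_OP_GRAD_MAKER(Relu, relu), for
// example, defines a ReluGradMaker whose Apply() builds a "relu_grad" op
// that takes "Out" and GRAD(Out) as inputs, copies the forward op's
// attributes, and produces GRAD(X). The forward input "X" is deliberately
// not wired in, so this maker only suits activations whose gradient can be
// computed from the output alone.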

framework::OpKernelType GetKernelType(const framework::ExecutionContext& ctx,
                                      const framework::OperatorWithKernel& oper,
                                      const std::string& name) {
  framework::LibraryType library{framework::LibraryType::kPlain};
  framework::DataLayout layout = framework::DataLayout::kAnyLayout;
#ifdef PADDLE_WITH_MKLDNN
  auto it = oper.Attrs().find("use_mkldnn");
  if (library == framework::LibraryType::kPlain && it != oper.Attrs().end() &&
      platform::CanMKLDNNBeUsed(ctx)) {
    library = framework::LibraryType::kMKLDNN;
    layout = framework::DataLayout::kMKLDNN;
  }
#endif
  return framework::OpKernelType(
      framework::GetDataTypeOfVar(ctx.InputVar(name)), ctx.GetPlace(), layout,
      library);
}
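
// The helper above prefers the MKLDNN kernel (and the MKLDNN layout) only
// when the op carries a "use_mkldnn" attribute and MKLDNN can be used in the
// current context; otherwise it returns a plain kernel typed by the data
// type of the `name` input variable.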

class ActivationOp : public framework::OperatorWithKernel {
 public:
  using framework::OperatorWithKernel::OperatorWithKernel;

  void InferShape(framework::InferShapeContext* ctx) const override {
    ctx->ShareDim("X", /*->*/ "Out");
    ctx->ShareLoD("X", /*->*/ "Out");
  }

 protected:
  framework::OpKernelType GetExpectedKernelType(
      const framework::ExecutionContext& ctx) const override {
    return GetKernelType(ctx, *this, "X");
  }
};
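
// Every activation registered at the end of this file uses ActivationOp as
// its forward operator: "Out" inherits both the shape and the LoD of "X",
// and the kernel type is resolved from "X" via GetKernelType().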

class ActivationOpInferVarType
    : public framework::PassInDtypeAndVarTypeToOutput {
 protected:
  std::unordered_map<std::string, std::string> GetInputOutputWithSameType()
      const override {
    return std::unordered_map<std::string, std::string>{{"X", /*->*/ "Out"}};
  }
};

class ActivationOpGrad : public framework::OperatorWithKernel {
 public:
  using framework::OperatorWithKernel::OperatorWithKernel;

  void InferShape(framework::InferShapeContext* ctx) const override {
    ctx->ShareDim("Out", framework::GradVarName("X"));
    ctx->ShareLoD("Out", framework::GradVarName("X"));
  }

 protected:
  framework::OpKernelType GetExpectedKernelType(
      const framework::ExecutionContext& ctx) const override {
    return GetKernelType(ctx, *this, "Out");
  }
};
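
// The shared gradient operator keys everything off "Out": GRAD(X) takes its
// shape and LoD from "Out", and the kernel type is resolved from "Out",
// matching the grad makers above that feed only "Out" and GRAD(Out) into the
// backward op.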

UNUSED constexpr char SigmoidDoc[] = R"DOC(
Sigmoid Activation Operator

$$out = \frac{1}{1 + e^{-x}}$$

)DOC";

UNUSED constexpr char LogSigmoidDoc[] = R"DOC(
Logsigmoid Activation Operator

$$out = \\log \\frac{1}{1 + e^{-x}}$$

)DOC";

UNUSED constexpr char ExpDoc[] = R"DOC(
Exp Activation Operator.

$out = e^x$

)DOC";

UNUSED constexpr char ReluDoc[] = R"DOC(
Relu Activation Operator.

$out = \max(x, 0)$

)DOC";

UNUSED constexpr char GeluDoc[] = R"DOC(
Gelu Activation Operator.

$out = \\frac{1 + erf(\\frac{x}{\\sqrt{2}})}{2} x$

)DOC";

UNUSED constexpr char TanhDoc[] = R"DOC(
Tanh Activation Operator.

$$out = \\frac{e^{x} - e^{-x}}{e^{x} + e^{-x}}$$

)DOC";

UNUSED constexpr char TanhShrinkDoc[] = R"DOC(
TanhShrink Activation Operator.

$$out = x - \\frac{e^{x} - e^{-x}}{e^{x} + e^{-x}}$$

)DOC";

UNUSED constexpr char SqrtDoc[] = R"DOC(
Sqrt Activation Operator.

$out = \sqrt{x}$

)DOC";

UNUSED constexpr char AbsDoc[] = R"DOC(
Abs Activation Operator.

$out = |x|$

)DOC";

UNUSED constexpr char CeilDoc[] = R"DOC(
Ceil Activation Operator.

$out = ceil(x)$

)DOC";

UNUSED constexpr char FloorDoc[] = R"DOC(
Floor Activation Operator.

$out = floor(x)$

)DOC";

UNUSED constexpr char CosDoc[] = R"DOC(
Cosine Activation Operator.

$out = cos(x)$

)DOC";

UNUSED constexpr char SinDoc[] = R"DOC(
Sine Activation Operator.

$out = sin(x)$

)DOC";

UNUSED constexpr char RoundDoc[] = R"DOC(
Round Activation Operator.

$out = [x]$

)DOC";

UNUSED constexpr char ReciprocalDoc[] = R"DOC(
Reciprocal Activation Operator.

$$out = \\frac{1}{x}$$

)DOC";

UNUSED constexpr char LogDoc[] = R"DOC(
Log Activation Operator.

$out = \ln(x)$

Natural logarithm of x.

)DOC";

UNUSED constexpr char SquareDoc[] = R"DOC(
Square Activation Operator.

$out = x^2$

)DOC";

UNUSED constexpr char SoftplusDoc[] = R"DOC(
Softplus Activation Operator.

$out = \ln(1 + e^{x})$

)DOC";

UNUSED constexpr char SoftsignDoc[] = R"DOC(
Softsign Activation Operator.

$$out = \frac{x}{1 + |x|}$$

)DOC";

class LeakyReluOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  void Make() override {
    AddInput("X", "Input of LeakyRelu operator");
    AddOutput("Out", "Output of LeakyRelu operator");
    AddAttr<float>("alpha", "The small negative slope").SetDefault(0.02f);
    AddComment(R"DOC(
LeakyRelu Activation Operator.

$out = \max(x, \alpha * x)$

)DOC");
  }
};

class SoftShrinkOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  void Make() override {
    AddInput("X", "Input of Softshrink operator");
    AddOutput("Out", "Output of Softshrink operator");
    AddAttr<float>("lambda", "non-negative offset").SetDefault(0.5f);
    AddComment(R"DOC(
:strong:`Softshrink Activation Operator`

..  math::
    out = \begin{cases}
         x - \lambda, \text{if } x > \lambda \\
         x + \lambda, \text{if } x < -\lambda \\
         0,  \text{otherwise}
         \end{cases}

)DOC");
  }
};

class HardShrinkOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  void Make() override {
    AddInput("X", "Input of HardShrink operator");
    AddOutput("Out", "Output of HardShrink operator");
    AddAttr<float>("threshold",
                   "The value of threshold for HardShrink. [default: 0.5]")
        .SetDefault(0.5f);
    AddComment(R"DOC(
:strong:`HardShrink activation operator`

..  math::
    out = \begin{cases}
            x, \text{if } x > \lambda \\
            x, \text{if } x < -\lambda \\
            0,  \text{otherwise}
          \end{cases}

)DOC");
  }
};

class BReluOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  void Make() override {
    AddInput("X", "Input of BRelu operator");
    AddOutput("Out", "Output of BRelu operator");
    AddAttr<float>("t_min", "The min marginal value of BRelu")
        .SetDefault(static_cast<float>(0));
    AddAttr<float>("t_max", "The max marginal value of BRelu")
        .SetDefault(static_cast<float>(24));
    AddComment(R"DOC(
BRelu Activation Operator.

$out = \min(\max(x, t_{min}), t_{max})$

)DOC");
  }
};

class SoftReluOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  void Make() override {
    AddInput("X", "Input of SoftRelu operator");
    AddOutput("Out", "Output of SoftRelu operator");
    AddAttr<float>("threshold", "The threshold value of SoftRelu")
        .SetDefault(40.0f);
    AddComment(R"DOC(
SoftRelu Activation Operator.

$out = \ln(1 + \exp(\max(\min(x, threshold), -threshold)))$

)DOC");
  }
};

class ELUOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  void Make() override {
    AddInput("X", "Input of ELU operator");
    AddOutput("Out", "Output of ELU operator");
    AddAttr<float>("alpha", "The alpha value of ELU").SetDefault(1.0f);
    AddComment(R"DOC(
ELU Activation Operator.

Applies the following element-wise computation on the input according to
https://arxiv.org/abs/1511.07289.

$out = \max(0, x) + \min(0, \alpha * (e^x - 1))$

)DOC");
  }
};

class Relu6OpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  void Make() override {
    AddInput("X", "Input of Relu6 operator");
    AddOutput("Out", "Output of Relu6 operator");
    AddAttr<float>("threshold", "The threshold value of Relu6")
        .SetDefault(6.0f);
    AddComment(R"DOC(
Relu6 Activation Operator.

$out = \min(\max(0, x), 6)$

)DOC");
  }
};

class PowOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  void Make() override {
    AddInput("X", "Input of Pow operator");
    AddOutput("Out", "Output of Pow operator");
    AddAttr<float>("factor", "The exponential factor of Pow").SetDefault(1.0f);
    AddComment(R"DOC(
Pow Activation Operator.

$out = x^{factor}$

)DOC");
  }
};

class STanhOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  void Make() override {
    AddInput("X", "Input of STanh operator");
    AddOutput("Out", "Output of STanh operator");
    AddAttr<float>("scale_a", "The scale parameter of a for the input")
        .SetDefault(2.0f / 3.0f);
    AddAttr<float>("scale_b", "The scale parameter of b for the input")
        .SetDefault(1.7159f);
    AddComment(R"DOC(
STanh Activation Operator.

$$out = b * \\frac{e^{a * x} - e^{-a * x}}{e^{a * x} + e^{-a * x}}$$

)DOC");
  }
};

class ThresholdedReluOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  void Make() override {
    AddInput("X", "Input of ThresholdedRelu operator");
    AddOutput("Out", "Output of ThresholdedRelu operator");
    AddAttr<float>("threshold",
                   "The threshold location of activation. [default 1.0].")
        .SetDefault(1.0f);
    AddComment(R"DOC(
:strong:`ThresholdedRelu activation operator`

..  math::

    out = \begin{cases}
             x,  \text{if } x > threshold \\
             0,  \text{otherwise}
          \end{cases}
)DOC");
  }
};

class HardSigmoidOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  void Make() override {
    AddInput("X", "Input of HardSigmoid operator");
    AddOutput("Out", "Output of HardSigmoid operator");
    AddAttr<float>("slope", "Slope for linear approximation of sigmoid")
        .SetDefault(0.2f);
    AddAttr<float>("offset", "Offset for linear approximation of sigmoid")
        .SetDefault(0.5f);
    AddComment(R"DOC(
HardSigmoid Activation Operator.

A segment-wise linear approximation of sigmoid (https://arxiv.org/abs/1603.00391),
which is much faster than sigmoid.

$out = \max(0, \min(1, slope * x + offset))$

The slope should be positive. The offset can be either positive or negative.
The default slope and offset are set according to the above reference.
It is recommended to use the defaults for this activation.

)DOC");
  }
};

class SwishOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  void Make() override {
    AddInput("X", "Input of Swish operator");
    AddOutput("Out", "Output of Swish operator");
    AddAttr<float>("beta", "Constant beta of swish operator").SetDefault(1.0f);
    AddComment(R"DOC(
Swish Activation Operator.

$$out = \\frac{x}{1 + e^{- \beta x}}$$

)DOC");
  }
};

REGISTER_ACTIVATION_OP_MAKER(Sigmoid, SigmoidDoc);
REGISTER_ACTIVATION_OP_MAKER(LogSigmoid, LogSigmoidDoc);
REGISTER_ACTIVATION_OP_MAKER(Exp, ExpDoc);
REGISTER_ACTIVATION_OP_MAKER(Relu, ReluDoc);
REGISTER_ACTIVATION_OP_MAKER(Gelu, GeluDoc);
REGISTER_ACTIVATION_OP_MAKER(Tanh, TanhDoc);
REGISTER_ACTIVATION_OP_MAKER(TanhShrink, TanhShrinkDoc);
REGISTER_ACTIVATION_OP_MAKER(Sqrt, SqrtDoc);
REGISTER_ACTIVATION_OP_MAKER(Abs, AbsDoc);
REGISTER_ACTIVATION_OP_MAKER(Ceil, CeilDoc);
REGISTER_ACTIVATION_OP_MAKER(Floor, FloorDoc);
REGISTER_ACTIVATION_OP_MAKER(Cos, CosDoc);
REGISTER_ACTIVATION_OP_MAKER(Sin, SinDoc);
REGISTER_ACTIVATION_OP_MAKER(Round, RoundDoc);
REGISTER_ACTIVATION_OP_MAKER(Reciprocal, ReciprocalDoc);
REGISTER_ACTIVATION_OP_MAKER(Log, LogDoc);
REGISTER_ACTIVATION_OP_MAKER(Square, SquareDoc);
REGISTER_ACTIVATION_OP_MAKER(Softplus, SoftplusDoc);
REGISTER_ACTIVATION_OP_MAKER(Softsign, SoftsignDoc);

REGISTER_ACTIVATION_OP_GRAD_MAKER(Sigmoid, sigmoid);
REGISTER_ACTIVATION_OP_GRAD_MAKER(Relu, relu);
REGISTER_ACTIVATION_OP_GRAD_MAKER(Gelu, gelu);
REGISTER_ACTIVATION_OP_GRAD_MAKER(Exp, exp);
REGISTER_ACTIVATION_OP_GRAD_MAKER(Tanh, tanh);
REGISTER_ACTIVATION_OP_GRAD_MAKER(Ceil, ceil);
REGISTER_ACTIVATION_OP_GRAD_MAKER(Floor, floor);
REGISTER_ACTIVATION_OP_GRAD_MAKER(Sqrt, sqrt);
REGISTER_ACTIVATION_OP_GRAD_MAKER(SoftRelu, soft_relu);
REGISTER_ACTIVATION_OP_GRAD_MAKER(Relu6, relu6);
REGISTER_ACTIVATION_OP_GRAD_MAKER(Reciprocal, reciprocal);
REGISTER_ACTIVATION_OP_GRAD_MAKER(HardSigmoid, hard_sigmoid);
}  // namespace operators
}  // namespace paddle

namespace ops = paddle::operators;

#define FOR_EACH_INPLACE_OP_FUNCTOR(__macro) \
  __macro(Sigmoid, sigmoid);                 \
  __macro(Relu, relu);                       \
  __macro(Exp, exp);                         \
  __macro(Tanh, tanh);                       \
  __macro(Ceil, ceil);                       \
  __macro(Floor, floor);                     \
  __macro(Sqrt, sqrt);                       \
  __macro(SoftRelu, soft_relu);              \
  __macro(Relu6, relu6);                     \
  __macro(Reciprocal, reciprocal);           \
  __macro(HardSigmoid, hard_sigmoid);

#define FOR_EACH_OP_FUNCTOR(__macro) \
  __macro(LogSigmoid, logsigmoid);   \
  __macro(SoftShrink, softshrink);   \
  __macro(Abs, abs);                 \
  __macro(Cos, cos);                 \
  __macro(Sin, sin);                 \
  __macro(Round, round);             \
  __macro(Log, log);                 \
  __macro(Square, square);           \
  __macro(Gelu, gelu);               \
  __macro(BRelu, brelu);             \
  __macro(Pow, pow);                 \
  __macro(STanh, stanh);             \
  __macro(Softplus, softplus);       \
  __macro(Softsign, softsign);       \
  __macro(LeakyRelu, leaky_relu);    \
  __macro(TanhShrink, tanh_shrink);  \
  __macro(ELU, elu);                 \
  __macro(HardShrink, hard_shrink);  \
  __macro(Swish, swish);             \
  __macro(ThresholdedRelu, thresholded_relu);

#define REGISTER_INPLACE_ACTIVATION_OP(OP_NAME, KERNEL_TYPE)                   \
  REGISTER_OPERATOR(KERNEL_TYPE, ::paddle::operators::ActivationOp,            \
                    ::paddle::operators::OP_NAME##OpMaker,                     \
                    ::paddle::operators::ActivationOpInferVarType,             \
                    ::paddle::operators::OP_NAME##GradMaker,                   \
                    ::paddle::framework::SingleOpInplaceInToOut);              \
  REGISTER_OPERATOR(KERNEL_TYPE##_grad, ::paddle::operators::ActivationOpGrad, \
                    ::paddle::framework::SingleOpInplaceInToOut)
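
// Illustrative note: REGISTER_INPLACE_ACTIVATION_OP(Relu, relu), for example,
// registers the "relu" and "relu_grad" operators with the
// SingleOpInplaceInToOut inference, which lets the framework reuse the input
// variable's memory for the output where that is safe; REGISTER_ACTIVATION_OP
// below performs the same registration without the in-place inference.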

#define REGISTER_ACTIVATION_OP(OP_NAME, KERNEL_TYPE)                    \
  REGISTER_OPERATOR(KERNEL_TYPE, ::paddle::operators::ActivationOp,     \
                    ::paddle::operators::OP_NAME##OpMaker,              \
                    ::paddle::operators::ActivationOpInferVarType,      \
                    ::paddle::framework::DefaultGradOpDescMaker<true>); \
  REGISTER_OPERATOR(KERNEL_TYPE##_grad, ::paddle::operators::ActivationOpGrad)

#define REGISTER_ACTIVATION_CPU_KERNEL(act_type, functor, grad_functor)   \
  REGISTER_OP_CPU_KERNEL(                                                 \
      act_type, ops::ActivationKernel<paddle::platform::CPUDeviceContext, \
                                      ops::functor<float>>,               \
      ops::ActivationKernel<paddle::platform::CPUDeviceContext,           \
                            ops::functor<double>>);                       \
  REGISTER_OP_CPU_KERNEL(                                                 \
      act_type##_grad,                                                    \
      ops::ActivationGradKernel<paddle::platform::CPUDeviceContext,       \
                                ops::grad_functor<float>>,                \
      ops::ActivationGradKernel<paddle::platform::CPUDeviceContext,       \
                                ops::grad_functor<double>>);
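
// Illustrative note: each REGISTER_ACTIVATION_CPU_KERNEL(act_type, functor,
// grad_functor) call instantiates float and double CPU kernels for both the
// forward functor and its gradient functor. FOR_EACH_KERNEL_FUNCTOR, used at
// the bottom of this file, is expected to come from activation_op.h and to
// list every (act_type, functor, grad_functor) triple defined there.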

FOR_EACH_OP_FUNCTOR(REGISTER_ACTIVATION_OP);
FOR_EACH_INPLACE_OP_FUNCTOR(REGISTER_INPLACE_ACTIVATION_OP);
FOR_EACH_KERNEL_FUNCTOR(REGISTER_ACTIVATION_CPU_KERNEL);