/* Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#include "paddle/fluid/operators/activation_op.h"
#include <string>
#include "paddle/fluid/operators/mkldnn_activation_op.h"

namespace paddle {
namespace operators {

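// Generates an OpProtoAndCheckerMaker for a simple activation operator:
// a single input X, a single output Out (which may reuse X's memory),
// an optional use_mkldnn attribute, and the operator documentation string.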
#define REGISTER_ACTIVATION_OP_MAKER(OP_NAME, OP_COMMENT)               \
  class OP_NAME##OpMaker                                                \
      : public ::paddle::framework::OpProtoAndCheckerMaker {            \
   public:                                                              \
    void Make() override {                                              \
      AddInput("X", "Input of " #OP_NAME " operator");                  \
      AddOutput("Out", "Output of " #OP_NAME " operator").Reuse("X");   \
      AddAttr<bool>("use_mkldnn",                                       \
                    "(bool, default false) Only used in mkldnn kernel") \
          .SetDefault(false);                                           \
      AddComment(OP_COMMENT);                                           \
    }                                                                   \
  }

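// Generates a SingleGradOpDescMaker for activations whose gradient can be
// computed from the forward output alone: the generated <kernel>_grad op
// takes Out and the gradient of Out as inputs and produces the gradient of X,
// so the forward input X does not need to be kept for the backward pass.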
#define REGISTER_ACTIVATION_OP_GRAD_MAKER(OP_NAME, KERNEL_TYPE)              \
  class OP_NAME##GradMaker                                                   \
      : public ::paddle::framework::SingleGradOpDescMaker {                  \
   public:                                                                   \
    using ::paddle::framework::SingleGradOpDescMaker::SingleGradOpDescMaker; \
                                                                             \
   protected:                                                                \
    std::unique_ptr<::paddle::framework::OpDesc> Apply() const override {    \
      auto* op = new ::paddle::framework::OpDesc();                          \
      op->SetType(#KERNEL_TYPE "_grad");                                     \
      op->SetInput("Out", Output("Out"));                                    \
      op->SetInput(::paddle::framework::GradVarName("Out"),                  \
                   OutputGrad("Out"));                                       \
                                                                             \
      op->SetAttrMap(Attrs());                                               \
                                                                             \
      op->SetOutput(::paddle::framework::GradVarName("X"), InputGrad("X"));  \
      return std::unique_ptr<::paddle::framework::OpDesc>(op);               \
    }                                                                        \
  }

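// Chooses the kernel for an activation op. The data type follows the tensor
// named `name`; the library stays plain unless the op carries a use_mkldnn
// attribute and MKLDNN can be used on this place, in which case the MKLDNN
// kernel and layout are selected.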
framework::OpKernelType GetKernelType(const framework::ExecutionContext& ctx,
                                      const framework::OperatorWithKernel& oper,
                                      const std::string& name) {
  framework::LibraryType library{framework::LibraryType::kPlain};

  framework::DataLayout layout = framework::DataLayout::kAnyLayout;
#ifdef PADDLE_WITH_MKLDNN
  auto it = oper.Attrs().find("use_mkldnn");
  if (library == framework::LibraryType::kPlain && it != oper.Attrs().end() &&
      platform::CanMKLDNNBeUsed(ctx)) {
    library = framework::LibraryType::kMKLDNN;
    layout = framework::DataLayout::kMKLDNN;
  }
#endif
  return framework::OpKernelType(
      framework::ToDataType(ctx.Input<framework::Tensor>(name)->type()),
      ctx.GetPlace(), layout, library);
}

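// Forward activation op: Out has exactly the same shape (and LoD) as X.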
class ActivationOp : public framework::OperatorWithKernel {
 public:
  using framework::OperatorWithKernel::OperatorWithKernel;

  void InferShape(framework::InferShapeContext* ctx) const override {
    ctx->SetOutputDim("Out", ctx->GetInputDim("X"));
    ctx->ShareLoD("X", /*->*/ "Out");
  }

  framework::OpKernelType GetExpectedKernelType(
      const framework::ExecutionContext& ctx) const override {
    return GetKernelType(ctx, *this, "X");
  }
};

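// Backward activation op: the gradient of X has the same shape as Out.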
class ActivationOpGrad : public framework::OperatorWithKernel {
 public:
  using framework::OperatorWithKernel::OperatorWithKernel;

  void InferShape(framework::InferShapeContext* ctx) const override {
    ctx->SetOutputDim(framework::GradVarName("X"), ctx->GetInputDim("Out"));
  }

  framework::OpKernelType GetExpectedKernelType(
      const framework::ExecutionContext& ctx) const override {
    return GetKernelType(ctx, *this, "Out");
  }
};

__attribute__((unused)) constexpr char SigmoidDoc[] = R"DOC(
Sigmoid Activation Operator.

$$out = \frac{1}{1 + e^{-x}}$$

)DOC";

__attribute__((unused)) constexpr char LogSigmoidDoc[] = R"DOC(
Logsigmoid Activation Operator.

$$out = \log \frac{1}{1 + e^{-x}}$$

)DOC";

__attribute__((unused)) constexpr char ExpDoc[] = R"DOC(
Exp Activation Operator.

$out = e^x$

)DOC";

__attribute__((unused)) constexpr char ReluDoc[] = R"DOC(
Relu Activation Operator.

$out = \max(x, 0)$

)DOC";

__attribute__((unused)) constexpr char TanhDoc[] = R"DOC(
Tanh Activation Operator.

$$out = \frac{e^{x} - e^{-x}}{e^{x} + e^{-x}}$$

)DOC";

__attribute__((unused)) constexpr char TanhShrinkDoc[] = R"DOC(
TanhShrink Activation Operator.

$$out = x - \frac{e^{x} - e^{-x}}{e^{x} + e^{-x}}$$

)DOC";

__attribute__((unused)) constexpr char SqrtDoc[] = R"DOC(
Sqrt Activation Operator.

$out = \sqrt{x}$

)DOC";

__attribute__((unused)) constexpr char AbsDoc[] = R"DOC(
Abs Activation Operator.

$out = |x|$

)DOC";

__attribute__((unused)) constexpr char CeilDoc[] = R"DOC(
Ceil Activation Operator.

$out = ceil(x)$

)DOC";

__attribute__((unused)) constexpr char FloorDoc[] = R"DOC(
Floor Activation Operator.

$out = floor(x)$

)DOC";

__attribute__((unused)) constexpr char CosDoc[] = R"DOC(
Cosine Activation Operator.

$out = cos(x)$

)DOC";

__attribute__((unused)) constexpr char SinDoc[] = R"DOC(
Sine Activation Operator.

$out = sin(x)$

)DOC";

__attribute__((unused)) constexpr char RoundDoc[] = R"DOC(
Round Activation Operator.

$out = [x]$

)DOC";

__attribute__((unused)) constexpr char ReciprocalDoc[] = R"DOC(
Reciprocal Activation Operator.

$$out = \frac{1}{x}$$

)DOC";

__attribute__((unused)) constexpr char LogDoc[] = R"DOC(
Log Activation Operator.

$out = \ln(x)$

Natural logarithm of x.

)DOC";

__attribute__((unused)) constexpr char SquareDoc[] = R"DOC(
Square Activation Operator.

$out = x^2$

)DOC";

__attribute__((unused)) constexpr char SoftplusDoc[] = R"DOC(
Softplus Activation Operator.

$out = \ln(1 + e^{x})$

)DOC";

__attribute__((unused)) constexpr char SoftsignDoc[] = R"DOC(
Softsign Activation Operator.

$$out = \frac{x}{1 + |x|}$$

)DOC";

class LeakyReluOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  void Make() override {
    AddInput("X", "Input of LeakyRelu operator");
    AddOutput("Out", "Output of LeakyRelu operator");
    AddAttr<float>("alpha", "The small negative slope").SetDefault(0.02f);
    AddComment(R"DOC(
LeakyRelu Activation Operator.

$out = \max(x, \alpha * x)$

)DOC");
  }
};

class SoftShrinkOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  void Make() override {
    AddInput("X", "Input of Softshrink operator");
    AddOutput("Out", "Output of Softshrink operator");
    AddAttr<float>("lambda", "non-negative offset").SetDefault(0.5f);
    AddComment(R"DOC(
Softshrink Activation Operator.

$$
out = \begin{cases}
    x - \lambda, \text{if } x > \lambda \\
    x + \lambda, \text{if } x < -\lambda \\
    0,  \text{otherwise}
    \end{cases}
$$

)DOC");
  }
};

class HardShrinkOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  void Make() override {
    AddInput("X", "Input of HardShrink operator");
    AddOutput("Out", "Output of HardShrink operator");
    AddAttr<float>("threshold",
                   "The value of threshold for HardShrink. [default: 0.5]")
        .SetDefault(0.5f);
    AddComment(R"DOC(
:strong:`HardShrink activation operator`

..  math::
    out = \begin{cases}
            x, \text{if } x > \lambda \\
            x, \text{if } x < -\lambda \\
            0,  \text{otherwise}
          \end{cases}

)DOC");
  }
};

class BReluOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  void Make() override {
    AddInput("X", "Input of BRelu operator");
    AddOutput("Out", "Output of BRelu operator");
    AddAttr<float>("t_min", "The min marginal value of BRelu")
        .SetDefault(static_cast<float>(0));
    AddAttr<float>("t_max", "The max marginal value of BRelu")
        .SetDefault(static_cast<float>(24));
    AddComment(R"DOC(
BRelu Activation Operator.

$out = \min(\max(x, t_{min}), t_{max})$

)DOC");
  }
};

class SoftReluOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  void Make() override {
    AddInput("X", "Input of SoftRelu operator");
    AddOutput("Out", "Output of SoftRelu operator");
    AddAttr<float>("threshold", "The threshold value of SoftRelu")
        .SetDefault(40.0f);
    AddComment(R"DOC(
SoftRelu Activation Operator.

$out = \ln(1 + \exp(\max(\min(x, threshold), -threshold)))$

)DOC");
  }
};

class ELUOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  void Make() override {
    AddInput("X", "Input of ELU operator");
    AddOutput("Out", "Output of ELU operator");
    AddAttr<float>("alpha", "The alpha value of ELU").SetDefault(1.0f);
    AddComment(R"DOC(
ELU Activation Operator.

Applies the following element-wise computation on the input according to
https://arxiv.org/abs/1511.07289.

$out = \max(0, x) + \min(0, \alpha * (e^x - 1))$

)DOC");
  }
};

class Relu6OpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  void Make() override {
    AddInput("X", "Input of Relu6 operator");
    AddOutput("Out", "Output of Relu6 operator");
    AddAttr<float>("threshold", "The threshold value of Relu6")
        .SetDefault(6.0f);
    AddComment(R"DOC(
Relu6 Activation Operator.

$out = \min(\max(0, x), 6)$

)DOC");
  }
};

class PowOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  void Make() override {
    AddInput("X", "Input of Pow operator");
    AddOutput("Out", "Output of Pow operator");
    AddAttr<float>("factor", "The exponential factor of Pow").SetDefault(1.0f);
    AddComment(R"DOC(
Pow Activation Operator.

$out = x^{factor}$

)DOC");
  }
};

class STanhOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  void Make() override {
    AddInput("X", "Input of STanh operator");
    AddOutput("Out", "Output of STanh operator");
    AddAttr<float>("scale_a", "The scale parameter of a for the input")
        .SetDefault(2.0f / 3.0f);
    AddAttr<float>("scale_b", "The scale parameter of b for the input")
        .SetDefault(1.7159f);
    AddComment(R"DOC(
STanh Activation Operator.

$$out = b * \frac{e^{a * x} - e^{-a * x}}{e^{a * x} + e^{-a * x}}$$

)DOC");
  }
};

class ThresholdedReluOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  void Make() override {
    AddInput("X", "Input of ThresholdedRelu operator");
    AddOutput("Out", "Output of ThresholdedRelu operator");
    AddAttr<float>("threshold",
                   "The threshold location of activation. [default 1.0].")
        .SetDefault(1.0f);
    AddComment(R"DOC(
:strong:`ThresholdedRelu activation operator`

..  math::

    out = \begin{cases}
             x,  \text{if } x > threshold \\
             0,  \text{otherwise}
          \end{cases}
)DOC");
  }
};

class HardSigmoidOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  void Make() override {
    AddInput("X", "Input of HardSigmoid operator");
    AddOutput("Out", "Output of HardSigmoid operator");
    AddAttr<float>("slope", "Slope for linear approximation of sigmoid")
        .SetDefault(0.2f);
    AddAttr<float>("offset", "Offset for linear approximation of sigmoid")
        .SetDefault(0.5f);
    AddComment(R"DOC(
HardSigmoid Activation Operator.

Segment-wise linear approximation of sigmoid (https://arxiv.org/abs/1603.00391),
which is much faster than sigmoid.

$out = \max(0, \min(1, slope * x + offset))$

The slope should be positive. The offset can be either positive or negative.
The default slope and offset are set according to the above reference.
It is recommended to use the defaults for this activation.

)DOC");
  }
};

class SwishOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  void Make() override {
    AddInput("X", "Input of Swish operator");
    AddOutput("Out", "Output of Swish operator");
    AddAttr<float>("beta", "Constant beta of swish operator").SetDefault(1.0f);
    AddComment(R"DOC(
Swish Activation Operator.

$$out = \frac{x}{1 + e^{- \beta x}}$$

)DOC");
  }
};

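// Instantiate an OpMaker for every simple activation, pairing it with the
// documentation string defined above.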
REGISTER_ACTIVATION_OP_MAKER(Sigmoid, SigmoidDoc);
REGISTER_ACTIVATION_OP_MAKER(LogSigmoid, LogSigmoidDoc);
REGISTER_ACTIVATION_OP_MAKER(Exp, ExpDoc);
REGISTER_ACTIVATION_OP_MAKER(Relu, ReluDoc);
REGISTER_ACTIVATION_OP_MAKER(Tanh, TanhDoc);
REGISTER_ACTIVATION_OP_MAKER(TanhShrink, TanhShrinkDoc);
REGISTER_ACTIVATION_OP_MAKER(Sqrt, SqrtDoc);
REGISTER_ACTIVATION_OP_MAKER(Abs, AbsDoc);
REGISTER_ACTIVATION_OP_MAKER(Ceil, CeilDoc);
REGISTER_ACTIVATION_OP_MAKER(Floor, FloorDoc);
REGISTER_ACTIVATION_OP_MAKER(Cos, CosDoc);
REGISTER_ACTIVATION_OP_MAKER(Sin, SinDoc);
REGISTER_ACTIVATION_OP_MAKER(Round, RoundDoc);
REGISTER_ACTIVATION_OP_MAKER(Reciprocal, ReciprocalDoc);
REGISTER_ACTIVATION_OP_MAKER(Log, LogDoc);
REGISTER_ACTIVATION_OP_MAKER(Square, SquareDoc);
REGISTER_ACTIVATION_OP_MAKER(Softplus, SoftplusDoc);
REGISTER_ACTIVATION_OP_MAKER(Softsign, SoftsignDoc);

REGISTER_ACTIVATION_OP_GRAD_MAKER(Sigmoid, sigmoid);
REGISTER_ACTIVATION_OP_GRAD_MAKER(Relu, relu);
REGISTER_ACTIVATION_OP_GRAD_MAKER(Exp, exp);
REGISTER_ACTIVATION_OP_GRAD_MAKER(Tanh, tanh);
REGISTER_ACTIVATION_OP_GRAD_MAKER(Ceil, ceil);
REGISTER_ACTIVATION_OP_GRAD_MAKER(Floor, floor);
REGISTER_ACTIVATION_OP_GRAD_MAKER(Sqrt, sqrt);
REGISTER_ACTIVATION_OP_GRAD_MAKER(SoftRelu, soft_relu);
REGISTER_ACTIVATION_OP_GRAD_MAKER(Relu6, relu6);
REGISTER_ACTIVATION_OP_GRAD_MAKER(Reciprocal, reciprocal);
REGISTER_ACTIVATION_OP_GRAD_MAKER(HardSigmoid, hard_sigmoid);
}  // namespace operators
}  // namespace paddle

namespace ops = paddle::operators;

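// Ops in FOR_EACH_INPLACE_OP_FUNCTOR are registered with the custom GradMaker
// generated above (their gradients depend only on Out, so the forward output
// can reuse the input buffer); ops in FOR_EACH_OP_FUNCTOR fall back to the
// default gradient op descriptor maker.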
#define FOR_EACH_INPLACE_OP_FUNCTOR(__macro) \
  __macro(Sigmoid, sigmoid);                 \
  __macro(Relu, relu);                       \
  __macro(Exp, exp);                         \
  __macro(Tanh, tanh);                       \
  __macro(Ceil, ceil);                       \
  __macro(Floor, floor);                     \
  __macro(Sqrt, sqrt);                       \
  __macro(SoftRelu, soft_relu);              \
  __macro(Relu6, relu6);                     \
  __macro(Reciprocal, reciprocal);           \
  __macro(HardSigmoid, hard_sigmoid);

#define FOR_EACH_OP_FUNCTOR(__macro) \
  __macro(LogSigmoid, logsigmoid);   \
  __macro(SoftShrink, softshrink);   \
  __macro(Abs, abs);                 \
  __macro(Cos, cos);                 \
  __macro(Sin, sin);                 \
  __macro(Round, round);             \
  __macro(Log, log);                 \
  __macro(Square, square);           \
  __macro(BRelu, brelu);             \
  __macro(Pow, pow);                 \
  __macro(STanh, stanh);             \
  __macro(Softplus, softplus);       \
  __macro(Softsign, softsign);       \
  __macro(LeakyRelu, leaky_relu);    \
  __macro(TanhShrink, tanh_shrink);  \
  __macro(ELU, elu);                 \
  __macro(HardShrink, hard_shrink);  \
  __macro(Swish, swish);             \
  __macro(ThresholdedRelu, thresholded_relu);

#define REGISTER_INPLACE_ACTIVATION_OP(OP_NAME, KERNEL_TYPE)        \
  REGISTER_OPERATOR(KERNEL_TYPE, ::paddle::operators::ActivationOp, \
                    ::paddle::operators::OP_NAME##OpMaker,          \
                    ::paddle::operators::OP_NAME##GradMaker);       \
  REGISTER_OPERATOR(KERNEL_TYPE##_grad, ::paddle::operators::ActivationOpGrad)

#define REGISTER_ACTIVATION_OP(OP_NAME, KERNEL_TYPE)                    \
  REGISTER_OPERATOR(KERNEL_TYPE, ::paddle::operators::ActivationOp,     \
                    ::paddle::operators::OP_NAME##OpMaker,              \
                    ::paddle::framework::DefaultGradOpDescMaker<true>); \
  REGISTER_OPERATOR(KERNEL_TYPE##_grad, ::paddle::operators::ActivationOpGrad)
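// As an illustration (not generated code), REGISTER_ACTIVATION_OP(Abs, abs)
// expands to roughly:
//   REGISTER_OPERATOR(abs, ::paddle::operators::ActivationOp,
//                     ::paddle::operators::AbsOpMaker,
//                     ::paddle::framework::DefaultGradOpDescMaker<true>);
//   REGISTER_OPERATOR(abs_grad, ::paddle::operators::ActivationOpGrad)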

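// Registers float and double CPU kernels for both the forward activation and
// its gradient.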
#define REGISTER_ACTIVATION_CPU_KERNEL(act_type, functor, grad_functor)   \
  REGISTER_OP_CPU_KERNEL(                                                 \
      act_type, ops::ActivationKernel<paddle::platform::CPUDeviceContext, \
                                      ops::functor<float>>,               \
      ops::ActivationKernel<paddle::platform::CPUDeviceContext,           \
                            ops::functor<double>>);                       \
  REGISTER_OP_CPU_KERNEL(                                                 \
      act_type##_grad,                                                    \
      ops::ActivationGradKernel<paddle::platform::CPUDeviceContext,       \
                                ops::grad_functor<float>>,                \
      ops::ActivationGradKernel<paddle::platform::CPUDeviceContext,       \
                                ops::grad_functor<double>>);

FOR_EACH_OP_FUNCTOR(REGISTER_ACTIVATION_OP);
FOR_EACH_INPLACE_OP_FUNCTOR(REGISTER_INPLACE_ACTIVATION_OP);
FOR_EACH_KERNEL_FUNCTOR(REGISTER_ACTIVATION_CPU_KERNEL);