/* Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#include "paddle/fluid/operators/activation_op.h"

namespace paddle {
namespace operators {
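
// Every activation in this file reuses the same operator pair: ActivationOp
// infers that "Out" takes both the shape and the LoD of "X", and
// ActivationOpGrad infers X@GRAD from the shape of "Out". Sharing shapes this
// way is valid because all of these activations are element-wise. The only
// per-op pieces are the OpMaker classes below and the kernel functors
// registered at the bottom of the file.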

class ActivationOp : public framework::OperatorWithKernel {
 public:
  using framework::OperatorWithKernel::OperatorWithKernel;

  void InferShape(framework::InferShapeContext *ctx) const override {
    ctx->SetOutputDim("Out", ctx->GetInputDim("X"));
    ctx->ShareLoD("X", /*->*/ "Out");
  }

  framework::OpKernelType GetExpectedKernelType(
      const framework::ExecutionContext &ctx) const override {
    return ActivationHelper().GetKernelType(ctx, *this);
  }
};

class ActivationOpGrad : public framework::OperatorWithKernel {
 public:
  using framework::OperatorWithKernel::OperatorWithKernel;

  void InferShape(framework::InferShapeContext *ctx) const override {
    ctx->SetOutputDim(framework::GradVarName("X"), ctx->GetInputDim("Out"));
  }

  framework::OpKernelType GetExpectedKernelType(
      const framework::ExecutionContext &ctx) const override {
    return ActivationHelper().GetKernelType(ctx, *this);
  }
};

class SigmoidOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  SigmoidOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : framework::OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of Sigmoid operator");
    AddOutput("Out", "Output of Sigmoid operator");
    AddComment(R"DOC(
Sigmoid Activation Operator.

$$out = \frac{1}{1 + e^{-x}}$$

)DOC");
  }
};
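
// Illustration only (not part of this file): once registered, these ops are
// typically driven from the Fluid Python API. Assuming the generated Python
// wrapper exists, usage would look roughly like:
//
//   out = fluid.layers.sigmoid(x)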

class LogSigmoidOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  LogSigmoidOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : framework::OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of LogSigmoid operator");
    AddOutput("Out", "Output of LogSigmoid operator");
    AddComment(R"DOC(
LogSigmoid Activation Operator.

$$out = \log \frac{1}{1 + e^{-x}}$$

)DOC");
  }
};

class ExpOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  ExpOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : framework::OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of Exp operator");
    AddOutput("Out", "Output of Exp operator");
    AddComment(R"DOC(
Exp Activation Operator.

$out = e^x$

)DOC");
  }
};

class ReluOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  ReluOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : framework::OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of Relu operator");
    AddOutput("Out", "Output of Relu operator");
    AddAttr<bool>("use_mkldnn",
                  "(bool, default false) Only used in mkldnn kernel")
        .SetDefault(false);
    AddAttr<std::string>(
        "data_format",
        "(string, default \"AnyLayout\") Only used in mkldnn kernel. "
        "An optional string from: \"NHWC\", \"NCHW\". "
        "Specify the data format of the output data; "
        "the input will be transformed automatically.")
        .SetDefault("AnyLayout");
    AddComment(R"DOC(
Relu Activation Operator.

$out = \max(x, 0)$

)DOC");
  }
};

class LeakyReluOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  LeakyReluOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : framework::OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of LeakyRelu operator");
    AddOutput("Out", "Output of LeakyRelu operator");
    AddAttr<float>("alpha", "The small negative slope").SetDefault(0.02f);
    AddComment(R"DOC(
LeakyRelu Activation Operator.

$out = \max(x, \alpha * x)$

)DOC");
  }
};

class SoftShrinkOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  SoftShrinkOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : framework::OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of Softshrink operator");
    AddOutput("Out", "Output of Softshrink operator");
    AddAttr<float>("lambda", "non-negative offset").SetDefault(0.5f);
    AddComment(R"DOC(
Softshrink Activation Operator.

$$
out = \begin{cases}
    x - \lambda, \text{if } x > \lambda \\
    x + \lambda, \text{if } x < -\lambda \\
    0,  \text{otherwise}
    \end{cases}
$$

)DOC");
  }
};

class TanhOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  TanhOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : framework::OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of Tanh operator");
    AddOutput("Out", "Output of Tanh operator");
    AddAttr<bool>("use_mkldnn",
                  "(bool, default false) Only used in mkldnn kernel")
        .SetDefault(false);
    AddAttr<std::string>(
        "data_format",
        "(string, default \"AnyLayout\") Only used in mkldnn kernel. "
        "An optional string from: \"NHWC\", \"NCHW\". "
        "Specify the data format of the output data; "
        "the input will be transformed automatically.")
        .SetDefault("AnyLayout");
    AddComment(R"DOC(
Tanh Activation Operator.

$$out = \frac{e^{x} - e^{-x}}{e^{x} + e^{-x}}$$

)DOC");
  }
};

class TanhShrinkOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  TanhShrinkOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : framework::OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of TanhShrink operator");
    AddOutput("Out", "Output of TanhShrink operator");
    AddComment(R"DOC(
TanhShrink Activation Operator.

$$out = x - \frac{e^{x} - e^{-x}}{e^{x} + e^{-x}}$$

)DOC");
  }
};

class HardShrinkOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  HardShrinkOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : framework::OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of HardShrink operator");
    AddOutput("Out", "Output of HardShrink operator");
    AddAttr<float>("threshold", "The value of threshold for HardShrink")
        .SetDefault(0.5f);
    AddComment(R"DOC(
HardShrink Activation Operator.

$$
out = \begin{cases}
    x, \text{if } x > threshold \\
    x, \text{if } x < -threshold \\
    0,  \text{otherwise}
    \end{cases}
$$

)DOC");
  }
};

class SqrtOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  SqrtOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : framework::OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of Sqrt operator");
    AddOutput("Out", "Output of Sqrt operator");
    AddAttr<bool>("use_mkldnn",
                  "(bool, default false) Only used in mkldnn kernel")
        .SetDefault(false);
    AddAttr<std::string>(
        "data_format",
        "(string, default \"AnyLayout\") Only used in mkldnn kernel. "
        "An optional string from: \"NHWC\", \"NCHW\". "
        "Specify the data format of the output data; "
        "the input will be transformed automatically.")
        .SetDefault("AnyLayout");
    AddComment(R"DOC(
Sqrt Activation Operator.

$out = \sqrt{x}$

)DOC");
  }
};

class AbsOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  AbsOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : framework::OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of Abs operator");
    AddOutput("Out", "Output of Abs operator");
    AddAttr<bool>("use_mkldnn",
                  "(bool, default false) Only used in mkldnn kernel")
        .SetDefault(false);
    AddAttr<std::string>(
        "data_format",
        "(string, default \"AnyLayout\") Only used in mkldnn kernel. "
        "An optional string from: \"NHWC\", \"NCHW\". "
        "Specify the data format of the output data; "
        "the input will be transformed automatically.")
        .SetDefault("AnyLayout");
    AddComment(R"DOC(
Abs Activation Operator.

$out = |x|$

)DOC");
  }
};

class CeilOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  CeilOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : framework::OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of Ceil operator");
    AddOutput("Out", "Output of Ceil operator");
    AddComment(R"DOC(
Ceil Activation Operator.

$out = \lceil x \rceil$

)DOC");
  }
};

class FloorOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  FloorOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : framework::OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of Floor operator");
    AddOutput("Out", "Output of Floor operator");
    AddComment(R"DOC(
Floor Activation Operator.

$out = \lfloor x \rfloor$

)DOC");
  }
};

class RoundOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  RoundOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : framework::OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of Round operator");
    AddOutput("Out", "Output of Round operator");
    AddComment(R"DOC(
Round Activation Operator.

$out = [x]$

)DOC");
  }
};

class ReciprocalOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  ReciprocalOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : framework::OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of Reciprocal operator");
    AddOutput("Out", "Output of Reciprocal operator");
    AddComment(R"DOC(
Reciprocal Activation Operator.

$$out = \frac{1}{x}$$

)DOC");
  }
};

class LogOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  LogOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : framework::OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of Log operator");
    AddOutput("Out", "Output of Log operator");
    AddComment(R"DOC(
Log Activation Operator.

$out = \ln(x)$

Natural logarithm of x.

)DOC");
  }
};

class SquareOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  SquareOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : framework::OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of Square operator");
    AddOutput("Out", "Output of Square operator");
    AddComment(R"DOC(
Square Activation Operator.

$out = x^2$

)DOC");
  }
};

class SoftplusOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  SoftplusOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : framework::OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of Softplus operator");
    AddOutput("Out", "Output of Softplus operator");
    AddComment(R"DOC(
Softplus Activation Operator.

$out = \ln(1 + e^{x})$

)DOC");
  }
};

class SoftsignOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  SoftsignOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : framework::OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of Softsign operator");
    AddOutput("Out", "Output of Softsign operator");
    AddComment(R"DOC(
Softsign Activation Operator.

$$out = \frac{x}{1 + |x|}$$

)DOC");
  }
};

class BReluOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  BReluOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : framework::OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of BRelu operator");
    AddOutput("Out", "Output of BRelu operator");
    AddAttr<float>("t_min", "The min marginal value of BRelu")
        .SetDefault(static_cast<float>(0));
    AddAttr<float>("t_max", "The max marginal value of BRelu")
        .SetDefault(static_cast<float>(24));
    AddComment(R"DOC(
BRelu Activation Operator.

$out = \min(\max(x, t_{min}), t_{max})$

)DOC");
  }
};

class SoftReluOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  SoftReluOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : framework::OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of SoftRelu operator");
    AddOutput("Out", "Output of SoftRelu operator");
    AddAttr<float>("threshold", "The threshold value of SoftRelu")
        .SetDefault(40.0f);
    AddComment(R"DOC(
SoftRelu Activation Operator.

$out = \ln(1 + \exp(\max(\min(x, threshold), -threshold)))$

)DOC");
  }
};

class ELUOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  ELUOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : framework::OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of ELU operator");
    AddOutput("Out", "Output of ELU operator");
    AddAttr<float>("alpha", "The alpha value of ELU").SetDefault(1.0f);
    AddComment(R"DOC(
ELU Activation Operator.

Applies the following element-wise computation on the input according to
https://arxiv.org/abs/1511.07289.

$out = \max(0, x) + \min(0, \alpha * (e^x - 1))$

)DOC");
  }
};

class Relu6OpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  Relu6OpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : framework::OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of Relu6 operator");
    AddOutput("Out", "Output of Relu6 operator");
    AddAttr<float>("threshold", "The threshold value of Relu6")
        .SetDefault(6.0f);
    AddComment(R"DOC(
Relu6 Activation Operator.

$out = \min(\max(0, x), 6)$

)DOC");
  }
};

class PowOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  PowOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : framework::OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of Pow operator");
    AddOutput("Out", "Output of Pow operator");
    AddAttr<float>("factor", "The exponential factor of Pow").SetDefault(1.0f);
    AddComment(R"DOC(
Pow Activation Operator.

$out = x^{factor}$

)DOC");
  }
};

class STanhOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  STanhOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : framework::OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of STanh operator");
    AddOutput("Out", "Output of STanh operator");
    AddAttr<float>("scale_a", "The scale parameter of a for the input")
        .SetDefault(2.0f / 3.0f);
    AddAttr<float>("scale_b", "The scale parameter of b for the input")
        .SetDefault(1.7159f);
    AddComment(R"DOC(
STanh Activation Operator.

$$out = b * \frac{e^{a * x} - e^{-a * x}}{e^{a * x} + e^{-a * x}}$$

)DOC");
  }
};

class ThresholdedReluOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  ThresholdedReluOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : framework::OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of ThresholdedRelu operator");
    AddOutput("Out", "Output of ThresholdedRelu operator");
    AddAttr<float>("threshold", "The threshold location of activation")
        .SetDefault(1.0f);
    AddComment(R"DOC(
ThresholdedRelu Activation Operator.

$$
out = \begin{cases}
    x, \text{if } x > threshold \\
    0,  \text{otherwise}
    \end{cases}
$$

)DOC");
  }
};

class HardSigmoidOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  HardSigmoidOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : framework::OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of HardSigmoid operator");
    AddOutput("Out", "Output of HardSigmoid operator");
    AddAttr<float>("slope", "Slope for linear approximation of sigmoid")
        .SetDefault(0.2f);
    AddAttr<float>("offset", "Offset for linear approximation of sigmoid")
        .SetDefault(0.5f);
    AddComment(R"DOC(
HardSigmoid Activation Operator.

Segment-wise linear approximation of sigmoid (https://arxiv.org/abs/1603.00391),
which is much faster than sigmoid.

$out = \max(0, \min(1, slope * x + offset))$

The slope should be positive. The offset can be either positive or negative.
The default slope and offset are set according to the above reference.
It is recommended to use the defaults for this activation.

)DOC");
  }
};
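
// With the defaults (slope = 0.2, offset = 0.5), the approximation passes
// through hard_sigmoid(0) = 0.5 and saturates at 0 for x <= -2.5 and at 1 for
// x >= 2.5.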

class SwishOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  SwishOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : framework::OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of Swish operator");
    AddOutput("Out", "Output of Swish operator");
    AddAttr<float>("beta", "Constant beta of swish operator").SetDefault(1.0f);
    AddComment(R"DOC(
Swish Activation Operator.

$$out = \frac{x}{1 + e^{- \beta x}}$$

)DOC");
  }
};

}  // namespace operators
}  // namespace paddle

namespace ops = paddle::operators;
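
// Each REGISTER_OP call below registers a forward/backward pair: a forward op
// (e.g. "sigmoid") built from ActivationOp plus its OpMaker, and the matching
// gradient op (e.g. "sigmoid_grad") built from ActivationOpGrad.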

REGISTER_OP(sigmoid, ops::ActivationOp, ops::SigmoidOpMaker, sigmoid_grad,
            ops::ActivationOpGrad);

REGISTER_OP(logsigmoid, ops::ActivationOp, ops::LogSigmoidOpMaker,
            logsigmoid_grad, ops::ActivationOpGrad);

REGISTER_OP(exp, ops::ActivationOp, ops::ExpOpMaker, exp_grad,
            ops::ActivationOpGrad);

REGISTER_OP(relu, ops::ActivationOp, ops::ReluOpMaker, relu_grad,
            ops::ActivationOpGrad);

REGISTER_OP(tanh, ops::ActivationOp, ops::TanhOpMaker, tanh_grad,
            ops::ActivationOpGrad);

REGISTER_OP(tanh_shrink, ops::ActivationOp, ops::TanhShrinkOpMaker,
            tanh_shrink_grad, ops::ActivationOpGrad);

REGISTER_OP(softshrink, ops::ActivationOp, ops::SoftShrinkOpMaker,
            softshrink_grad, ops::ActivationOpGrad);

REGISTER_OP(sqrt, ops::ActivationOp, ops::SqrtOpMaker, sqrt_grad,
            ops::ActivationOpGrad);

REGISTER_OP(abs, ops::ActivationOp, ops::AbsOpMaker, abs_grad,
            ops::ActivationOpGrad);

REGISTER_OP(ceil, ops::ActivationOp, ops::CeilOpMaker, ceil_grad,
            ops::ActivationOpGrad);

REGISTER_OP(floor, ops::ActivationOp, ops::FloorOpMaker, floor_grad,
            ops::ActivationOpGrad);

REGISTER_OP(round, ops::ActivationOp, ops::RoundOpMaker, round_grad,
            ops::ActivationOpGrad);

REGISTER_OP(reciprocal, ops::ActivationOp, ops::ReciprocalOpMaker,
            reciprocal_grad, ops::ActivationOpGrad);

REGISTER_OP(log, ops::ActivationOp, ops::LogOpMaker, log_grad,
            ops::ActivationOpGrad);

REGISTER_OP(square, ops::ActivationOp, ops::SquareOpMaker, square_grad,
            ops::ActivationOpGrad);

REGISTER_OP(softplus, ops::ActivationOp, ops::SoftplusOpMaker, softplus_grad,
            ops::ActivationOpGrad);

REGISTER_OP(softsign, ops::ActivationOp, ops::SoftsignOpMaker, softsign_grad,
            ops::ActivationOpGrad);

REGISTER_OP(brelu, ops::ActivationOp, ops::BReluOpMaker, brelu_grad,
            ops::ActivationOpGrad);

REGISTER_OP(leaky_relu, ops::ActivationOp, ops::LeakyReluOpMaker,
            leaky_relu_grad, ops::ActivationOpGrad);

REGISTER_OP(soft_relu, ops::ActivationOp, ops::SoftReluOpMaker, soft_relu_grad,
            ops::ActivationOpGrad);

REGISTER_OP(elu, ops::ActivationOp, ops::ELUOpMaker, elu_grad,
            ops::ActivationOpGrad);

REGISTER_OP(relu6, ops::ActivationOp, ops::Relu6OpMaker, relu6_grad,
            ops::ActivationOpGrad);

REGISTER_OP(pow, ops::ActivationOp, ops::PowOpMaker, pow_grad,
            ops::ActivationOpGrad);

REGISTER_OP(stanh, ops::ActivationOp, ops::STanhOpMaker, stanh_grad,
            ops::ActivationOpGrad);

REGISTER_OP(hard_shrink, ops::ActivationOp, ops::HardShrinkOpMaker,
            hard_shrink_grad, ops::ActivationOpGrad);

REGISTER_OP(thresholded_relu, ops::ActivationOp, ops::ThresholdedReluOpMaker,
            thresholded_relu_grad, ops::ActivationOpGrad);

REGISTER_OP(hard_sigmoid, ops::ActivationOp, ops::HardSigmoidOpMaker,
            hard_sigmoid_grad, ops::ActivationOpGrad);

REGISTER_OP(swish, ops::ActivationOp, ops::SwishOpMaker, swish_grad,
            ops::ActivationOpGrad);

#define REGISTER_ACTIVATION_CPU_KERNEL(act_type, functor, grad_functor)   \
  REGISTER_OP_CPU_KERNEL(                                                 \
      act_type, ops::ActivationKernel<paddle::platform::CPUDeviceContext, \
                                      ops::functor<float>>,               \
      ops::ActivationKernel<paddle::platform::CPUDeviceContext,           \
                            ops::functor<double>>);                       \
  REGISTER_OP_CPU_KERNEL(                                                 \
      act_type##_grad,                                                    \
      ops::ActivationGradKernel<paddle::platform::CPUDeviceContext,       \
                                ops::grad_functor<float>>,                \
      ops::ActivationGradKernel<paddle::platform::CPUDeviceContext,       \
                                ops::grad_functor<double>>);

FOR_EACH_KERNEL_FUNCTOR(REGISTER_ACTIVATION_CPU_KERNEL);
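
// Rough illustration of a single expansion, assuming FOR_EACH_KERNEL_FUNCTOR
// (declared in activation_op.h) pairs each op name with its functors, e.g.
// __macro(sigmoid, SigmoidFunctor, SigmoidGradFunctor):
//
//   REGISTER_OP_CPU_KERNEL(
//       sigmoid,
//       ops::ActivationKernel<paddle::platform::CPUDeviceContext,
//                             ops::SigmoidFunctor<float>>,
//       ops::ActivationKernel<paddle::platform::CPUDeviceContext,
//                             ops::SigmoidFunctor<double>>);
//   REGISTER_OP_CPU_KERNEL(
//       sigmoid_grad,
//       ops::ActivationGradKernel<paddle::platform::CPUDeviceContext,
//                                 ops::SigmoidGradFunctor<float>>,
//       ops::ActivationGradKernel<paddle::platform::CPUDeviceContext,
//                                 ops::SigmoidGradFunctor<double>>);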

REGISTER_OP_CPU_KERNEL(relu,
                       ops::ActivationKernel<paddle::platform::CPUDeviceContext,
                                             ops::ReluFunctor<float>>,
                       ops::ActivationKernel<paddle::platform::CPUDeviceContext,
                                             ops::ReluFunctor<double>>);
REGISTER_OP_CPU_KERNEL(
    relu_grad, ops::ActivationGradKernel<paddle::platform::CPUDeviceContext,
                                         ops::ReluGradFunctor<float>>,
    ops::ActivationGradKernel<paddle::platform::CPUDeviceContext,
                              ops::ReluGradFunctor<double>>);