/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#include "paddle/fluid/operators/activation_op.h"

namespace paddle {
namespace operators {

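// All activations in this file are element-wise: each op maps an input X to
// an output Out of identical shape. They share the generic ActivationOp /
// ActivationOpGrad definitions below and differ only in their proto makers
// and in the functors bound at kernel-registration time.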
class ActivationOp : public framework::OperatorWithKernel {
 public:
  using framework::OperatorWithKernel::OperatorWithKernel;

  void InferShape(framework::InferShapeContext *ctx) const override {
    ctx->SetOutputDim("Out", ctx->GetInputDim("X"));
    ctx->ShareLoD("X", /*->*/ "Out");
  }
};

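// For an element-wise activation the gradient of X has the same shape as the
// forward output, so the backward op copies the dims of "Out" into
// framework::GradVarName("X").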
class ActivationOpGrad : public framework::OperatorWithKernel {
 public:
  using framework::OperatorWithKernel::OperatorWithKernel;

  void InferShape(framework::InferShapeContext *ctx) const override {
    ctx->SetOutputDim(framework::GradVarName("X"), ctx->GetInputDim("Out"));
  }
};

class SigmoidOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  SigmoidOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : framework::OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of Sigmoid operator");
    AddOutput("Out", "Output of Sigmoid operator");
    AddComment(R"DOC(
Sigmoid Activation Operator.

$$out = \frac{1}{1 + e^{-x}}$$

)DOC");
  }
};

class LogSigmoidOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  LogSigmoidOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : framework::OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of LogSigmoid operator");
    AddOutput("Out", "Output of LogSigmoid operator");
    AddComment(R"DOC(
Logsigmoid Activation Operator.

$$out = \log \frac{1}{1 + e^{-x}}$$

)DOC");
  }
};

class ExpOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  ExpOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : framework::OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of Exp operator");
    AddOutput("Out", "Output of Exp operator");
    AddComment(R"DOC(
Exp Activation Operator.

$out = e^x$

)DOC");
  }
};

class ReluOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  ReluOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : framework::OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of Relu operator");
    AddOutput("Out", "Output of Relu operator");
    AddComment(R"DOC(
Relu Activation Operator.

$out = \max(x, 0)$

)DOC");
  }
};

class LeakyReluOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  LeakyReluOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : framework::OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of LeakyRelu operator");
    AddOutput("Out", "Output of LeakyRelu operator");
    AddAttr<float>("alpha", "The small negative slope").SetDefault(0.02f);
    AddComment(R"DOC(
LeakyRelu Activation Operator.

$out = \max(x, \alpha * x)$

)DOC");
  }
};

class SoftShrinkOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  SoftShrinkOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : framework::OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of Softshrink operator");
    AddOutput("Out", "Output of Softshrink operator");
    AddAttr<float>("lambda", "non-negative offset").SetDefault(0.5f);
    AddComment(R"DOC(
Softshrink Activation Operator.

$$
out = \begin{cases} 
    x - \lambda, \text{if } x > \lambda \\
    x + \lambda, \text{if } x < -\lambda \\
    0,  \text{otherwise}
    \end{cases}
$$

)DOC");
  }
};

class TanhOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  TanhOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : framework::OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of Tanh operator");
    AddOutput("Out", "Output of Tanh operator");
    AddComment(R"DOC(
Tanh Activation Operator.

$$out = \frac{e^{x} - e^{-x}}{e^{x} + e^{-x}}$$

)DOC");
  }
};

class TanhShrinkOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  TanhShrinkOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : framework::OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of TanhShrink operator");
    AddOutput("Out", "Output of TanhShrink operator");
    AddComment(R"DOC(
TanhShrink Activation Operator.

$$out = x - \frac{e^{x} - e^{-x}}{e^{x} + e^{-x}}$$

)DOC");
  }
};

class HardShrinkOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  HardShrinkOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : framework::OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of HardShrink operator");
    AddOutput("Out", "Output of HardShrink operator");
    AddAttr<float>("threshold", "The value of threshold for HardShrink")
        .SetDefault(0.5f);
    AddComment(R"DOC(
HardShrink Activation Operator.

$$
out = \begin{cases} 
    x, \text{if } x > \lambda \\
    x, \text{if } x < -\lambda \\
    0,  \text{otherwise}
    \end{cases}
$$

where $\lambda$ is the value of the threshold attribute.

)DOC");
  }
};

class SqrtOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  SqrtOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : framework::OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of Sqrt operator");
    AddOutput("Out", "Output of Sqrt operator");
    AddComment(R"DOC(
Sqrt Activation Operator.

$out = \sqrt{x}$

)DOC");
  }
};

class AbsOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  AbsOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : framework::OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of Abs operator");
    AddOutput("Out", "Output of Abs operator");
    AddComment(R"DOC(
Abs Activation Operator.

$out = |x|$

)DOC");
  }
};

class CeilOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  CeilOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : framework::OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of Ceil operator");
    AddOutput("Out", "Output of Ceil operator");
    AddComment(R"DOC(
Ceil Activation Operator.

$out = ceil(x)$

)DOC");
  }
};

class FloorOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  FloorOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : framework::OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of Floor operator");
    AddOutput("Out", "Output of Floor operator");
    AddComment(R"DOC(
Floor Activation Operator.

$out = floor(x)$

)DOC");
  }
};

class RoundOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  RoundOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : framework::OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of Round operator");
    AddOutput("Out", "Output of Round operator");
    AddComment(R"DOC(
Round Activation Operator.

$out = round(x)$

)DOC");
  }
};

class ReciprocalOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  ReciprocalOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : framework::OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of Reciprocal operator");
    AddOutput("Out", "Output of Reciprocal operator");
    AddComment(R"DOC(
Reciprocal Activation Operator.

$$out = \frac{1}{x}$$

)DOC");
  }
};

class LogOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  LogOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : framework::OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of Log operator");
    AddOutput("Out", "Output of Log operator");
    AddComment(R"DOC(
Log Activation Operator.

$out = \ln(x)$

Natural logarithm of x.

)DOC");
  }
};

class SquareOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  SquareOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : framework::OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of Square operator");
    AddOutput("Out", "Output of Square operator");
    AddComment(R"DOC(
Square Activation Operator.

$out = x^2$

)DOC");
  }
};

class SoftplusOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  SoftplusOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : framework::OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of Softplus operator");
    AddOutput("Out", "Output of Softplus operator");
    AddComment(R"DOC(
Softplus Activation Operator.

$out = \ln(1 + e^{x})$

)DOC");
  }
};

class SoftsignOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  SoftsignOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : framework::OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of Softsign operator");
    AddOutput("Out", "Output of Softsign operator");
    AddComment(R"DOC(
Softsign Activation Operator.

$$out = \frac{x}{1 + |x|}$$

)DOC");
  }
};

class BReluOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  BReluOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : framework::OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of BRelu operator");
    AddOutput("Out", "Output of BRelu operator");
    AddAttr<float>("t_min", "The lower bound of BRelu")
        .SetDefault(static_cast<float>(0));
    AddAttr<float>("t_max", "The upper bound of BRelu")
        .SetDefault(static_cast<float>(24));
    AddComment(R"DOC(
BRelu Activation Operator.

$out = \min(\max(x, t_{min}), t_{max})$

)DOC");
  }
};

class SoftReluOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  SoftReluOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : framework::OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of SoftRelu operator");
    AddOutput("Out", "Output of SoftRelu operator");
    AddAttr<float>("threshold", "The threshold value of SoftRelu")
        .SetDefault(40.0f);
    AddComment(R"DOC(
SoftRelu Activation Operator.

$out = \ln(1 + \exp(\min(\max(x, -threshold), threshold)))$

)DOC");
  }
};

class ELUOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  ELUOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : framework::OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of ELU operator");
    AddOutput("Out", "Output of ELU operator");
    AddAttr<float>("alpha", "The alpha value of ELU").SetDefault(1.0f);
    AddComment(R"DOC(
ELU Activation Operator.

Applies the following element-wise computation on the input according to
https://arxiv.org/abs/1511.07289.

$out = \max(0, x) + \min(0, \alpha * (e^x - 1))$

)DOC");
  }
};

class Relu6OpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  Relu6OpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : framework::OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of Relu6 operator");
    AddOutput("Out", "Output of Relu6 operator");
    AddAttr<float>("threshold", "The threshold value of Relu6")
        .SetDefault(6.0f);
    AddComment(R"DOC(
Relu6 Activation Operator.

$out = \min(\max(0, x), 6)$

)DOC");
  }
};

class PowOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  PowOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : framework::OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of Pow operator");
    AddOutput("Out", "Output of Pow operator");
    AddAttr<float>("factor", "The exponential factor of Pow").SetDefault(1.0f);
    AddComment(R"DOC(
Pow Activation Operator.

$out = x^{factor}$

)DOC");
  }
};

class STanhOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  STanhOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : framework::OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of STanh operator");
    AddOutput("Out", "Output of STanh operator");
    AddAttr<float>("scale_a", "The scale parameter a of the input")
        .SetDefault(2.0f / 3.0f);
    AddAttr<float>("scale_b", "The scale parameter b of the output")
        .SetDefault(1.7159f);
    AddComment(R"DOC(
STanh Activation Operator.

$$out = b * \frac{e^{a * x} - e^{-a * x}}{e^{a * x} + e^{-a * x}}$$

)DOC");
  }
};

class ThresholdedReluOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  ThresholdedReluOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : framework::OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of ThresholdedRelu operator");
    AddOutput("Out", "Output of ThresholdedRelu operator");
    AddAttr<float>("threshold", "The threshold of the activation")
        .SetDefault(1.0f);
    AddComment(R"DOC(
ThresholdedRelu Activation Operator.

$$
out = \begin{cases} 
    x, \text{if } x > threshold \\
    0,  \text{otherwise}
    \end{cases}
$$

)DOC");
  }
};

class HardSigmoidOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  HardSigmoidOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : framework::OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of HardSigmoid operator");
    AddOutput("Out", "Output of HardSigmoid operator");
    AddAttr<float>("slope", "Slope for linear approximation of sigmoid")
        .SetDefault(0.2f);
    AddAttr<float>("offset", "Offset for linear approximation of sigmoid")
        .SetDefault(0.5f);
    AddComment(R"DOC(
HardSigmoid Activation Operator.

Segment-wise linear approximation of sigmoid (https://arxiv.org/abs/1603.00391),
which is much faster than sigmoid.

$out = \max(0, \min(1, slope * x + offset))$

The slope should be positive. The offset can be either positive or negative.
The default slope and offset are set according to the above reference.
It is recommended to use the defaults for this activation.
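
For example, with the default slope (0.2) and offset (0.5), an input of 0
maps to 0.5, which matches the value of sigmoid at 0.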

)DOC");
  }
};

class SwishOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  SwishOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : framework::OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of Swish operator");
    AddOutput("Out", "Output of Swish operator");
    AddAttr<float>("beta", "Constant beta of swish operator").SetDefault(1.0f);
    AddComment(R"DOC(
Swish Activation Operator.

$$out = \frac{x}{1 + e^{- \beta x}}$$

)DOC");
  }
};

}  // namespace operators
}  // namespace paddle

namespace ops = paddle::operators;

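// REGISTER_OP(op_type, op_class, op_maker, grad_op_type, grad_op_class)
// registers a forward operator together with its gradient operator. Every
// activation below reuses the generic ActivationOp / ActivationOpGrad pair
// and differs only in its maker.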
REGISTER_OP(sigmoid, ops::ActivationOp, ops::SigmoidOpMaker, sigmoid_grad,
            ops::ActivationOpGrad);

REGISTER_OP(logsigmoid, ops::ActivationOp, ops::LogSigmoidOpMaker,
            logsigmoid_grad, ops::ActivationOpGrad);

REGISTER_OP(exp, ops::ActivationOp, ops::ExpOpMaker, exp_grad,
            ops::ActivationOpGrad);

REGISTER_OP(relu, ops::ActivationOp, ops::ReluOpMaker, relu_grad,
            ops::ActivationOpGrad);

REGISTER_OP(tanh, ops::ActivationOp, ops::TanhOpMaker, tanh_grad,
            ops::ActivationOpGrad);

REGISTER_OP(tanh_shrink, ops::ActivationOp, ops::TanhShrinkOpMaker,
            tanh_shrink_grad, ops::ActivationOpGrad);

REGISTER_OP(softshrink, ops::ActivationOp, ops::SoftShrinkOpMaker,
            softshrink_grad, ops::ActivationOpGrad);

REGISTER_OP(sqrt, ops::ActivationOp, ops::SqrtOpMaker, sqrt_grad,
            ops::ActivationOpGrad);

REGISTER_OP(abs, ops::ActivationOp, ops::AbsOpMaker, abs_grad,
            ops::ActivationOpGrad);

REGISTER_OP(ceil, ops::ActivationOp, ops::CeilOpMaker, ceil_grad,
            ops::ActivationOpGrad);

REGISTER_OP(floor, ops::ActivationOp, ops::FloorOpMaker, floor_grad,
            ops::ActivationOpGrad);

REGISTER_OP(round, ops::ActivationOp, ops::RoundOpMaker, round_grad,
            ops::ActivationOpGrad);

REGISTER_OP(reciprocal, ops::ActivationOp, ops::ReciprocalOpMaker,
            reciprocal_grad, ops::ActivationOpGrad);

REGISTER_OP(log, ops::ActivationOp, ops::LogOpMaker, log_grad,
            ops::ActivationOpGrad);

REGISTER_OP(square, ops::ActivationOp, ops::SquareOpMaker, square_grad,
            ops::ActivationOpGrad);

REGISTER_OP(softplus, ops::ActivationOp, ops::SoftplusOpMaker, softplus_grad,
            ops::ActivationOpGrad);

REGISTER_OP(softsign, ops::ActivationOp, ops::SoftsignOpMaker, softsign_grad,
            ops::ActivationOpGrad);

REGISTER_OP(brelu, ops::ActivationOp, ops::BReluOpMaker, brelu_grad,
            ops::ActivationOpGrad);

REGISTER_OP(leaky_relu, ops::ActivationOp, ops::LeakyReluOpMaker,
            leaky_relu_grad, ops::ActivationOpGrad);

REGISTER_OP(soft_relu, ops::ActivationOp, ops::SoftReluOpMaker, soft_relu_grad,
            ops::ActivationOpGrad);

REGISTER_OP(elu, ops::ActivationOp, ops::ELUOpMaker, elu_grad,
            ops::ActivationOpGrad);

REGISTER_OP(relu6, ops::ActivationOp, ops::Relu6OpMaker, relu6_grad,
            ops::ActivationOpGrad);

REGISTER_OP(pow, ops::ActivationOp, ops::PowOpMaker, pow_grad,
            ops::ActivationOpGrad);

REGISTER_OP(stanh, ops::ActivationOp, ops::STanhOpMaker, stanh_grad,
            ops::ActivationOpGrad);

REGISTER_OP(hard_shrink, ops::ActivationOp, ops::HardShrinkOpMaker,
            hard_shrink_grad, ops::ActivationOpGrad);

REGISTER_OP(thresholded_relu, ops::ActivationOp, ops::ThresholdedReluOpMaker,
            thresholded_relu_grad, ops::ActivationOpGrad);

REGISTER_OP(hard_sigmoid, ops::ActivationOp, ops::HardSigmoidOpMaker,
            hard_sigmoid_grad, ops::ActivationOpGrad);

REGISTER_OP(swish, ops::ActivationOp, ops::SwishOpMaker, swish_grad,
            ops::ActivationOpGrad);

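// Register CPU kernels for a forward/backward activation pair, instantiated
// for float and double.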
#define REGISTER_ACTIVATION_CPU_KERNEL(act_type, functor, grad_functor)   \
  REGISTER_OP_CPU_KERNEL(                                                 \
      act_type, ops::ActivationKernel<paddle::platform::CPUDeviceContext, \
                                      ops::functor<float>>,               \
      ops::ActivationKernel<paddle::platform::CPUDeviceContext,           \
                            ops::functor<double>>);                       \
  REGISTER_OP_CPU_KERNEL(                                                 \
      act_type##_grad,                                                    \
      ops::ActivationGradKernel<paddle::platform::CPUDeviceContext,       \
                                ops::grad_functor<float>>,                \
      ops::ActivationGradKernel<paddle::platform::CPUDeviceContext,       \
                                ops::grad_functor<double>>);

FOR_EACH_KERNEL_FUNCTOR(REGISTER_ACTIVATION_CPU_KERNEL);
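
// As a rough sketch (assuming sigmoid appears in the FOR_EACH_KERNEL_FUNCTOR
// list defined in activation_op.h), the invocation above expands to:
//
//   REGISTER_OP_CPU_KERNEL(
//       sigmoid, ops::ActivationKernel<paddle::platform::CPUDeviceContext,
//                                      ops::SigmoidFunctor<float>>,
//       ops::ActivationKernel<paddle::platform::CPUDeviceContext,
//                             ops::SigmoidFunctor<double>>);
//   REGISTER_OP_CPU_KERNEL(
//       sigmoid_grad,
//       ops::ActivationGradKernel<paddle::platform::CPUDeviceContext,
//                                 ops::SigmoidGradFunctor<float>>,
//       ops::ActivationGradKernel<paddle::platform::CPUDeviceContext,
//                                 ops::SigmoidGradFunctor<double>>);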

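// relu is registered explicitly rather than through the macro above,
// presumably so that its kernel set can be extended independently of the
// FOR_EACH_KERNEL_FUNCTOR list.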
REGISTER_OP_CPU_KERNEL(relu,
                       ops::ActivationKernel<paddle::platform::CPUDeviceContext,
                                             ops::ReluFunctor<float>>,
                       ops::ActivationKernel<paddle::platform::CPUDeviceContext,
                                             ops::ReluFunctor<double>>);
REGISTER_OP_CPU_KERNEL(
    relu_grad, ops::ActivationGradKernel<paddle::platform::CPUDeviceContext,
                                         ops::ReluGradFunctor<float>>,
    ops::ActivationGradKernel<paddle::platform::CPUDeviceContext,
                              ops::ReluGradFunctor<double>>);