/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#include "paddle/operators/activation_op.h"

namespace paddle {
namespace operators {

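// Forward operator shared by all activations: the output Y has the same
// dimensions as the input X, and X's LoD is shared with Y.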
class ActivationOp : public framework::OperatorWithKernel {
 public:
  using framework::OperatorWithKernel::OperatorWithKernel;

  void InferShape(framework::InferShapeContext *ctx) const override {
    ctx->SetOutputDim("Y", ctx->GetInputDim("X"));
    ctx->ShareLoD("X", /*->*/ "Y");
  }
};

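// Backward operator shared by all activations: the gradient with respect to
// X is given the same dimensions as the forward output Y.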
class ActivationOpGrad : public framework::OperatorWithKernel {
 public:
  using framework::OperatorWithKernel::OperatorWithKernel;

  void InferShape(framework::InferShapeContext *ctx) const override {
    ctx->SetOutputDim(framework::GradVarName("X"), ctx->GetInputDim("Y"));
  }
};

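// Each *OpMaker below declares one activation's interface: an input X, an
// output Y, optional attributes, and a doc string giving the LaTeX formula
// of the computation it performs.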
class SigmoidOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  SigmoidOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : framework::OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of Sigmoid operator");
    AddOutput("Y", "Output of Sigmoid operator");
    AddComment(R"DOC(
Sigmoid Activation Operator.

$$y = \frac{1}{1 + e^{-x}}$$

)DOC");
  }
};

class LogSigmoidOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  LogSigmoidOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : framework::OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of LogSigmoid operator");
    AddOutput("Y", "Output of LogSigmoid operator");
    AddComment(R"DOC(
Logsigmoid Activation Operator.

$$y = \log \frac{1}{1 + e^{-x}}$$

)DOC");
  }
};

class ExpOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  ExpOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : framework::OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of Exp operator");
    AddOutput("Y", "Output of Exp operator");
    AddComment(R"DOC(
Exp Activation Operator.

$y = e^x$

)DOC");
  }
};

class ReluOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  ReluOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : framework::OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of Relu operator");
    AddOutput("Y", "Output of Relu operator");
    AddComment(R"DOC(
Relu Activation Operator.

$y = \max(x, 0)$

)DOC");
  }
};

class LeakyReluOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  LeakyReluOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : framework::OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of LeakyRelu operator");
    AddOutput("Y", "Output of LeakyRelu operator");
    AddAttr<float>("alpha", "The small negative slope").SetDefault(0.02f);
    AddComment(R"DOC(
LeakyRelu Activation Operator.

$y = \max(x, \alpha * x)$

)DOC");
  }
};

class SoftShrinkOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  SoftShrinkOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : framework::OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of Softshrink operator");
    AddOutput("Y", "Output of Softshrink operator");
    AddAttr<float>("lambda", "non-negative offset").SetDefault(0.5f);
    AddComment(R"DOC(
Softshrink Activation Operator.

$$
y = \begin{cases}
    x - \lambda, \text{if } x > \lambda \\
    x + \lambda, \text{if } x < -\lambda \\
    0,  \text{otherwise}
    \end{cases}
$$

)DOC");
  }
};

class TanhOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  TanhOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : framework::OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of Tanh operator");
    AddOutput("Y", "Output of Tanh operator");
    AddComment(R"DOC(
Tanh Activation Operator.

$$y = \frac{e^{x} - e^{-x}}{e^{x} + e^{-x}}$$

)DOC");
  }
};

class TanhShrinkOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  TanhShrinkOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : framework::OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of TanhShrink operator");
    AddOutput("Y", "Output of TanhShrink operator");
    AddComment(R"DOC(
TanhShrink Activation Operator.

$$y = x - \frac{e^{x} - e^{-x}}{e^{x} + e^{-x}}$$

)DOC");
  }
};

class HardShrinkOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  HardShrinkOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : framework::OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of HardShrink operator");
    AddOutput("Y", "Output of HardShrink operator");
    AddAttr<float>("threshold", "The value of threshold for HardShrink")
        .SetDefault(0.5f);
    AddComment(R"DOC(
HardShrink Activation Operator.

$$
y = \begin{cases}
    x, \text{if } x > threshold \\
    x, \text{if } x < -threshold \\
    0,  \text{otherwise}
    \end{cases}
$$

)DOC");
  }
};

class SqrtOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  SqrtOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : framework::OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of Sqrt operator");
    AddOutput("Y", "Output of Sqrt operator");
    AddComment(R"DOC(
Sqrt Activation Operator.

$y = \sqrt{x}$

)DOC");
  }
};

class AbsOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  AbsOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : framework::OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of Abs operator");
    AddOutput("Y", "Output of Abs operator");
    AddComment(R"DOC(
Abs Activation Operator.

$y = |x|$

)DOC");
  }
};

class CeilOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  CeilOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : framework::OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of Ceil operator");
    AddOutput("Y", "Output of Ceil operator");
    AddComment(R"DOC(
Ceil Activation Operator.

$y = \lceil x \rceil$

)DOC");
  }
};

class FloorOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  FloorOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : framework::OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of Floor operator");
    AddOutput("Y", "Output of Floor operator");
    AddComment(R"DOC(
Floor Activation Operator.

$y = \lfloor x \rfloor$

)DOC");
  }
};

class RoundOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  RoundOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : framework::OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of Round operator");
    AddOutput("Y", "Output of Round operator");
    AddComment(R"DOC(
Round Activation Operator.

$y = [x]$

)DOC");
  }
};

class ReciprocalOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  ReciprocalOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : framework::OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of Reciprocal operator");
    AddOutput("Y", "Output of Reciprocal operator");
    AddComment(R"DOC(
Reciprocal Activation Operator.

$$y = \frac{1}{x}$$

)DOC");
  }
};

class LogOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  LogOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : framework::OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of Log operator");
    AddOutput("Y", "Output of Log operator");
    AddComment(R"DOC(
Log Activation Operator.

$y = \ln(x)$

Natural logarithm of x.

)DOC");
  }
};

class SquareOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  SquareOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : framework::OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of Square operator");
    AddOutput("Y", "Output of Square operator");
    AddComment(R"DOC(
Square Activation Operator.

$y = x^2$

)DOC");
  }
};

class SoftplusOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  SoftplusOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : framework::OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of Softplus operator");
    AddOutput("Y", "Output of Softplus operator");
    AddComment(R"DOC(
Softplus Activation Operator.

$y = \ln(1 + e^{x})$

)DOC");
  }
};

class SoftsignOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  SoftsignOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : framework::OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of Softsign operator");
    AddOutput("Y", "Output of Softsign operator");
    AddComment(R"DOC(
Softsign Activation Operator.

$$y = \frac{x}{1 + |x|}$$

)DOC");
  }
};

class BReluOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  BReluOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : framework::OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of BRelu operator");
    AddOutput("Y", "Output of BRelu operator");
    AddAttr<float>("t_min", "The min marginal value of BRelu")
        .SetDefault(static_cast<float>(0));
    AddAttr<float>("t_max", "The max marginal value of BRelu")
        .SetDefault(static_cast<float>(24));
    AddComment(R"DOC(
BRelu Activation Operator.

$y = \min(\max(x, t_{min}), t_{max})$

)DOC");
  }
};

class SoftReluOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  SoftReluOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : framework::OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of SoftRelu operator");
    AddOutput("Y", "Output of SoftRelu operator");
    AddAttr<float>("threshold", "The threshold value of SoftRelu")
        .SetDefault(40.0f);
    AddComment(R"DOC(
SoftRelu Activation Operator.

$y = \ln(1 + \exp(\min(\max(x, -threshold), threshold)))$

)DOC");
  }
};

class ELUOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  ELUOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : framework::OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of ELU operator");
    AddOutput("Y", "Output of ELU operator");
    AddAttr<float>("alpha", "The alpha value of ELU").SetDefault(1.0f);
    AddComment(R"DOC(
ELU Activation Operator.

Applies the following element-wise computation on the input according to
https://arxiv.org/abs/1511.07289.

$y = \max(0, x) + \min(0, \alpha * (e^x - 1))$

)DOC");
  }
};

class Relu6OpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  Relu6OpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : framework::OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of Relu6 operator");
    AddOutput("Y", "Output of Relu6 operator");
    AddAttr<float>("threshold", "The threshold value of Relu6")
        .SetDefault(6.0f);
    AddComment(R"DOC(
Relu6 Activation Operator.

$y = \min(\max(0, x), 6)$

)DOC");
  }
};

class PowOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  PowOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : framework::OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of Pow operator");
    AddOutput("Y", "Output of Pow operator");
    AddAttr<float>("factor", "The exponential factor of Pow").SetDefault(1.0f);
    AddComment(R"DOC(
Pow Activation Operator.

$y = x^{factor}$

)DOC");
  }
};

class STanhOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  STanhOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : framework::OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of STanh operator");
    AddOutput("Y", "Output of STanh operator");
    AddAttr<float>("scale_a", "The scale parameter a for the input")
        .SetDefault(2.0f / 3.0f);
    AddAttr<float>("scale_b", "The scale parameter b for the output")
        .SetDefault(1.7159f);
    AddComment(R"DOC(
STanh Activation Operator.

$$y = b * \frac{e^{a * x} - e^{-a * x}}{e^{a * x} + e^{-a * x}}$$

)DOC");
  }
};

class ThresholdedReluOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  ThresholdedReluOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : framework::OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of ThresholdedRelu operator");
    AddOutput("Y", "Output of ThresholdedRelu operator");
    AddAttr<float>("threshold", "The threshold location of activation")
        .SetDefault(1.0f);
    AddComment(R"DOC(
ThresholdedRelu Activation Operator.

$$
y = \begin{cases}
    x, \text{if } x > threshold \\
    0,  \text{otherwise}
    \end{cases}
$$

)DOC");
  }
};

class HardSigmoidOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  HardSigmoidOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : framework::OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of HardSigmoid operator");
    AddOutput("Y", "Output of HardSigmoid operator");
    AddAttr<float>("slope", "Slope for linear approximation of sigmoid")
        .SetDefault(0.2f);
    AddAttr<float>("offset", "Offset for linear approximation of sigmoid")
        .SetDefault(0.5f);
    AddComment(R"DOC(
HardSigmoid Activation Operator.

Segment-wise linear approximation of sigmoid (https://arxiv.org/abs/1603.00391),
which is much faster than sigmoid.

$y = \max(0, \min(1, slope * x + offset))$

The slope should be positive. The offset can be either positive or negative.
The default slope and offset are set according to the above reference.
It is recommended to use the defaults for this activation.

)DOC");
  }
};

class SwishOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  SwishOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : framework::OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of Swish operator");
    AddOutput("Y", "Output of Swish operator");
    AddAttr<float>("beta", "Constant beta of swish operator").SetDefault(1.0f);
    AddComment(R"DOC(
Swish Activation Operator.

$$y = \frac{x}{1 + e^{- \beta x}}$$

)DOC");
  }
};

}  // namespace operators
}  // namespace paddle

namespace ops = paddle::operators;

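// REGISTER_OP registers each forward operator under its op type name together
// with its gradient operator under the corresponding *_grad name. Every
// activation reuses ActivationOp and ActivationOpGrad; only the maker differs.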
REGISTER_OP(sigmoid, ops::ActivationOp, ops::SigmoidOpMaker, sigmoid_grad,
            ops::ActivationOpGrad);

REGISTER_OP(logsigmoid, ops::ActivationOp, ops::LogSigmoidOpMaker,
            logsigmoid_grad, ops::ActivationOpGrad);

REGISTER_OP(exp, ops::ActivationOp, ops::ExpOpMaker, exp_grad,
            ops::ActivationOpGrad);

REGISTER_OP(relu, ops::ActivationOp, ops::ReluOpMaker, relu_grad,
            ops::ActivationOpGrad);

REGISTER_OP(tanh, ops::ActivationOp, ops::TanhOpMaker, tanh_grad,
            ops::ActivationOpGrad);

REGISTER_OP(tanh_shrink, ops::ActivationOp, ops::TanhShrinkOpMaker,
            tanh_shrink_grad, ops::ActivationOpGrad);

REGISTER_OP(softshrink, ops::ActivationOp, ops::SoftShrinkOpMaker,
            softshrink_grad, ops::ActivationOpGrad);

REGISTER_OP(sqrt, ops::ActivationOp, ops::SqrtOpMaker, sqrt_grad,
            ops::ActivationOpGrad);

REGISTER_OP(abs, ops::ActivationOp, ops::AbsOpMaker, abs_grad,
            ops::ActivationOpGrad);

REGISTER_OP(ceil, ops::ActivationOp, ops::CeilOpMaker, ceil_grad,
            ops::ActivationOpGrad);

REGISTER_OP(floor, ops::ActivationOp, ops::FloorOpMaker, floor_grad,
            ops::ActivationOpGrad);

REGISTER_OP(round, ops::ActivationOp, ops::RoundOpMaker, round_grad,
            ops::ActivationOpGrad);

REGISTER_OP(reciprocal, ops::ActivationOp, ops::ReciprocalOpMaker,
            reciprocal_grad, ops::ActivationOpGrad);

REGISTER_OP(log, ops::ActivationOp, ops::LogOpMaker, log_grad,
            ops::ActivationOpGrad);

REGISTER_OP(square, ops::ActivationOp, ops::SquareOpMaker, square_grad,
            ops::ActivationOpGrad);

REGISTER_OP(softplus, ops::ActivationOp, ops::SoftplusOpMaker, softplus_grad,
            ops::ActivationOpGrad);

REGISTER_OP(softsign, ops::ActivationOp, ops::SoftsignOpMaker, softsign_grad,
            ops::ActivationOpGrad);

REGISTER_OP(brelu, ops::ActivationOp, ops::BReluOpMaker, brelu_grad,
            ops::ActivationOpGrad);

REGISTER_OP(leaky_relu, ops::ActivationOp, ops::LeakyReluOpMaker,
            leaky_relu_grad, ops::ActivationOpGrad);

REGISTER_OP(soft_relu, ops::ActivationOp, ops::SoftReluOpMaker, soft_relu_grad,
            ops::ActivationOpGrad);

REGISTER_OP(elu, ops::ActivationOp, ops::ELUOpMaker, elu_grad,
            ops::ActivationOpGrad);

REGISTER_OP(relu6, ops::ActivationOp, ops::Relu6OpMaker, relu6_grad,
            ops::ActivationOpGrad);

REGISTER_OP(pow, ops::ActivationOp, ops::PowOpMaker, pow_grad,
            ops::ActivationOpGrad);

REGISTER_OP(stanh, ops::ActivationOp, ops::STanhOpMaker, stanh_grad,
            ops::ActivationOpGrad);

REGISTER_OP(hard_shrink, ops::ActivationOp, ops::HardShrinkOpMaker,
            hard_shrink_grad, ops::ActivationOpGrad);

REGISTER_OP(thresholded_relu, ops::ActivationOp, ops::ThresholdedReluOpMaker,
            thresholded_relu_grad, ops::ActivationOpGrad);

REGISTER_OP(hard_sigmoid, ops::ActivationOp, ops::HardSigmoidOpMaker,
            hard_sigmoid_grad, ops::ActivationOpGrad);

REGISTER_OP(swish, ops::ActivationOp, ops::SwishOpMaker, swish_grad,
            ops::ActivationOpGrad);

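// Registers float and double CPU kernels for both the forward op (act_type)
// and its gradient op (act_type##_grad).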
#define REGISTER_ACTIVATION_CPU_KERNEL(act_type, functor, grad_functor)   \
  REGISTER_OP_CPU_KERNEL(                                                 \
      act_type, ops::ActivationKernel<paddle::platform::CPUDeviceContext, \
                                      ops::functor<float>>,               \
      ops::ActivationKernel<paddle::platform::CPUDeviceContext,           \
                            ops::functor<double>>);                       \
  REGISTER_OP_CPU_KERNEL(                                                 \
      act_type##_grad,                                                    \
      ops::ActivationGradKernel<paddle::platform::CPUDeviceContext,       \
                                ops::grad_functor<float>>,                \
      ops::ActivationGradKernel<paddle::platform::CPUDeviceContext,       \
                                ops::grad_functor<double>>);

FOR_EACH_KERNEL_FUNCTOR(REGISTER_ACTIVATION_CPU_KERNEL);
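// FOR_EACH_KERNEL_FUNCTOR (defined in activation_op.h) applies the macro
// above to every (op name, functor, grad functor) triple, so the line above
// expands to one REGISTER_ACTIVATION_CPU_KERNEL call per activation, e.g.
//   REGISTER_ACTIVATION_CPU_KERNEL(sigmoid, SigmoidFunctor, SigmoidGradFunctor)
// for sigmoid. (The exact functor names here are assumed from the header's
// naming convention.)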