/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at

   http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License. */

#include "paddle/operators/activation_op.h"

namespace paddle {
namespace operators {

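// Shared forward shape inference for every activation operator: the output
// Y always has the same dimensions as the input X, and X's LoD (sequence
// information) is shared with Y.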
class ActivationOp : public framework::OperatorWithKernel {
 public:
  using framework::OperatorWithKernel::OperatorWithKernel;

  void InferShape(framework::InferShapeContext *ctx) const override {
    ctx->SetOutputDim("Y", ctx->GetInputDim("X"));
    ctx->ShareLoD("X", /*->*/ "Y");
  }
};

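// Shared backward shape inference: the gradient with respect to X has the
// same dimensions as the forward output Y.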
class ActivationOpGrad : public framework::OperatorWithKernel {
 public:
  using framework::OperatorWithKernel::OperatorWithKernel;

  void InferShape(framework::InferShapeContext *ctx) const override {
    ctx->SetOutputDim(framework::GradVarName("X"), ctx->GetInputDim("Y"));
  }
};

class SigmoidOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  SigmoidOpMaker(framework::OpProto *proto,
                 framework::OpAttrChecker *op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of Sigmoid operator");
    AddOutput("Y", "Output of Sigmoid operator");
    AddComment(R"DOC(
Sigmoid Activation Operator.

$$y = \frac{1}{1 + e^{-x}}$$

)DOC");
  }
};

class LogSigmoidOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  LogSigmoidOpMaker(framework::OpProto *proto,
                    framework::OpAttrChecker *op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of LogSigmoid operator");
    AddOutput("Y", "Output of LogSigmoid operator");
    AddComment(R"DOC(
LogSigmoid Activation Operator.

$$y = \log \frac{1}{1 + e^{-x}}$$

)DOC");
  }
};

class ExpOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  ExpOpMaker(framework::OpProto *proto, framework::OpAttrChecker *op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of Exp operator");
    AddOutput("Y", "Output of Exp operator");
    AddComment(R"DOC(
Exp Activation Operator.

$y = e^x$

)DOC");
  }
};

class ReluOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  ReluOpMaker(framework::OpProto *proto, framework::OpAttrChecker *op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of Relu operator");
    AddOutput("Y", "Output of Relu operator");
    AddComment(R"DOC(
Relu Activation Operator.

$y = \max(x, 0)$

)DOC");
  }
};

class LeakyReluOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  LeakyReluOpMaker(framework::OpProto *proto,
                   framework::OpAttrChecker *op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of LeakyRelu operator");
    AddOutput("Y", "Output of LeakyRelu operator");
    AddAttr<float>("alpha", "The small negative slope").SetDefault(0.02f);
    AddComment(R"DOC(
LeakyRelu Activation Operator.

$y = \max(x, \alpha * x)$

)DOC");
  }
};

class SoftShrinkOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  SoftShrinkOpMaker(framework::OpProto *proto,
                    framework::OpAttrChecker *op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of Softshrink operator");
    AddOutput("Y", "Output of Softshrink operator");
    AddAttr<float>("lambda", "non-negative offset").SetDefault(0.5f);
    AddComment(R"DOC(
Softshrink Activation Operator.

$$
y = \begin{cases} 
    x - \lambda, \text{if } x > \lambda \\
    x + \lambda, \text{if } x < -\lambda \\
    0,  \text{otherwise}
    \end{cases}
$$

)DOC");
  }
};

class TanhOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  TanhOpMaker(framework::OpProto *proto, framework::OpAttrChecker *op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of Tanh operator");
    AddOutput("Y", "Output of Tanh operator");
    AddComment(R"DOC(
Tanh Activation Operator.

$$y = \frac{e^{x} - e^{-x}}{e^{x} + e^{-x}}$$

)DOC");
  }
};

class TanhShrinkOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  TanhShrinkOpMaker(framework::OpProto *proto,
                    framework::OpAttrChecker *op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of TanhShrink operator");
    AddOutput("Y", "Output of TanhShrink operator");
    AddComment(R"DOC(
TanhShrink Activation Operator.

$$y = x - \frac{e^{x} - e^{-x}}{e^{x} + e^{-x}}$$

)DOC");
  }
};

class HardShrinkOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  HardShrinkOpMaker(framework::OpProto *proto,
                    framework::OpAttrChecker *op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of HardShrink operator");
    AddOutput("Y", "Output of HardShrink operator");
    AddAttr<float>("threshold", "The value of threshold for HardShrink")
        .SetDefault(0.5f);
    AddComment(R"DOC(
HardShrink Activation Operator.

$$
y = \begin{cases} 
    x, \text{if } x > \lambda \\
    x, \text{if } x < -\lambda \\
    0,  \text{otherwise}
    \end{cases}
$$

)DOC");
  }
};

class SqrtOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  SqrtOpMaker(framework::OpProto *proto, framework::OpAttrChecker *op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of Sqrt operator");
    AddOutput("Y", "Output of Sqrt operator");
    AddComment(R"DOC(
Sqrt Activation Operator.

$y = \sqrt{x}$

)DOC");
  }
};

class AbsOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  AbsOpMaker(framework::OpProto *proto, framework::OpAttrChecker *op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of Abs operator");
    AddOutput("Y", "Output of Abs operator");
    AddComment(R"DOC(
Abs Activation Operator.

$y = |x|$

)DOC");
  }
};

class CeilOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  CeilOpMaker(framework::OpProto *proto, framework::OpAttrChecker *op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of Ceil operator");
    AddOutput("Y", "Output of Ceil operator");
    AddComment(R"DOC(
Ceil Activation Operator.

$y = ceil(x)$

)DOC");
  }
};

class FloorOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  FloorOpMaker(framework::OpProto *proto, framework::OpAttrChecker *op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of Floor operator");
    AddOutput("Y", "Output of Floor operator");
    AddComment(R"DOC(
Floor Activation Operator.

$y = floor(x)$

)DOC");
  }
};

class RoundOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  RoundOpMaker(framework::OpProto *proto, framework::OpAttrChecker *op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of Round operator");
    AddOutput("Y", "Output of Round operator");
    AddComment(R"DOC(
Round Activation Operator.

$y = [x]$

)DOC");
  }
};

class ReciprocalOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  ReciprocalOpMaker(framework::OpProto *proto,
                    framework::OpAttrChecker *op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of Reciprocal operator");
    AddOutput("Y", "Output of Reciprocal operator");
    AddComment(R"DOC(
Reciprocal Activation Operator.

$$y = \frac{1}{x}$$

)DOC");
  }
};

class LogOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  LogOpMaker(framework::OpProto *proto, framework::OpAttrChecker *op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of Log operator");
    AddOutput("Y", "Output of Log operator");
    AddComment(R"DOC(
Log Activation Operator.

$y = \ln(x)$

Natural logarithm of x.

)DOC");
  }
};

class SquareOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  SquareOpMaker(framework::OpProto *proto, framework::OpAttrChecker *op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of Square operator");
    AddOutput("Y", "Output of Square operator");
    AddComment(R"DOC(
Square Activation Operator.

$y = x^2$

)DOC");
  }
};

class SoftplusOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  SoftplusOpMaker(framework::OpProto *proto,
                  framework::OpAttrChecker *op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of Softplus operator");
    AddOutput("Y", "Output of Softplus operator");
    AddComment(R"DOC(
Softplus Activation Operator.

$y = \ln(1 + e^{x})$

)DOC");
  }
};

class SoftsignOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  SoftsignOpMaker(framework::OpProto *proto,
                  framework::OpAttrChecker *op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of Softsign operator");
    AddOutput("Y", "Output of Softsign operator");
    AddComment(R"DOC(
Softsign Activation Operator.

$$y = \frac{x}{1 + |x|}$$

)DOC");
  }
};

class BReluOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  BReluOpMaker(framework::OpProto *proto, framework::OpAttrChecker *op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of BRelu operator");
    AddOutput("Y", "Output of BRelu operator");
    AddAttr<float>("t_min", "The min marginal value of BRelu")
        .SetDefault(static_cast<float>(0));
    AddAttr<float>("t_max", "The max marginal value of BRelu")
        .SetDefault(static_cast<float>(24));
    AddComment(R"DOC(
BRelu Activation Operator.

$y = \min(\max(x, t_{min}), t_{max})$

)DOC");
  }
};

class SoftReluOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  SoftReluOpMaker(framework::OpProto *proto,
                  framework::OpAttrChecker *op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of SoftRelu operator");
    AddOutput("Y", "Output of SoftRelu operator");
    AddAttr<float>("threshold", "The threshold value of SoftRelu")
        .SetDefault(40.0f);
    AddComment(R"DOC(
SoftRelu Activation Operator.

$y = \ln(1 + \exp(\max(\min(x, threshold), -threshold)))$

)DOC");
  }
};

class ELUOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  ELUOpMaker(framework::OpProto *proto, framework::OpAttrChecker *op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of ELU operator");
    AddOutput("Y", "Output of ELU operator");
    AddAttr<float>("alpha", "The alpha value of ELU").SetDefault(1.0f);
    AddComment(R"DOC(
ELU Activation Operator.

Applies the following element-wise computation on the input according to
https://arxiv.org/abs/1511.07289.

$y = \max(0, x) + \min(0, \alpha * (e^x - 1))$

)DOC");
  }
};

class Relu6OpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  Relu6OpMaker(framework::OpProto *proto, framework::OpAttrChecker *op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of Relu6 operator");
    AddOutput("Y", "Output of Relu6 operator");
    AddAttr<float>("threshold", "The threshold value of Relu6")
        .SetDefault(6.0f);
    AddComment(R"DOC(
Relu6 Activation Operator.

$y = \min(\max(0, x), 6)$

)DOC");
  }
};

class PowOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  PowOpMaker(framework::OpProto *proto, framework::OpAttrChecker *op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of Pow operator");
    AddOutput("Y", "Output of Pow operator");
    AddAttr<float>("factor", "The exponential factor of Pow").SetDefault(1.0f);
    AddComment(R"DOC(
Pow Activation Operator.

$y = x^{factor}$

)DOC");
  }
};

class STanhOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  STanhOpMaker(framework::OpProto *proto, framework::OpAttrChecker *op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of STanh operator");
    AddOutput("Y", "Output of STanh operator");
    AddAttr<float>("scale_a", "The scale parameter of a for the input")
        .SetDefault(2.0f / 3.0f);
    AddAttr<float>("scale_b", "The scale parameter of b for the input")
        .SetDefault(1.7159f);
    AddComment(R"DOC(
STanh Activation Operator.

$$y = b * \frac{e^{a * x} - e^{-a * x}}{e^{a * x} + e^{-a * x}}$$

)DOC");
  }
};

class ThresholdedReluOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  ThresholdedReluOpMaker(framework::OpProto *proto,
                         framework::OpAttrChecker *op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of ThresholdedRelu operator");
    AddOutput("Y", "Output of ThresholdedRelu operator");
    AddAttr<float>("threshold", "The threshold location of activation")
        .SetDefault(1.0f);
    AddComment(R"DOC(
ThresholdedRelu Activation Operator.

$$
y = \begin{cases} 
    x, \text{if } x > threshold \\
    0,  \text{otherwise}
    \end{cases}
$$

)DOC");
  }
};

class HardSigmoidOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  HardSigmoidOpMaker(framework::OpProto *proto,
                     framework::OpAttrChecker *op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of HardSigmoid operator");
    AddOutput("Y", "Output of HardSigmoid operator");
    AddAttr<float>("slope", "Slope for linear approximation of sigmoid")
        .SetDefault(0.2f);
    AddAttr<float>("offset", "Offset for linear approximation of sigmoid")
        .SetDefault(0.5f);
    AddComment(R"DOC(
HardSigmoid Activation Operator.

Segment-wise linear approximation of sigmoid (https://arxiv.org/abs/1603.00391),
which is much faster than sigmoid.

$y = \max(0, \min(1, slope * x + offset))$

The slope should be positive. The offset can be either positive or negative.
The default slope and offset are set according to the above reference.
It is recommended to use the defaults for this activation.

)DOC");
  }
};

class SwishOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  SwishOpMaker(framework::OpProto *proto, framework::OpAttrChecker *op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of Swish operator");
    AddOutput("Y", "Output of Swish operator");
    AddAttr<float>("beta", "Constant beta of swish operator").SetDefault(1.0f);
    AddComment(R"DOC(
Swish Activation Operator.

$$y = \frac{x}{1 + e^{- \beta x}}$$

)DOC");
  }
};

}  // namespace operators
}  // namespace paddle

namespace ops = paddle::operators;

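// Each REGISTER_OP call below pairs a forward activation op with its
// gradient op: REGISTER_OP(op_type, op_class, op_maker, grad_op_type,
// grad_op_class). All activations reuse ActivationOp/ActivationOpGrad for
// shape inference and differ only in their OpMaker.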
REGISTER_OP(sigmoid, ops::ActivationOp, ops::SigmoidOpMaker, sigmoid_grad,
            ops::ActivationOpGrad);

REGISTER_OP(logsigmoid, ops::ActivationOp, ops::LogSigmoidOpMaker,
            logsigmoid_grad, ops::ActivationOpGrad);

REGISTER_OP(exp, ops::ActivationOp, ops::ExpOpMaker, exp_grad,
            ops::ActivationOpGrad);

REGISTER_OP(relu, ops::ActivationOp, ops::ReluOpMaker, relu_grad,
            ops::ActivationOpGrad);

REGISTER_OP(tanh, ops::ActivationOp, ops::TanhOpMaker, tanh_grad,
            ops::ActivationOpGrad);

REGISTER_OP(tanh_shrink, ops::ActivationOp, ops::TanhShrinkOpMaker,
            tanh_shrink_grad, ops::ActivationOpGrad);

REGISTER_OP(softshrink, ops::ActivationOp, ops::SoftShrinkOpMaker,
            softshrink_grad, ops::ActivationOpGrad);

REGISTER_OP(sqrt, ops::ActivationOp, ops::SqrtOpMaker, sqrt_grad,
            ops::ActivationOpGrad);

REGISTER_OP(abs, ops::ActivationOp, ops::AbsOpMaker, abs_grad,
            ops::ActivationOpGrad);

REGISTER_OP(ceil, ops::ActivationOp, ops::CeilOpMaker, ceil_grad,
            ops::ActivationOpGrad);

REGISTER_OP(floor, ops::ActivationOp, ops::FloorOpMaker, floor_grad,
            ops::ActivationOpGrad);

REGISTER_OP(round, ops::ActivationOp, ops::RoundOpMaker, round_grad,
            ops::ActivationOpGrad);

REGISTER_OP(reciprocal, ops::ActivationOp, ops::ReciprocalOpMaker,
            reciprocal_grad, ops::ActivationOpGrad);

REGISTER_OP(log, ops::ActivationOp, ops::LogOpMaker, log_grad,
            ops::ActivationOpGrad);

REGISTER_OP(square, ops::ActivationOp, ops::SquareOpMaker, square_grad,
            ops::ActivationOpGrad);

REGISTER_OP(softplus, ops::ActivationOp, ops::SoftplusOpMaker, softplus_grad,
            ops::ActivationOpGrad);

REGISTER_OP(softsign, ops::ActivationOp, ops::SoftsignOpMaker, softsign_grad,
            ops::ActivationOpGrad);

REGISTER_OP(brelu, ops::ActivationOp, ops::BReluOpMaker, brelu_grad,
            ops::ActivationOpGrad);

REGISTER_OP(leaky_relu, ops::ActivationOp, ops::LeakyReluOpMaker,
            leaky_relu_grad, ops::ActivationOpGrad);

REGISTER_OP(soft_relu, ops::ActivationOp, ops::SoftReluOpMaker, soft_relu_grad,
            ops::ActivationOpGrad);

REGISTER_OP(elu, ops::ActivationOp, ops::ELUOpMaker, elu_grad,
            ops::ActivationOpGrad);

REGISTER_OP(relu6, ops::ActivationOp, ops::Relu6OpMaker, relu6_grad,
            ops::ActivationOpGrad);

REGISTER_OP(pow, ops::ActivationOp, ops::PowOpMaker, pow_grad,
            ops::ActivationOpGrad);

REGISTER_OP(stanh, ops::ActivationOp, ops::STanhOpMaker, stanh_grad,
            ops::ActivationOpGrad);

REGISTER_OP(hard_shrink, ops::ActivationOp, ops::HardShrinkOpMaker,
            hard_shrink_grad, ops::ActivationOpGrad);

REGISTER_OP(thresholded_relu, ops::ActivationOp, ops::ThresholdedReluOpMaker,
            thresholded_relu_grad, ops::ActivationOpGrad);

REGISTER_OP(hard_sigmoid, ops::ActivationOp, ops::HardSigmoidOpMaker,
            hard_sigmoid_grad, ops::ActivationOpGrad);

REGISTER_OP(swish, ops::ActivationOp, ops::SwishOpMaker, swish_grad,
            ops::ActivationOpGrad);

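// Instantiates float and double CPU kernels for one activation op: a forward
// ActivationKernel built from `functor` and a backward ActivationGradKernel
// built from `grad_functor`. FOR_EACH_KERNEL_FUNCTOR (brought in via
// activation_op.h) expands this macro once per (op_type, functor,
// grad_functor) triple, covering every activation registered above.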
#define REGISTER_ACTIVATION_CPU_KERNEL(act_type, functor, grad_functor)       \
  REGISTER_OP_CPU_KERNEL(                                                     \
      act_type,                                                               \
      ops::ActivationKernel<paddle::platform::CPUPlace, ops::functor<float>>, \
      ops::ActivationKernel<paddle::platform::CPUPlace,                       \
                            ops::functor<double>>);                           \
  REGISTER_OP_CPU_KERNEL(                                                     \
      act_type##_grad, ops::ActivationGradKernel<paddle::platform::CPUPlace,  \
                                                 ops::grad_functor<float>>,   \
      ops::ActivationGradKernel<paddle::platform::CPUPlace,                   \
                                ops::grad_functor<double>>);

FOR_EACH_KERNEL_FUNCTOR(REGISTER_ACTIVATION_CPU_KERNEL);