/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at

   http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License. */

#include "paddle/operators/activation_op.h"

namespace paddle {
namespace operators {

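// Forward op shared by all activations: the output Y has the same
// dimensions as the input X, and X's LoD is shared with Y.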
class ActivationOp : public framework::OperatorWithKernel {
 public:
  using framework::OperatorWithKernel::OperatorWithKernel;

  void InferShape(framework::InferShapeContext *ctx) const override {
    ctx->SetOutputDim("Y", ctx->GetInputDim("X"));
    ctx->ShareLoD("X", /*->*/ "Y");
  }
};

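// Gradient op shared by all activations: the gradient w.r.t. X has the
// same dimensions as the forward output Y.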
class ActivationOpGrad : public framework::OperatorWithKernel {
 public:
  using framework::OperatorWithKernel::OperatorWithKernel;

  void InferShape(framework::InferShapeContext *ctx) const override {
    ctx->SetOutputDim(framework::GradVarName("X"), ctx->GetInputDim("Y"));
  }
};

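// Each OpMaker below declares one activation's inputs, outputs, attributes,
// and documentation. All of them reuse ActivationOp and ActivationOpGrad.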
class SigmoidOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  SigmoidOpMaker(framework::OpProto *proto,
                 framework::OpAttrChecker *op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of Sigmoid operator");
    AddOutput("Y", "Output of Sigmoid operator");
    AddComment(R"DOC(
Sigmoid Activation Operator.

$y = 1 / (1 + e^{-x})$

)DOC");
  }
};

class LogSigmoidOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  LogSigmoidOpMaker(framework::OpProto *proto,
                    framework::OpAttrChecker *op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of LogSigmoid operator");
    AddOutput("Y", "Output of LogSigmoid operator");
    AddComment(R"DOC(
LogSigmoid Activation Operator.

$y = \log(1 / (1 + e^{-x}))$

)DOC");
  }
};

class ExpOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  ExpOpMaker(framework::OpProto *proto, framework::OpAttrChecker *op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of Exp operator");
    AddOutput("Y", "Output of Exp operator");
    AddComment(R"DOC(
Exp Activation Operator.

$y = e^x$

)DOC");
  }
};

class ReluOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  ReluOpMaker(framework::OpProto *proto, framework::OpAttrChecker *op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of Relu operator");
    AddOutput("Y", "Output of Relu operator");
    AddComment(R"DOC(
Relu Activation Operator.

$y = \max(x, 0)$

)DOC");
  }
};

class LeakyReluOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  LeakyReluOpMaker(framework::OpProto *proto,
                   framework::OpAttrChecker *op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of LeakyRelu operator");
    AddOutput("Y", "Output of LeakyRelu operator");
    AddAttr<float>("alpha", "The small negative slope").SetDefault(0.02f);
    AddComment(R"DOC(
LeakyRelu Activation Operator.

$y = \max(x, \alpha * x)$

)DOC");
  }
};

class SoftShrinkOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  SoftShrinkOpMaker(framework::OpProto *proto,
                    framework::OpAttrChecker *op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of Softshrink operator");
    AddOutput("Y", "Output of Softshrink operator");
    AddAttr<float>("lambda", "non-negative offset").SetDefault(0.5f);
    AddComment(R"DOC(
Softshrink Activation Operator.

$$
y = \begin{cases} 
    x - \lambda, \text{if } x > \lambda \\
    x + \lambda, \text{if } x < -\lambda \\
    0,  \text{otherwise}
    \end{cases}
$$

)DOC");
  }
};

class TanhOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  TanhOpMaker(framework::OpProto *proto, framework::OpAttrChecker *op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of Tanh operator");
    AddOutput("Y", "Output of Tanh operator");
    AddComment(R"DOC(
Tanh Activation Operator.

$$y = \frac{e^{x} - e^{-x}}{e^{x} + e^{-x}}$$

)DOC");
  }
};

class TanhShrinkOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  TanhShrinkOpMaker(framework::OpProto *proto,
                    framework::OpAttrChecker *op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of TanhShrink operator");
    AddOutput("Y", "Output of TanhShrink operator");
    AddComment(R"DOC(
TanhShrink Activation Operator.

$$y = x - \frac{e^{x} - e^{-x}}{e^{x} + e^{-x}}$$

)DOC");
  }
};

class HardShrinkOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  HardShrinkOpMaker(framework::OpProto *proto,
                    framework::OpAttrChecker *op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of HardShrink operator");
    AddOutput("Y", "Output of HardShrink operator");
    AddAttr<float>("threshold", "The value of threshold for HardShrink")
        .SetDefault(0.5f);
    AddComment(R"DOC(
HardShrink Activation Operator.

$$
y = \begin{cases}
    x, \text{if } x > threshold \\
    x, \text{if } x < -threshold \\
    0,  \text{otherwise}
    \end{cases}
$$

)DOC");
  }
};

class SqrtOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  SqrtOpMaker(framework::OpProto *proto, framework::OpAttrChecker *op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of Sqrt operator");
    AddOutput("Y", "Output of Sqrt operator");
    AddComment(R"DOC(
Sqrt Activation Operator.

$y = \sqrt{x}$

)DOC");
  }
};

class AbsOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  AbsOpMaker(framework::OpProto *proto, framework::OpAttrChecker *op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of Abs operator");
    AddOutput("Y", "Output of Abs operator");
    AddComment(R"DOC(
Abs Activation Operator.

$y = |x|$

)DOC");
  }
};

class ReciprocalOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  ReciprocalOpMaker(framework::OpProto *proto,
                    framework::OpAttrChecker *op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of Reciprocal operator");
    AddOutput("Y", "Output of Reciprocal operator");
    AddComment(R"DOC(
Reciprocal Activation Operator.

$$y = \frac{1}{x}$$

)DOC");
  }
};

class LogOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  LogOpMaker(framework::OpProto *proto, framework::OpAttrChecker *op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of Log operator");
    AddOutput("Y", "Output of Log operator");
    AddComment(R"DOC(
Log Activation Operator.

$y = \ln(x)$

Natural logarithm of x.

)DOC");
  }
};

class SquareOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  SquareOpMaker(framework::OpProto *proto, framework::OpAttrChecker *op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of Square operator");
    AddOutput("Y", "Output of Square operator");
    AddComment(R"DOC(
Square Activation Operator.

$y = x^2$

)DOC");
  }
};

class SoftplusOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  SoftplusOpMaker(framework::OpProto *proto,
                  framework::OpAttrChecker *op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of Softplus operator");
    AddOutput("Y", "Output of Softplus operator");
    AddComment(R"DOC(
Softplus Activation Operator.

$y = \ln(1 + e^{x})$

)DOC");
  }
};

class SoftsignOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  SoftsignOpMaker(framework::OpProto *proto,
                  framework::OpAttrChecker *op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of Softsign operator");
    AddOutput("Y", "Output of Softsign operator");
    AddComment(R"DOC(
Softsign Activation Operator.

$$y = \frac{x}{1 + |x|}$$

)DOC");
  }
};

class BReluOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  BReluOpMaker(framework::OpProto *proto, framework::OpAttrChecker *op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of BRelu operator");
    AddOutput("Y", "Output of BRelu operator");
    AddAttr<float>("t_min", "The min marginal value of BRelu")
        .SetDefault(static_cast<float>(0));
    AddAttr<float>("t_max", "The max marginal value of BRelu")
        .SetDefault(static_cast<float>(24));
    AddComment(R"DOC(
BRelu Activation Operator.

$y = \max(\min(x, t_{max}), t_{min})$

)DOC");
  }
};

class SoftReluOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  SoftReluOpMaker(framework::OpProto *proto,
                  framework::OpAttrChecker *op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of SoftRelu operator");
    AddOutput("Y", "Output of SoftRelu operator");
    AddAttr<float>("threshold", "The threshold value of SoftRelu")
        .SetDefault(40.0f);
    AddComment(R"DOC(
SoftRelu Activation Operator.

$y = \ln(1 + \exp(\max(\min(x, threshold), -threshold)))$

)DOC");
  }
};

class ELUOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  ELUOpMaker(framework::OpProto *proto, framework::OpAttrChecker *op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of ELU operator");
    AddOutput("Y", "Output of ELU operator");
    AddAttr<float>("alpha", "The alpha value of ELU").SetDefault(1.0f);
    AddComment(R"DOC(
ELU Activation Operator.

Applies the following element-wise computation on the input according to
https://arxiv.org/abs/1511.07289.

$y = \max(0, x) + \min(0, \alpha * (e^x - 1))$

)DOC");
  }
};

class Relu6OpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  Relu6OpMaker(framework::OpProto *proto, framework::OpAttrChecker *op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of Relu6 operator");
    AddOutput("Y", "Output of Relu6 operator");
    AddAttr<float>("threshold", "The threshold value of Relu6")
        .SetDefault(6.0f);
    AddComment(R"DOC(
Relu6 Activation Operator.

$y = \min(\max(0, x), 6)$

)DOC");
  }
};

class PowOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  PowOpMaker(framework::OpProto *proto, framework::OpAttrChecker *op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of Pow operator");
    AddOutput("Y", "Output of Pow operator");
    AddAttr<float>("factor", "The exponential factor of Pow").SetDefault(1.0f);
    AddComment(R"DOC(
Pow Activation Operator.

$y = x^{factor}$

)DOC");
  }
};

class STanhOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  STanhOpMaker(framework::OpProto *proto, framework::OpAttrChecker *op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of STanh operator");
    AddOutput("Y", "Output of STanh operator");
    AddAttr<float>("scale_a", "The scale parameter of a for the input")
        .SetDefault(2.0f / 3.0f);
    AddAttr<float>("scale_b", "The scale parameter of b for the input")
        .SetDefault(1.7159f);
    AddComment(R"DOC(
STanh Activation Operator.

$$y = b * \frac{e^{a * x} - e^{-a * x}}{e^{a * x} + e^{-a * x}}$$

)DOC");
  }
};

class ThresholdedReluOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  ThresholdedReluOpMaker(framework::OpProto *proto,
                         framework::OpAttrChecker *op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of ThresholdedRelu operator");
    AddOutput("Y", "Output of ThresholdedRelu operator");
    AddAttr<float>("threshold", "The threshold location of activation")
        .SetDefault(1.0f);
    AddComment(R"DOC(
ThresholdedRelu Activation Operator.

$$
y = \begin{cases} 
    x, \text{if } x > threshold \\
    0,  \text{otherwise}
    \end{cases}
$$

)DOC");
  }
};

class HardSigmoidOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  HardSigmoidOpMaker(framework::OpProto *proto,
                     framework::OpAttrChecker *op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of HardSigmoid operator");
    AddOutput("Y", "Output of HardSigmoid operator");
    AddAttr<float>("slope", "Slope for linear approximation of sigmoid")
        .SetDefault(0.2f);
    AddAttr<float>("offset", "Offset for linear approximation of sigmoid")
        .SetDefault(0.5f);
    AddComment(R"DOC(
HardSigmoid Activation Operator.

Segment-wise linear approximation of sigmoid (https://arxiv.org/abs/1603.00391),
which is much faster than sigmoid.

$y = \max(0, \min(1, slope * x + offset))$

The slope should be positive. The offset can be either positive or negative.
The default slope and offset are set according to the above reference.
It is recommended to use the defaults for this activation.

)DOC");
  }
};

}  // namespace operators
}  // namespace paddle

namespace ops = paddle::operators;

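// REGISTER_OP(op_type, op_class, op_maker, grad_op_type, grad_op_class)
// registers the forward operator together with its gradient operator, so
// every activation below also gets a matching <op_type>_grad operator.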
REGISTER_OP(sigmoid, ops::ActivationOp, ops::SigmoidOpMaker, sigmoid_grad,
            ops::ActivationOpGrad);

REGISTER_OP(logsigmoid, ops::ActivationOp, ops::LogSigmoidOpMaker,
            logsigmoid_grad, ops::ActivationOpGrad);

REGISTER_OP(exp, ops::ActivationOp, ops::ExpOpMaker, exp_grad,
            ops::ActivationOpGrad);

REGISTER_OP(relu, ops::ActivationOp, ops::ReluOpMaker, relu_grad,
            ops::ActivationOpGrad);

REGISTER_OP(tanh, ops::ActivationOp, ops::TanhOpMaker, tanh_grad,
            ops::ActivationOpGrad);

REGISTER_OP(tanh_shrink, ops::ActivationOp, ops::TanhShrinkOpMaker,
            tanh_shrink_grad, ops::ActivationOpGrad);

REGISTER_OP(softshrink, ops::ActivationOp, ops::SoftShrinkOpMaker,
            softshrink_grad, ops::ActivationOpGrad);

REGISTER_OP(sqrt, ops::ActivationOp, ops::SqrtOpMaker, sqrt_grad,
            ops::ActivationOpGrad);

REGISTER_OP(abs, ops::ActivationOp, ops::AbsOpMaker, abs_grad,
            ops::ActivationOpGrad);

REGISTER_OP(reciprocal, ops::ActivationOp, ops::ReciprocalOpMaker,
            reciprocal_grad, ops::ActivationOpGrad);

REGISTER_OP(log, ops::ActivationOp, ops::LogOpMaker, log_grad,
            ops::ActivationOpGrad);

REGISTER_OP(square, ops::ActivationOp, ops::SquareOpMaker, square_grad,
            ops::ActivationOpGrad);

REGISTER_OP(softplus, ops::ActivationOp, ops::SoftplusOpMaker, softplus_grad,
            ops::ActivationOpGrad);

REGISTER_OP(softsign, ops::ActivationOp, ops::SoftsignOpMaker, softsign_grad,
            ops::ActivationOpGrad);

REGISTER_OP(brelu, ops::ActivationOp, ops::BReluOpMaker, brelu_grad,
            ops::ActivationOpGrad);

REGISTER_OP(leaky_relu, ops::ActivationOp, ops::LeakyReluOpMaker,
            leaky_relu_grad, ops::ActivationOpGrad);

REGISTER_OP(soft_relu, ops::ActivationOp, ops::SoftReluOpMaker, soft_relu_grad,
            ops::ActivationOpGrad);

REGISTER_OP(elu, ops::ActivationOp, ops::ELUOpMaker, elu_grad,
            ops::ActivationOpGrad);

REGISTER_OP(relu6, ops::ActivationOp, ops::Relu6OpMaker, relu6_grad,
            ops::ActivationOpGrad);

REGISTER_OP(pow, ops::ActivationOp, ops::PowOpMaker, pow_grad,
            ops::ActivationOpGrad);

REGISTER_OP(stanh, ops::ActivationOp, ops::STanhOpMaker, stanh_grad,
            ops::ActivationOpGrad);

REGISTER_OP(hard_shrink, ops::ActivationOp, ops::HardShrinkOpMaker,
            hard_shrink_grad, ops::ActivationOpGrad);

REGISTER_OP(thresholded_relu, ops::ActivationOp, ops::ThresholdedReluOpMaker,
            thresholded_relu_grad, ops::ActivationOpGrad);

REGISTER_OP(hard_sigmoid, ops::ActivationOp, ops::HardSigmoidOpMaker,
            hard_sigmoid_grad, ops::ActivationOpGrad);

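// Instantiate CPU kernels for each activation in float and double precision.
// FOR_EACH_KERNEL_FUNCTOR (see activation_op.h) expands the macro below once
// per (op_type, functor, grad_functor) triple.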
#define REGISTER_ACTIVATION_CPU_KERNEL(act_type, functor, grad_functor)       \
  REGISTER_OP_CPU_KERNEL(                                                     \
      act_type,                                                               \
      ops::ActivationKernel<paddle::platform::CPUPlace, ops::functor<float>>, \
      ops::ActivationKernel<paddle::platform::CPUPlace,                       \
                            ops::functor<double>>);                           \
  REGISTER_OP_CPU_KERNEL(                                                     \
      act_type##_grad, ops::ActivationGradKernel<paddle::platform::CPUPlace,  \
                                                 ops::grad_functor<float>>,   \
      ops::ActivationGradKernel<paddle::platform::CPUPlace,                   \
                                ops::grad_functor<double>>);

FOR_EACH_KERNEL_FUNCTOR(REGISTER_ACTIVATION_CPU_KERNEL);