/* Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#include "paddle/fluid/operators/activation_op.h"
#include <string>
#include "paddle/fluid/operators/mkldnn_activation_op.h"

namespace paddle {
namespace operators {

#define REGISTER_ACTIVATION_OP_MAKER(OP_NAME, OP_COMMENT)               \
  class OP_NAME##OpMaker                                                \
      : public ::paddle::framework::OpProtoAndCheckerMaker {            \
   public:                                                              \
    void Make() override {                                              \
      AddInput("X", "Input of " #OP_NAME " operator");                  \
      AddOutput("Out", "Output of " #OP_NAME " operator");              \
      AddAttr<bool>("use_mkldnn",                                       \
                    "(bool, default false) Only used in mkldnn kernel") \
          .SetDefault(false);                                           \
      AddComment(OP_COMMENT);                                           \
    }                                                                   \
  }
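
// For illustration only (not part of the build): an invocation such as
// REGISTER_ACTIVATION_OP_MAKER(Sigmoid, SigmoidDoc); expands to roughly
//
//   class SigmoidOpMaker : public ::paddle::framework::OpProtoAndCheckerMaker {
//    public:
//     void Make() override {
//       AddInput("X", "Input of Sigmoid operator");
//       AddOutput("Out", "Output of Sigmoid operator");
//       AddAttr<bool>("use_mkldnn",
//                     "(bool, default false) Only used in mkldnn kernel")
//           .SetDefault(false);
//       AddComment(SigmoidDoc);
//     }
//   };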

#define REGISTER_ACTIVATION_OP_GRAD_MAKER(OP_NAME, KERNEL_TYPE)              \
  class OP_NAME##GradMaker                                                   \
      : public ::paddle::framework::SingleGradOpDescMaker {                  \
   public:                                                                   \
    using ::paddle::framework::SingleGradOpDescMaker::SingleGradOpDescMaker; \
                                                                             \
   protected:                                                                \
    std::unique_ptr<::paddle::framework::OpDesc> Apply() const override {    \
      auto* op = new ::paddle::framework::OpDesc();                          \
      op->SetType(#KERNEL_TYPE "_grad");                                     \
      op->SetInput("Out", Output("Out"));                                    \
      op->SetInput(::paddle::framework::GradVarName("Out"),                  \
                   OutputGrad("Out"));                                       \
                                                                             \
      op->SetAttrMap(Attrs());                                               \
                                                                             \
      op->SetOutput(::paddle::framework::GradVarName("X"), InputGrad("X"));  \
      return std::unique_ptr<::paddle::framework::OpDesc>(op);               \
    }                                                                        \
  }
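
// Note: the grad op generated by this maker reads only "Out" and the gradient
// of "Out" (it never reads "X"), so it is wired up below only for activations
// whose gradient can be computed from the forward output alone; the remaining
// activations fall back to the framework's DefaultGradOpDescMaker.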

framework::OpKernelType GetKernelType(const framework::ExecutionContext& ctx,
                                      const framework::OperatorWithKernel& oper,
                                      const std::string& name) {
  framework::LibraryType library{framework::LibraryType::kPlain};
#ifdef PADDLE_WITH_MKLDNN
  auto it = oper.Attrs().find("use_mkldnn");
  if (library == framework::LibraryType::kPlain && it != oper.Attrs().end() &&
      platform::CanMKLDNNBeUsed(ctx)) {
    library = framework::LibraryType::kMKLDNN;
  }
#endif
  framework::DataLayout layout = framework::DataLayout::kAnyLayout;
  return framework::OpKernelType(
      framework::ToDataType(ctx.Input<framework::Tensor>(name)->type()),
      ctx.GetPlace(), layout, library);
}
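
// ActivationOp and ActivationOpGrad below both delegate to GetKernelType():
// the kernel data type is taken from the named input tensor ("X" for the
// forward op, "Out" for the grad op), and the MKLDNN kernel is selected only
// when Paddle is built with MKLDNN, the "use_mkldnn" attribute is set, and
// the current place supports it; otherwise the plain kernel is used.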

class ActivationOp : public framework::OperatorWithKernel {
 public:
  using framework::OperatorWithKernel::OperatorWithKernel;

  void InferShape(framework::InferShapeContext* ctx) const override {
    ctx->SetOutputDim("Out", ctx->GetInputDim("X"));
    ctx->ShareLoD("X", /*->*/ "Out");
  }

  framework::OpKernelType GetExpectedKernelType(
      const framework::ExecutionContext& ctx) const override {
    return GetKernelType(ctx, *this, "X");
  }
};

class ActivationOpGrad : public framework::OperatorWithKernel {
 public:
  using framework::OperatorWithKernel::OperatorWithKernel;

  void InferShape(framework::InferShapeContext* ctx) const override {
    ctx->SetOutputDim(framework::GradVarName("X"), ctx->GetInputDim("Out"));
  }

  framework::OpKernelType GetExpectedKernelType(
      const framework::ExecutionContext& ctx) const override {
    return GetKernelType(ctx, *this, "Out");
  }
};

__attribute__((unused)) constexpr char SigmoidDoc[] = R"DOC(
Sigmoid Activation Operator.

$$out = \frac{1}{1 + e^{-x}}$$

)DOC";

__attribute__((unused)) constexpr char LogSigmoidDoc[] = R"DOC(
Logsigmoid Activation Operator.

$$out = \log \frac{1}{1 + e^{-x}}$$

)DOC";

__attribute__((unused)) constexpr char ExpDoc[] = R"DOC(
Exp Activation Operator.

$out = e^x$

)DOC";

__attribute__((unused)) constexpr char ReluDoc[] = R"DOC(
Relu Activation Operator.

$out = \max(x, 0)$

)DOC";

__attribute__((unused)) constexpr char TanhDoc[] = R"DOC(
Tanh Activation Operator.

$$out = \frac{e^{x} - e^{-x}}{e^{x} + e^{-x}}$$

)DOC";

__attribute__((unused)) constexpr char TanhShrinkDoc[] = R"DOC(
TanhShrink Activation Operator.

$$out = x - \frac{e^{x} - e^{-x}}{e^{x} + e^{-x}}$$

)DOC";

__attribute__((unused)) constexpr char SqrtDoc[] = R"DOC(
Sqrt Activation Operator.

$out = \sqrt{x}$

)DOC";

__attribute__((unused)) constexpr char AbsDoc[] = R"DOC(
Abs Activation Operator.

$out = |x|$

)DOC";

__attribute__((unused)) constexpr char CeilDoc[] = R"DOC(
Ceil Activation Operator.

$out = ceil(x)$

)DOC";

__attribute__((unused)) constexpr char FloorDoc[] = R"DOC(
Floor Activation Operator.

$out = floor(x)$

)DOC";

__attribute__((unused)) constexpr char CosDoc[] = R"DOC(
Cosine Activation Operator.

$out = cos(x)$

)DOC";

__attribute__((unused)) constexpr char SinDoc[] = R"DOC(
Sine Activation Operator.

$out = sin(x)$

)DOC";

__attribute__((unused)) constexpr char RoundDoc[] = R"DOC(
Round Activation Operator.

$out = [x]$

)DOC";

__attribute__((unused)) constexpr char ReciprocalDoc[] = R"DOC(
Reciprocal Activation Operator.

$$out = \frac{1}{x}$$

)DOC";

__attribute__((unused)) constexpr char LogDoc[] = R"DOC(
Log Activation Operator.

$out = \ln(x)$

Natural logarithm of x.

)DOC";

__attribute__((unused)) constexpr char SquareDoc[] = R"DOC(
Square Activation Operator.

$out = x^2$

)DOC";

__attribute__((unused)) constexpr char SoftplusDoc[] = R"DOC(
Softplus Activation Operator.

$out = \ln(1 + e^{x})$

)DOC";

__attribute__((unused)) constexpr char SoftsignDoc[] = R"DOC(
Softsign Activation Operator.

$$out = \frac{x}{1 + |x|}$$

)DOC";
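
// The op makers below are written out by hand (rather than generated by
// REGISTER_ACTIVATION_OP_MAKER above) because each of these activations
// takes additional attributes such as "alpha", "lambda" or "threshold".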

class LeakyReluOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  void Make() override {
    AddInput("X", "Input of LeakyRelu operator");
    AddOutput("Out", "Output of LeakyRelu operator");
    AddAttr<float>("alpha", "The small negative slope").SetDefault(0.02f);
    AddComment(R"DOC(
LeakyRelu Activation Operator.

$out = \max(x, \alpha * x)$

)DOC");
  }
};

class SoftShrinkOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  void Make() override {
    AddInput("X", "Input of Softshrink operator");
    AddOutput("Out", "Output of Softshrink operator");
    AddAttr<float>("lambda", "non-negative offset").SetDefault(0.5f);
    AddComment(R"DOC(
Softshrink Activation Operator.

$$
out = \begin{cases}
    x - \lambda, \text{if } x > \lambda \\
    x + \lambda, \text{if } x < -\lambda \\
    0,  \text{otherwise}
    \end{cases}
$$

)DOC");
  }
};

class HardShrinkOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  void Make() override {
    AddInput("X", "Input of HardShrink operator");
    AddOutput("Out", "Output of HardShrink operator");
    AddAttr<float>("threshold", "The value of threshold for HardShrink")
        .SetDefault(0.5f);
    AddComment(R"DOC(
HardShrink Activation Operator.

$$
out = \begin{cases}
    x, \text{if } x > threshold \\
    x, \text{if } x < -threshold \\
    0,  \text{otherwise}
    \end{cases}
$$

)DOC");
  }
};

class BReluOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  void Make() override {
    AddInput("X", "Input of BRelu operator");
    AddOutput("Out", "Output of BRelu operator");
    AddAttr<float>("t_min", "The min marginal value of BRelu")
        .SetDefault(static_cast<float>(0));
    AddAttr<float>("t_max", "The max marginal value of BRelu")
        .SetDefault(static_cast<float>(24));
    AddComment(R"DOC(
BRelu Activation Operator.

$out = \min(\max(x, t_{min}), t_{max})$

)DOC");
  }
};

class SoftReluOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  void Make() override {
    AddInput("X", "Input of SoftRelu operator");
    AddOutput("Out", "Output of SoftRelu operator");
    AddAttr<float>("threshold", "The threshold value of SoftRelu")
        .SetDefault(40.0f);
    AddComment(R"DOC(
SoftRelu Activation Operator.

$out = \ln(1 + \exp(\max(\min(x, threshold), -threshold)))$

)DOC");
  }
};

class ELUOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  void Make() override {
    AddInput("X", "Input of ELU operator");
    AddOutput("Out", "Output of ELU operator");
    AddAttr<float>("alpha", "The alpha value of ELU").SetDefault(1.0f);
    AddComment(R"DOC(
ELU Activation Operator.

Applies the following element-wise computation on the input according to
https://arxiv.org/abs/1511.07289.

$out = \max(0, x) + \min(0, \alpha * (e^x - 1))$

)DOC");
  }
};

class Relu6OpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  void Make() override {
    AddInput("X", "Input of Relu6 operator");
    AddOutput("Out", "Output of Relu6 operator");
    AddAttr<float>("threshold", "The threshold value of Relu6")
        .SetDefault(6.0f);
    AddComment(R"DOC(
Relu6 Activation Operator.

$out = \min(\max(0, x), 6)$

)DOC");
  }
};

class PowOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  void Make() override {
    AddInput("X", "Input of Pow operator");
    AddOutput("Out", "Output of Pow operator");
    AddAttr<float>("factor", "The exponential factor of Pow").SetDefault(1.0f);
    AddComment(R"DOC(
Pow Activation Operator.

$out = x^{factor}$

)DOC");
  }
};

class STanhOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  void Make() override {
    AddInput("X", "Input of STanh operator");
    AddOutput("Out", "Output of STanh operator");
    AddAttr<float>("scale_a", "The scale parameter of a for the input")
        .SetDefault(2.0f / 3.0f);
    AddAttr<float>("scale_b", "The scale parameter of b for the input")
        .SetDefault(1.7159f);
    AddComment(R"DOC(
STanh Activation Operator.

$$out = b * \frac{e^{a * x} - e^{-a * x}}{e^{a * x} + e^{-a * x}}$$

)DOC");
  }
};

class ThresholdedReluOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  void Make() override {
    AddInput("X", "Input of ThresholdedRelu operator");
    AddOutput("Out", "Output of ThresholdedRelu operator");
    AddAttr<float>("threshold", "The threshold location of activation")
        .SetDefault(1.0f);
    AddComment(R"DOC(
ThresholdedRelu Activation Operator.

$$
out = \begin{cases}
    x, \text{if } x > threshold \\
    0,  \text{otherwise}
    \end{cases}
$$

)DOC");
  }
};

class HardSigmoidOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  void Make() override {
    AddInput("X", "Input of HardSigmoid operator");
    AddOutput("Out", "Output of HardSigmoid operator");
    AddAttr<float>("slope", "Slope for linear approximation of sigmoid")
        .SetDefault(0.2f);
    AddAttr<float>("offset", "Offset for linear approximation of sigmoid")
        .SetDefault(0.5f);
    AddComment(R"DOC(
HardSigmoid Activation Operator.

A segment-wise linear approximation of sigmoid (https://arxiv.org/abs/1603.00391),
which is much faster than sigmoid.

$out = \max(0, \min(1, slope * x + offset))$

The slope should be positive. The offset can be either positive or negative.
The default slope and offset are set according to the above reference.
It is recommended to use the defaults for this activation.

)DOC");
  }
};

class SwishOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  void Make() override {
    AddInput("X", "Input of Swish operator");
    AddOutput("Out", "Output of Swish operator");
    AddAttr<float>("beta", "Constant beta of swish operator").SetDefault(1.0f);
    AddComment(R"DOC(
Swish Activation Operator.

$$out = \frac{x}{1 + e^{- \beta x}}$$

)DOC");
  }
};

REGISTER_ACTIVATION_OP_MAKER(Sigmoid, SigmoidDoc);
REGISTER_ACTIVATION_OP_MAKER(LogSigmoid, LogSigmoidDoc);
REGISTER_ACTIVATION_OP_MAKER(Exp, ExpDoc);
REGISTER_ACTIVATION_OP_MAKER(Relu, ReluDoc);
REGISTER_ACTIVATION_OP_MAKER(Tanh, TanhDoc);
REGISTER_ACTIVATION_OP_MAKER(TanhShrink, TanhShrinkDoc);
REGISTER_ACTIVATION_OP_MAKER(Sqrt, SqrtDoc);
REGISTER_ACTIVATION_OP_MAKER(Abs, AbsDoc);
REGISTER_ACTIVATION_OP_MAKER(Ceil, CeilDoc);
REGISTER_ACTIVATION_OP_MAKER(Floor, FloorDoc);
REGISTER_ACTIVATION_OP_MAKER(Cos, CosDoc);
REGISTER_ACTIVATION_OP_MAKER(Sin, SinDoc);
REGISTER_ACTIVATION_OP_MAKER(Round, RoundDoc);
REGISTER_ACTIVATION_OP_MAKER(Reciprocal, ReciprocalDoc);
REGISTER_ACTIVATION_OP_MAKER(Log, LogDoc);
REGISTER_ACTIVATION_OP_MAKER(Square, SquareDoc);
REGISTER_ACTIVATION_OP_MAKER(Softplus, SoftplusDoc);
REGISTER_ACTIVATION_OP_MAKER(Softsign, SoftsignDoc);

REGISTER_ACTIVATION_OP_GRAD_MAKER(Sigmoid, sigmoid);
REGISTER_ACTIVATION_OP_GRAD_MAKER(Relu, relu);
REGISTER_ACTIVATION_OP_GRAD_MAKER(Exp, exp);
REGISTER_ACTIVATION_OP_GRAD_MAKER(Tanh, tanh);
REGISTER_ACTIVATION_OP_GRAD_MAKER(Ceil, ceil);
REGISTER_ACTIVATION_OP_GRAD_MAKER(Floor, floor);
REGISTER_ACTIVATION_OP_GRAD_MAKER(Sqrt, sqrt);
REGISTER_ACTIVATION_OP_GRAD_MAKER(SoftRelu, soft_relu);
REGISTER_ACTIVATION_OP_GRAD_MAKER(Relu6, relu6);
REGISTER_ACTIVATION_OP_GRAD_MAKER(Reciprocal, reciprocal);
REGISTER_ACTIVATION_OP_GRAD_MAKER(HardSigmoid, hard_sigmoid);
}  // namespace operators
}  // namespace paddle

namespace ops = paddle::operators;

#define FOR_EACH_INPLACE_OP_FUNCTOR(__macro) \
  __macro(Sigmoid, sigmoid);                 \
  __macro(Relu, relu);                       \
  __macro(Exp, exp);                         \
  __macro(Tanh, tanh);                       \
  __macro(Ceil, ceil);                       \
  __macro(Floor, floor);                     \
  __macro(Sqrt, sqrt);                       \
  __macro(SoftRelu, soft_relu);              \
  __macro(Relu6, relu6);                     \
  __macro(Reciprocal, reciprocal);           \
  __macro(HardSigmoid, hard_sigmoid);
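
// FOR_EACH_INPLACE_OP_FUNCTOR above lists the activations that are registered
// with their custom OP_NAME##GradMaker (gradient computed from "Out" only),
// while FOR_EACH_OP_FUNCTOR below lists the activations registered with the
// framework's default grad op desc maker; see the registration macros below.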

#define FOR_EACH_OP_FUNCTOR(__macro) \
  __macro(LogSigmoid, logsigmoid);   \
  __macro(SoftShrink, softshrink);   \
  __macro(Abs, abs);                 \
  __macro(Cos, cos);                 \
  __macro(Sin, sin);                 \
  __macro(Round, round);             \
  __macro(Log, log);                 \
  __macro(Square, square);           \
  __macro(BRelu, brelu);             \
  __macro(Pow, pow);                 \
  __macro(STanh, stanh);             \
  __macro(Softplus, softplus);       \
  __macro(Softsign, softsign);       \
  __macro(LeakyRelu, leaky_relu);    \
  __macro(TanhShrink, tanh_shrink);  \
  __macro(ELU, elu);                 \
  __macro(HardShrink, hard_shrink);  \
  __macro(Swish, swish);             \
  __macro(ThresholdedRelu, thresholded_relu);

#define REGISTER_INPLACE_ACTIVATION_OP(OP_NAME, KERNEL_TYPE)        \
  REGISTER_OPERATOR(KERNEL_TYPE, ::paddle::operators::ActivationOp, \
                    ::paddle::operators::OP_NAME##OpMaker,          \
                    ::paddle::operators::OP_NAME##GradMaker);       \
  REGISTER_OPERATOR(KERNEL_TYPE##_grad, ::paddle::operators::ActivationOpGrad)

#define REGISTER_ACTIVATION_OP(OP_NAME, KERNEL_TYPE)                    \
  REGISTER_OPERATOR(KERNEL_TYPE, ::paddle::operators::ActivationOp,     \
                    ::paddle::operators::OP_NAME##OpMaker,              \
                    ::paddle::framework::DefaultGradOpDescMaker<true>); \
  REGISTER_OPERATOR(KERNEL_TYPE##_grad, ::paddle::operators::ActivationOpGrad)
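
// The two registration macros differ only in the grad op desc maker:
// REGISTER_INPLACE_ACTIVATION_OP wires in the OP_NAME##GradMaker generated by
// REGISTER_ACTIVATION_OP_GRAD_MAKER (its grad op reads only "Out", which is
// what allows the forward op to run in place), while REGISTER_ACTIVATION_OP
// uses DefaultGradOpDescMaker<true>.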

#define REGISTER_ACTIVATION_CPU_KERNEL(act_type, functor, grad_functor)   \
  REGISTER_OP_CPU_KERNEL(                                                 \
      act_type, ops::ActivationKernel<paddle::platform::CPUDeviceContext, \
                                      ops::functor<float>>,               \
      ops::ActivationKernel<paddle::platform::CPUDeviceContext,           \
                            ops::functor<double>>);                       \
  REGISTER_OP_CPU_KERNEL(                                                 \
      act_type##_grad,                                                    \
      ops::ActivationGradKernel<paddle::platform::CPUDeviceContext,       \
                                ops::grad_functor<float>>,                \
      ops::ActivationGradKernel<paddle::platform::CPUDeviceContext,       \
                                ops::grad_functor<double>>);
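
// For example (illustration only): expanding REGISTER_ACTIVATION_CPU_KERNEL
// for the sigmoid entry registers float and double CPU kernels for both
// "sigmoid" and "sigmoid_grad"; the FOR_EACH_KERNEL_FUNCTOR invocation at the
// bottom supplies the (act_type, functor, grad_functor) triples.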

FOR_EACH_OP_FUNCTOR(REGISTER_ACTIVATION_OP);
FOR_EACH_INPLACE_OP_FUNCTOR(REGISTER_INPLACE_ACTIVATION_OP);
FOR_EACH_KERNEL_FUNCTOR(REGISTER_ACTIVATION_CPU_KERNEL);