/* Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#include "paddle/fluid/operators/activation_op.h"
#include <string>
#include "paddle/fluid/operators/mkldnn_activation_op.h"

namespace paddle {
namespace operators {

using paddle::framework::Tensor;

#define REGISTER_ACTIVATION_OP_MAKER(OP_NAME, OP_COMMENT)               \
  class OP_NAME##OpMaker                                                \
      : public ::paddle::framework::OpProtoAndCheckerMaker {            \
   public:                                                              \
    void Make() override {                                              \
      AddInput("X", "Input of " #OP_NAME " operator");                  \
      AddOutput("Out", "Output of " #OP_NAME " operator").Reuse("X");   \
      AddAttr<bool>("use_mkldnn",                                       \
                    "(bool, default false) Only used in mkldnn kernel") \
          .SetDefault(false);                                           \
      AddComment(OP_COMMENT);                                           \
    }                                                                   \
  }
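// Illustrative note (editorial, not from the original source): instantiating
// the maker macro above, e.g. REGISTER_ACTIVATION_OP_MAKER(Relu, ReluDoc)
// further down in this file, is expected to expand to roughly:
//
//   class ReluOpMaker : public ::paddle::framework::OpProtoAndCheckerMaker {
//    public:
//     void Make() override {
//       AddInput("X", "Input of Relu operator");
//       AddOutput("Out", "Output of Relu operator").Reuse("X");
//       AddAttr<bool>("use_mkldnn",
//                     "(bool, default false) Only used in mkldnn kernel")
//           .SetDefault(false);
//       AddComment(ReluDoc);
//     }
//   };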

#define REGISTER_ACTIVATION_OP_GRAD_MAKER(OP_NAME, KERNEL_TYPE)              \
  class OP_NAME##GradMaker                                                   \
      : public ::paddle::framework::SingleGradOpDescMaker {                  \
   public:                                                                   \
    using ::paddle::framework::SingleGradOpDescMaker::SingleGradOpDescMaker; \
                                                                             \
   protected:                                                                \
    std::unique_ptr<::paddle::framework::OpDesc> Apply() const override {    \
      auto* op = new ::paddle::framework::OpDesc();                          \
      op->SetType(#KERNEL_TYPE "_grad");                                     \
      op->SetInput("Out", Output("Out"));                                    \
      op->SetInput(::paddle::framework::GradVarName("Out"),                  \
                   OutputGrad("Out"));                                       \
                                                                             \
      op->SetAttrMap(Attrs());                                               \
                                                                             \
      op->SetOutput(::paddle::framework::GradVarName("X"), InputGrad("X"));  \
      return std::unique_ptr<::paddle::framework::OpDesc>(op);               \
    }                                                                        \
  }
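// Illustrative note (editorial, not from the original source): the generated
// OP_NAME##GradMaker forwards only "Out" and GRAD("Out") to the
// "<kernel_type>_grad" op, so it is meant for activations whose backward pass
// can be computed from the output alone, e.g. relu, where dX = dOut * (Out > 0)
// and the original input X is not needed.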

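// GetKernelType selects the kernel for an activation op: with MKLDNN enabled
// at build time, an op that carries a "use_mkldnn" attribute and for which
// MKLDNN can be used gets the MKLDNN library and layout; otherwise a plain
// kernel is chosen, with the data type taken from the tensor named `name`.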
framework::OpKernelType GetKernelType(const framework::ExecutionContext& ctx,
                                      const framework::OperatorWithKernel& oper,
                                      const std::string& name) {
  framework::LibraryType library{framework::LibraryType::kPlain};
  framework::DataLayout layout = framework::DataLayout::kAnyLayout;
#ifdef PADDLE_WITH_MKLDNN
  auto it = oper.Attrs().find("use_mkldnn");
  if (library == framework::LibraryType::kPlain && it != oper.Attrs().end() &&
      platform::CanMKLDNNBeUsed(ctx)) {
    library = framework::LibraryType::kMKLDNN;
    layout = framework::DataLayout::kMKLDNN;
  }
#endif
  return framework::OpKernelType(
      framework::ToDataType(ctx.Input<framework::Tensor>(name)->type()),
      ctx.GetPlace(), layout, library);
}

class ActivationOp : public framework::OperatorWithKernel {
 public:
  using framework::OperatorWithKernel::OperatorWithKernel;

  void InferShape(framework::InferShapeContext* ctx) const override {
    ctx->SetOutputDim("Out", ctx->GetInputDim("X"));
    ctx->ShareLoD("X", /*->*/ "Out");
  }

 protected:
  framework::OpKernelType GetExpectedKernelType(
      const framework::ExecutionContext& ctx) const override {
    return GetKernelType(ctx, *this, "X");
  }
};

class ActivationOpGrad : public framework::OperatorWithKernel {
 public:
  using framework::OperatorWithKernel::OperatorWithKernel;

  void InferShape(framework::InferShapeContext* ctx) const override {
    ctx->SetOutputDim(framework::GradVarName("X"), ctx->GetInputDim("Out"));
  }

 protected:
  framework::OpKernelType GetExpectedKernelType(
      const framework::ExecutionContext& ctx) const override {
    return GetKernelType(ctx, *this, "Out");
  }
};

__attribute__((unused)) constexpr char SigmoidDoc[] = R"DOC(
Sigmoid Activation Operator

$$out = \frac{1}{1 + e^{-x}}$$

)DOC";

__attribute__((unused)) constexpr char LogSigmoidDoc[] = R"DOC(
Logsigmoid Activation Operator

$$out = \\log \\frac{1}{1 + e^{-x}}$$

)DOC";

__attribute__((unused)) constexpr char ExpDoc[] = R"DOC(
Exp Activation Operator.

$out = e^x$

)DOC";

__attribute__((unused)) constexpr char ReluDoc[] = R"DOC(
Relu Activation Operator.

$out = \max(x, 0)$

)DOC";

__attribute__((unused)) constexpr char TanhDoc[] = R"DOC(
Tanh Activation Operator.

$$out = \\frac{e^{x} - e^{-x}}{e^{x} + e^{-x}}$$

)DOC";

__attribute__((unused)) constexpr char TanhShrinkDoc[] = R"DOC(
TanhShrink Activation Operator.

$$out = x - \\frac{e^{x} - e^{-x}}{e^{x} + e^{-x}}$$

)DOC";

__attribute__((unused)) constexpr char SqrtDoc[] = R"DOC(
Sqrt Activation Operator.

$out = \sqrt{x}$

)DOC";

__attribute__((unused)) constexpr char AbsDoc[] = R"DOC(
Abs Activation Operator.

$out = |x|$

)DOC";

__attribute__((unused)) constexpr char CeilDoc[] = R"DOC(
Ceil Activation Operator.

$out = ceil(x)$

)DOC";

__attribute__((unused)) constexpr char FloorDoc[] = R"DOC(
Floor Activation Operator.

$out = floor(x)$

)DOC";

__attribute__((unused)) constexpr char CosDoc[] = R"DOC(
Cosine Activation Operator.

$out = cos(x)$

)DOC";

__attribute__((unused)) constexpr char SinDoc[] = R"DOC(
Sine Activation Operator.

$out = sin(x)$

)DOC";

__attribute__((unused)) constexpr char RoundDoc[] = R"DOC(
Round Activation Operator.

$out = [x]$

)DOC";

__attribute__((unused)) constexpr char ReciprocalDoc[] = R"DOC(
Reciprocal Activation Operator.

$$out = \\frac{1}{x}$$

)DOC";

__attribute__((unused)) constexpr char LogDoc[] = R"DOC(
Log Activation Operator.

$out = \ln(x)$

Natural logarithm of x.

)DOC";

__attribute__((unused)) constexpr char SquareDoc[] = R"DOC(
Square Activation Operator.

$out = x^2$

)DOC";

__attribute__((unused)) constexpr char SoftplusDoc[] = R"DOC(
Softplus Activation Operator.

$out = \ln(1 + e^{x})$

)DOC";

__attribute__((unused)) constexpr char SoftsignDoc[] = R"DOC(
Softsign Activation Operator.

$$out = \frac{x}{1 + |x|}$$

)DOC";

class LeakyReluOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  void Make() override {
    AddInput("X", "Input of LeakyRelu operator");
    AddOutput("Out", "Output of LeakyRelu operator");
    AddAttr<float>("alpha", "The small negative slope").SetDefault(0.02f);
    AddComment(R"DOC(
LeakyRelu Activation Operator.

$out = \max(x, \alpha * x)$

)DOC");
  }
};

class SoftShrinkOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  void Make() override {
    AddInput("X", "Input of Softshrink operator");
    AddOutput("Out", "Output of Softshrink operator");
    AddAttr<float>("lambda", "non-negative offset").SetDefault(0.5f);
    AddComment(R"DOC(
:strong:`Softshrink Activation Operator`

..  math::
    out = \begin{cases}
         x - \lambda, \text{if } x > \lambda \\
         x + \lambda, \text{if } x < -\lambda \\
         0,  \text{otherwise}
         \end{cases}

)DOC");
  }
};

class HardShrinkOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  void Make() override {
    AddInput("X", "Input of HardShrink operator");
    AddOutput("Out", "Output of HardShrink operator");
    AddAttr<float>("threshold",
                   "The value of threshold for HardShrink. [default: 0.5]")
        .SetDefault(0.5f);
    AddComment(R"DOC(
:strong:`HardShrink activation operator`

..  math::
    out = \begin{cases}
            x, \text{if } x > \lambda \\
            x, \text{if } x < -\lambda \\
            0,  \text{otherwise}
          \end{cases}

)DOC");
  }
};

class BReluOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  void Make() override {
    AddInput("X", "Input of BRelu operator");
    AddOutput("Out", "Output of BRelu operator");
    AddAttr<float>("t_min", "The min marginal value of BRelu")
        .SetDefault(static_cast<float>(0));
    AddAttr<float>("t_max", "The max marginal value of BRelu")
        .SetDefault(static_cast<float>(24));
    AddComment(R"DOC(
BRelu Activation Operator.

$out = \min(\max(x, t_{min}), t_{max})$

)DOC");
  }
};

class SoftReluOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  void Make() override {
    AddInput("X", "Input of SoftRelu operator");
    AddOutput("Out", "Output of SoftRelu operator");
    AddAttr<float>("threshold", "The threshold value of SoftRelu")
        .SetDefault(40.0f);
    AddComment(R"DOC(
SoftRelu Activation Operator.

$out = \ln(1 + \exp(\max(\min(x, threshold), -threshold)))$

)DOC");
  }
};

class ELUOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  void Make() override {
    AddInput("X", "Input of ELU operator");
    AddOutput("Out", "Output of ELU operator");
    AddAttr<float>("alpha", "The alpha value of ELU").SetDefault(1.0f);
    AddComment(R"DOC(
ELU Activation Operator.

Applies the following element-wise computation on the input according to
https://arxiv.org/abs/1511.07289.

$out = \max(0, x) + \min(0, \alpha * (e^x - 1))$

)DOC");
  }
};

class Relu6OpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  void Make() override {
    AddInput("X", "Input of Relu6 operator");
    AddOutput("Out", "Output of Relu6 operator");
    AddAttr<float>("threshold", "The threshold value of Relu6")
        .SetDefault(6.0f);
    AddComment(R"DOC(
Relu6 Activation Operator.

$out = \min(\max(0, x), 6)$

)DOC");
  }
};

class PowOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  void Make() override {
    AddInput("X", "Input of Pow operator");
    AddOutput("Out", "Output of Pow operator");
    AddAttr<float>("factor", "The exponential factor of Pow").SetDefault(1.0f);
    AddComment(R"DOC(
Pow Activation Operator.

$out = x^{factor}$

)DOC");
  }
};

class STanhOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  void Make() override {
    AddInput("X", "Input of STanh operator");
    AddOutput("Out", "Output of STanh operator");
    AddAttr<float>("scale_a", "The scale parameter of a for the input")
        .SetDefault(2.0f / 3.0f);
    AddAttr<float>("scale_b", "The scale parameter of b for the input")
        .SetDefault(1.7159f);
    AddComment(R"DOC(
STanh Activation Operator.

$$out = b * \\frac{e^{a * x} - e^{-a * x}}{e^{a * x} + e^{-a * x}}$$

)DOC");
  }
};

class ThresholdedReluOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  void Make() override {
    AddInput("X", "Input of ThresholdedRelu operator");
    AddOutput("Out", "Output of ThresholdedRelu operator");
    AddAttr<float>("threshold",
                   "The threshold location of activation. [default 1.0].")
        .SetDefault(1.0f);
    AddComment(R"DOC(
:strong:`ThresholdedRelu activation operator`

..  math::

    out = \begin{cases}
             x,  \text{if } x > threshold \\
             0,  \text{otherwise}
          \end{cases}
)DOC");
  }
};

class HardSigmoidOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  void Make() override {
    AddInput("X", "Input of HardSigmoid operator");
    AddOutput("Out", "Output of HardSigmoid operator");
    AddAttr<float>("slope", "Slope for linear approximation of sigmoid")
        .SetDefault(0.2f);
    AddAttr<float>("offset", "Offset for linear approximation of sigmoid")
        .SetDefault(0.5f);
    AddComment(R"DOC(
HardSigmoid Activation Operator.

Segment-wise linear approximation of sigmoid (https://arxiv.org/abs/1603.00391),
which is much faster than sigmoid.

$out = \max(0, \min(1, slope * x + offset))$

The slope should be positive. The offset can be either positive or negative.
The default slope and offset are set according to the above reference.
It is recommended to use the defaults for this activation.

)DOC");
  }
};

class SwishOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  void Make() override {
    AddInput("X", "Input of Swish operator");
    AddOutput("Out", "Output of Swish operator");
    AddAttr<float>("beta", "Constant beta of swish operator").SetDefault(1.0f);
    AddComment(R"DOC(
Swish Activation Operator.

$$out = \\frac{x}{1 + e^{- \beta x}}$$

)DOC");
  }
};

REGISTER_ACTIVATION_OP_MAKER(Sigmoid, SigmoidDoc);
REGISTER_ACTIVATION_OP_MAKER(LogSigmoid, LogSigmoidDoc);
REGISTER_ACTIVATION_OP_MAKER(Exp, ExpDoc);
REGISTER_ACTIVATION_OP_MAKER(Relu, ReluDoc);
REGISTER_ACTIVATION_OP_MAKER(Tanh, TanhDoc);
REGISTER_ACTIVATION_OP_MAKER(TanhShrink, TanhShrinkDoc);
REGISTER_ACTIVATION_OP_MAKER(Sqrt, SqrtDoc);
REGISTER_ACTIVATION_OP_MAKER(Abs, AbsDoc);
REGISTER_ACTIVATION_OP_MAKER(Ceil, CeilDoc);
REGISTER_ACTIVATION_OP_MAKER(Floor, FloorDoc);
REGISTER_ACTIVATION_OP_MAKER(Cos, CosDoc);
REGISTER_ACTIVATION_OP_MAKER(Sin, SinDoc);
REGISTER_ACTIVATION_OP_MAKER(Round, RoundDoc);
REGISTER_ACTIVATION_OP_MAKER(Reciprocal, ReciprocalDoc);
REGISTER_ACTIVATION_OP_MAKER(Log, LogDoc);
REGISTER_ACTIVATION_OP_MAKER(Square, SquareDoc);
REGISTER_ACTIVATION_OP_MAKER(Softplus, SoftplusDoc);
REGISTER_ACTIVATION_OP_MAKER(Softsign, SoftsignDoc);

REGISTER_ACTIVATION_OP_GRAD_MAKER(Sigmoid, sigmoid);
REGISTER_ACTIVATION_OP_GRAD_MAKER(Relu, relu);
REGISTER_ACTIVATION_OP_GRAD_MAKER(Exp, exp);
REGISTER_ACTIVATION_OP_GRAD_MAKER(Tanh, tanh);
REGISTER_ACTIVATION_OP_GRAD_MAKER(Ceil, ceil);
REGISTER_ACTIVATION_OP_GRAD_MAKER(Floor, floor);
REGISTER_ACTIVATION_OP_GRAD_MAKER(Sqrt, sqrt);
REGISTER_ACTIVATION_OP_GRAD_MAKER(SoftRelu, soft_relu);
REGISTER_ACTIVATION_OP_GRAD_MAKER(Relu6, relu6);
REGISTER_ACTIVATION_OP_GRAD_MAKER(Reciprocal, reciprocal);
REGISTER_ACTIVATION_OP_GRAD_MAKER(HardSigmoid, hard_sigmoid);
}  // namespace operators
}  // namespace paddle

namespace ops = paddle::operators;

#define FOR_EACH_INPLACE_OP_FUNCTOR(__macro) \
  __macro(Sigmoid, sigmoid);                 \
  __macro(Relu, relu);                       \
  __macro(Exp, exp);                         \
  __macro(Tanh, tanh);                       \
  __macro(Ceil, ceil);                       \
  __macro(Floor, floor);                     \
  __macro(Sqrt, sqrt);                       \
  __macro(SoftRelu, soft_relu);              \
  __macro(Relu6, relu6);                     \
  __macro(Reciprocal, reciprocal);           \
  __macro(HardSigmoid, hard_sigmoid);

#define FOR_EACH_OP_FUNCTOR(__macro) \
  __macro(LogSigmoid, logsigmoid);   \
  __macro(SoftShrink, softshrink);   \
  __macro(Abs, abs);                 \
  __macro(Cos, cos);                 \
  __macro(Sin, sin);                 \
  __macro(Round, round);             \
  __macro(Log, log);                 \
  __macro(Square, square);           \
  __macro(BRelu, brelu);             \
  __macro(Pow, pow);                 \
  __macro(STanh, stanh);             \
  __macro(Softplus, softplus);       \
  __macro(Softsign, softsign);       \
  __macro(LeakyRelu, leaky_relu);    \
  __macro(TanhShrink, tanh_shrink);  \
  __macro(ELU, elu);                 \
  __macro(HardShrink, hard_shrink);  \
  __macro(Swish, swish);             \
  __macro(ThresholdedRelu, thresholded_relu);

#define REGISTER_INPLACE_ACTIVATION_OP(OP_NAME, KERNEL_TYPE)        \
  REGISTER_OPERATOR(KERNEL_TYPE, ::paddle::operators::ActivationOp, \
                    ::paddle::operators::OP_NAME##OpMaker,          \
                    ::paddle::operators::OP_NAME##GradMaker);       \
  REGISTER_OPERATOR(KERNEL_TYPE##_grad, ::paddle::operators::ActivationOpGrad)

#define REGISTER_ACTIVATION_OP(OP_NAME, KERNEL_TYPE)                    \
  REGISTER_OPERATOR(KERNEL_TYPE, ::paddle::operators::ActivationOp,     \
                    ::paddle::operators::OP_NAME##OpMaker,              \
                    ::paddle::framework::DefaultGradOpDescMaker<true>); \
  REGISTER_OPERATOR(KERNEL_TYPE##_grad, ::paddle::operators::ActivationOpGrad)
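// Illustrative note (editorial, not from the original source): the two
// registration macros above differ only in the grad-op-desc maker they attach.
// REGISTER_INPLACE_ACTIVATION_OP wires in the hand-written OP_NAME##GradMaker,
// whose grad op reads only "Out" and GRAD("Out"), so the forward output may
// safely share the input's buffer; REGISTER_ACTIVATION_OP falls back to the
// generic ::paddle::framework::DefaultGradOpDescMaker<true>.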
#define REGISTER_ACTIVATION_CPU_KERNEL(act_type, functor, grad_functor)   \
  REGISTER_OP_CPU_KERNEL(                                                 \
      act_type, ops::ActivationKernel<paddle::platform::CPUDeviceContext, \
                                      ops::functor<float>>,               \
      ops::ActivationKernel<paddle::platform::CPUDeviceContext,           \
                            ops::functor<double>>);                       \
  REGISTER_OP_CPU_KERNEL(                                                 \
      act_type##_grad,                                                    \
      ops::ActivationGradKernel<paddle::platform::CPUDeviceContext,       \
                                ops::grad_functor<float>>,                \
      ops::ActivationGradKernel<paddle::platform::CPUDeviceContext,       \
                                ops::grad_functor<double>>);
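// Illustrative note (editorial, not from the original source): for each
// (act_type, functor, grad_functor) triple supplied by FOR_EACH_KERNEL_FUNCTOR
// below, the macro above registers float and double CPU kernels for both the
// forward op "act_type" and its gradient op "act_type_grad".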
FOR_EACH_OP_FUNCTOR(REGISTER_ACTIVATION_OP);
FOR_EACH_INPLACE_OP_FUNCTOR(REGISTER_INPLACE_ACTIVATION_OP);
FOR_EACH_KERNEL_FUNCTOR(REGISTER_ACTIVATION_CPU_KERNEL);
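// Editorial assumption (not from the original source): the matching GPU
// kernels are expected to be registered separately, in activation_op.cu,
// reusing the same functor list.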