/* Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#include "paddle/fluid/operators/activation_op.h"
#include <string>
#include "paddle/fluid/operators/mkldnn_activation_op.h"
#include "paddle/fluid/platform/port.h"

namespace paddle {
namespace operators {

using paddle::framework::Tensor;

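// REGISTER_ACTIVATION_OP_MAKER generates an OpProtoAndCheckerMaker subclass for
// a simple activation operator: a single input "X", a single output "Out", the
// "use_mkldnn" and "is_test" attributes, and OP_COMMENT as the operator doc.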
#define REGISTER_ACTIVATION_OP_MAKER(OP_NAME, OP_COMMENT)                \
  class OP_NAME##OpMaker                                                 \
      : public ::paddle::framework::OpProtoAndCheckerMaker {             \
   public:                                                               \
    void Make() override {                                               \
      AddInput("X", "Input of " #OP_NAME " operator");                   \
      AddOutput("Out", "Output of " #OP_NAME " operator");               \
      AddAttr<bool>("use_mkldnn",                                        \
                    "(bool, default false) Only used in mkldnn kernel")  \
          .SetDefault(false);                                            \
      AddAttr<bool>(                                                     \
          "is_test",                                                     \
          "(bool, default false) Set to true for inference only, false " \
          "for training. Some layers may run faster when this is true.") \
          .SetDefault(false);                                            \
      AddComment(#OP_COMMENT);                                           \
    }                                                                    \
  }

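// REGISTER_ACTIVATION_OP_GRAD_MAKER generates a SingleGradOpDescMaker that
// builds the KERNEL_TYPE_grad op from "Out" and GRAD("Out") only, for
// activations whose gradient can be computed from the forward output alone.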
#define REGISTER_ACTIVATION_OP_GRAD_MAKER(OP_NAME, KERNEL_TYPE)              \
  class OP_NAME##GradMaker                                                   \
      : public ::paddle::framework::SingleGradOpDescMaker {                  \
   public:                                                                   \
    using ::paddle::framework::SingleGradOpDescMaker::SingleGradOpDescMaker; \
                                                                             \
   protected:                                                                \
    std::unique_ptr<::paddle::framework::OpDesc> Apply() const override {    \
      auto* op = new ::paddle::framework::OpDesc();                          \
      op->SetType(#KERNEL_TYPE "_grad");                                     \
      op->SetInput("Out", Output("Out"));                                    \
      op->SetInput(::paddle::framework::GradVarName("Out"),                  \
                   OutputGrad("Out"));                                       \
                                                                             \
      op->SetAttrMap(Attrs());                                               \
                                                                             \
      op->SetOutput(::paddle::framework::GradVarName("X"), InputGrad("X"));  \
      return std::unique_ptr<::paddle::framework::OpDesc>(op);               \
    }                                                                        \
  }

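// Resolves the kernel for an activation op: the data type follows the input
// tensor `name`, and the MKLDNN library/layout is chosen when the op carries a
// "use_mkldnn" attribute and MKLDNN can be used for this execution context.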
framework::OpKernelType GetKernelType(const framework::ExecutionContext& ctx,
                                      const framework::OperatorWithKernel& oper,
                                      const std::string& name) {
  framework::LibraryType library{framework::LibraryType::kPlain};
  framework::DataLayout layout = framework::DataLayout::kAnyLayout;
#ifdef PADDLE_WITH_MKLDNN
  auto it = oper.Attrs().find("use_mkldnn");
  if (library == framework::LibraryType::kPlain && it != oper.Attrs().end() &&
      platform::CanMKLDNNBeUsed(ctx)) {
    library = framework::LibraryType::kMKLDNN;
    layout = framework::DataLayout::kMKLDNN;
  }
#endif
  return framework::OpKernelType(
      framework::ToDataType(ctx.Input<framework::Tensor>(name)->type()),
      ctx.GetPlace(), layout, library);
}

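// Forward operator shared by all activations: "Out" takes the dimensions and
// LoD of "X".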
class ActivationOp : public framework::OperatorWithKernel {
 public:
  using framework::OperatorWithKernel::OperatorWithKernel;

  void InferShape(framework::InferShapeContext* ctx) const override {
    ctx->ShareDim("X", /*->*/ "Out");
    ctx->ShareLoD("X", /*->*/ "Out");
  }

 protected:
  framework::OpKernelType GetExpectedKernelType(
      const framework::ExecutionContext& ctx) const override {
    return GetKernelType(ctx, *this, "X");
  }
};

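// Propagates the dtype and variable type of "X" to "Out".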
class ActivationOpInferVarType
    : public framework::PassInDtypeAndVarTypeToOutput {
 protected:
  std::unordered_map<std::string, std::string> GetInputOutputWithSameType()
      const override {
    return std::unordered_map<std::string, std::string>{{"X", /*->*/ "Out"}};
  }
};

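// Backward operator shared by all activations: GRAD("X") takes the dimensions
// and LoD of "Out", and the kernel type is also resolved from "Out".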
class ActivationOpGrad : public framework::OperatorWithKernel {
 public:
  using framework::OperatorWithKernel::OperatorWithKernel;

  void InferShape(framework::InferShapeContext* ctx) const override {
    ctx->ShareDim("Out", framework::GradVarName("X"));
    ctx->ShareLoD("Out", framework::GradVarName("X"));
  }

 protected:
  framework::OpKernelType GetExpectedKernelType(
      const framework::ExecutionContext& ctx) const override {
    return GetKernelType(ctx, *this, "Out");
  }
};

UNUSED constexpr char SigmoidDoc[] = R"DOC(
Sigmoid Activation Operator

$$out = \frac{1}{1 + e^{-x}}$$

)DOC";

UNUSED constexpr char LogSigmoidDoc[] = R"DOC(
Logsigmoid Activation Operator

$$out = \\log \\frac{1}{1 + e^{-x}}$$

)DOC";

UNUSED constexpr char ExpDoc[] = R"DOC(
Exp Activation Operator.

$out = e^x$

)DOC";

UNUSED constexpr char ReluDoc[] = R"DOC(
Relu Activation Operator.

$out = \max(x, 0)$

)DOC";

UNUSED constexpr char TanhDoc[] = R"DOC(
Tanh Activation Operator.

$$out = \\frac{e^{x} - e^{-x}}{e^{x} + e^{-x}}$$

)DOC";

UNUSED constexpr char TanhShrinkDoc[] = R"DOC(
TanhShrink Activation Operator.

$$out = x - \\frac{e^{x} - e^{-x}}{e^{x} + e^{-x}}$$

)DOC";

UNUSED constexpr char SqrtDoc[] = R"DOC(
Sqrt Activation Operator.

$out = \sqrt{x}$

)DOC";

UNUSED constexpr char AbsDoc[] = R"DOC(
Abs Activation Operator.

$out = |x|$

)DOC";

UNUSED constexpr char CeilDoc[] = R"DOC(
Ceil Activation Operator.

$out = ceil(x)$

)DOC";

UNUSED constexpr char FloorDoc[] = R"DOC(
Floor Activation Operator.

$out = floor(x)$

)DOC";

UNUSED constexpr char CosDoc[] = R"DOC(
Cosine Activation Operator.

$out = cos(x)$

)DOC";

UNUSED constexpr char SinDoc[] = R"DOC(
Sine Activation Operator.

$out = sin(x)$

)DOC";

UNUSED constexpr char RoundDoc[] = R"DOC(
Round Activation Operator.

$out = [x]$

)DOC";

UNUSED constexpr char ReciprocalDoc[] = R"DOC(
Reciprocal Activation Operator.

$$out = \\frac{1}{x}$$

)DOC";

UNUSED constexpr char LogDoc[] = R"DOC(
Log Activation Operator.

$out = \ln(x)$

Natural logarithm of x.

)DOC";

UNUSED constexpr char SquareDoc[] = R"DOC(
Square Activation Operator.

$out = x^2$

)DOC";

UNUSED constexpr char SoftplusDoc[] = R"DOC(
Softplus Activation Operator.

$out = \ln(1 + e^{x})$

)DOC";

UNUSED constexpr char SoftsignDoc[] = R"DOC(
Softsign Activation Operator.

$$out = \frac{x}{1 + |x|}$$

)DOC";

class LeakyReluOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  void Make() override {
    AddInput("X", "Input of LeakyRelu operator");
    AddOutput("Out", "Output of LeakyRelu operator");
    AddAttr<float>("alpha", "The small negative slope").SetDefault(0.02f);
    AddComment(R"DOC(
LeakyRelu Activation Operator.

$out = \max(x, \alpha * x)$

)DOC");
  }
};

class SoftShrinkOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  void Make() override {
    AddInput("X", "Input of Softshrink operator");
    AddOutput("Out", "Output of Softshrink operator");
    AddAttr<float>("lambda", "non-negative offset").SetDefault(0.5f);
    AddComment(R"DOC(
:strong:`Softshrink Activation Operator`

..  math::
    out = \begin{cases}
         x - \lambda, \text{if } x > \lambda \\
         x + \lambda, \text{if } x < -\lambda \\
         0,  \text{otherwise}
         \end{cases}

)DOC");
  }
};

class HardShrinkOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  void Make() override {
    AddInput("X", "Input of HardShrink operator");
    AddOutput("Out", "Output of HardShrink operator");
    AddAttr<float>("threshold",
                   "The value of threshold for HardShrink. [default: 0.5]")
        .SetDefault(0.5f);
    AddComment(R"DOC(
:strong:`HardShrink activation operator`

..  math::
    out = \begin{cases}
            x, \text{if } x > \lambda \\
            x, \text{if } x < -\lambda \\
            0,  \text{otherwise}
          \end{cases}

)DOC");
  }
};

class BReluOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  void Make() override {
    AddInput("X", "Input of BRelu operator");
    AddOutput("Out", "Output of BRelu operator");
    AddAttr<float>("t_min", "The min marginal value of BRelu")
        .SetDefault(static_cast<float>(0));
    AddAttr<float>("t_max", "The max marginal value of BRelu")
        .SetDefault(static_cast<float>(24));
    AddComment(R"DOC(
BRelu Activation Operator.

$out = \min(\max(x, t_{min}), t_{max})$

)DOC");
  }
};

class SoftReluOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
Y
Yu Yang 已提交
329
  void Make() override {
330
    AddInput("X", "Input of SoftRelu operator");
F
fengjiayi 已提交
331
    AddOutput("Out", "Output of SoftRelu operator");
332 333
    AddAttr<float>("threshold", "The threshold value of SoftRelu")
        .SetDefault(40.0f);
K
Kexin Zhao 已提交
334
    AddComment(R"DOC(
K
kexinzhao 已提交
335
SoftRelu Activation Operator.
K
Kexin Zhao 已提交
336

$out = \ln(1 + \exp(\max(\min(x, threshold), -threshold)))$

)DOC");
  }
};

class ELUOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  void Make() override {
    AddInput("X", "Input of ELU operator");
    AddOutput("Out", "Output of ELU operator");
    AddAttr<float>("alpha", "The alpha value of ELU").SetDefault(1.0f);
    AddComment(R"DOC(
ELU Activation Operator.

Applies the following element-wise computation on the input according to
https://arxiv.org/abs/1511.07289.

$out = \max(0, x) + \min(0, \alpha * (e^x - 1))$

)DOC");
  }
};

class Relu6OpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  void Make() override {
    AddInput("X", "Input of Relu6 operator");
    AddOutput("Out", "Output of Relu6 operator");
    AddAttr<float>("threshold", "The threshold value of Relu6")
        .SetDefault(6.0f);
    AddComment(R"DOC(
Relu6 Activation Operator.

$out = \min(\max(0, x), 6)$

)DOC");
  }
};

class PowOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  void Make() override {
    AddInput("X", "Input of Pow operator");
    AddOutput("Out", "Output of Pow operator");
    AddAttr<float>("factor", "The exponential factor of Pow").SetDefault(1.0f);
    AddComment(R"DOC(
Pow Activation Operator.

$out = x^{factor}$

)DOC");
  }
};

class STanhOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  void Make() override {
    AddInput("X", "Input of STanh operator");
    AddOutput("Out", "Output of STanh operator");
    AddAttr<float>("scale_a", "The scale parameter of a for the input")
        .SetDefault(2.0f / 3.0f);
    AddAttr<float>("scale_b", "The scale parameter of b for the input")
        .SetDefault(1.7159f);
    AddComment(R"DOC(
STanh Activation Operator.

$$out = b * \\frac{e^{a * x} - e^{-a * x}}{e^{a * x} + e^{-a * x}}$$

)DOC");
  }
};

class ThresholdedReluOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  void Make() override {
    AddInput("X", "Input of ThresholdedRelu operator");
    AddOutput("Out", "Output of ThresholdedRelu operator");
    AddAttr<float>("threshold",
                   "The threshold location of activation. [default 1.0].")
        .SetDefault(1.0f);
    AddComment(R"DOC(
:strong:`ThresholdedRelu activation operator`

..  math::

    out = \begin{cases}
             x,  \text{if } x > threshold \\
             0,  \text{otherwise}
          \end{cases}
)DOC");
  }
};

class HardSigmoidOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  void Make() override {
    AddInput("X", "Input of HardSigmoid operator");
    AddOutput("Out", "Output of HardSigmoid operator");
    AddAttr<float>("slope", "Slope for linear approximation of sigmoid")
        .SetDefault(0.2f);
    AddAttr<float>("offset", "Offset for linear approximation of sigmoid")
        .SetDefault(0.5f);
    AddComment(R"DOC(
HardSigmoid Activation Operator.

Segment-wise linear approximation of sigmoid (https://arxiv.org/abs/1603.00391),
which is much faster than sigmoid.

$out = \max(0, \min(1, slope * x + offset))$

The slope should be positive. The offset can be either positive or negative.
The default slope and offset are set according to the above reference.
It is recommended to use the defaults for this activation.

)DOC");
  }
};

class SwishOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  void Make() override {
    AddInput("X", "Input of Swish operator");
    AddOutput("Out", "Output of Swish operator");
    AddAttr<float>("beta", "Constant beta of swish operator").SetDefault(1.0f);
    AddComment(R"DOC(
Swish Activation Operator.

$$out = \\frac{x}{1 + e^{- \beta x}}$$

)DOC");
  }
};

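// Instantiate an OpMaker for each simple activation, pairing the op name with
// its documentation string defined above.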
REGISTER_ACTIVATION_OP_MAKER(Sigmoid, SigmoidDoc);
REGISTER_ACTIVATION_OP_MAKER(LogSigmoid, LogSigmoidDoc);
REGISTER_ACTIVATION_OP_MAKER(Exp, ExpDoc);
REGISTER_ACTIVATION_OP_MAKER(Relu, ReluDoc);
REGISTER_ACTIVATION_OP_MAKER(Tanh, TanhDoc);
REGISTER_ACTIVATION_OP_MAKER(TanhShrink, TanhShrinkDoc);
REGISTER_ACTIVATION_OP_MAKER(Sqrt, SqrtDoc);
REGISTER_ACTIVATION_OP_MAKER(Abs, AbsDoc);
REGISTER_ACTIVATION_OP_MAKER(Ceil, CeilDoc);
REGISTER_ACTIVATION_OP_MAKER(Floor, FloorDoc);
REGISTER_ACTIVATION_OP_MAKER(Cos, CosDoc);
REGISTER_ACTIVATION_OP_MAKER(Sin, SinDoc);
REGISTER_ACTIVATION_OP_MAKER(Round, RoundDoc);
REGISTER_ACTIVATION_OP_MAKER(Reciprocal, ReciprocalDoc);
REGISTER_ACTIVATION_OP_MAKER(Log, LogDoc);
REGISTER_ACTIVATION_OP_MAKER(Square, SquareDoc);
REGISTER_ACTIVATION_OP_MAKER(Softplus, SoftplusDoc);
REGISTER_ACTIVATION_OP_MAKER(Softsign, SoftsignDoc);

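// Custom grad makers for activations whose backward pass only needs the
// forward output "Out"; the same set is registered as in-place-capable below.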
REGISTER_ACTIVATION_OP_GRAD_MAKER(Sigmoid, sigmoid);
REGISTER_ACTIVATION_OP_GRAD_MAKER(Relu, relu);
REGISTER_ACTIVATION_OP_GRAD_MAKER(Exp, exp);
REGISTER_ACTIVATION_OP_GRAD_MAKER(Tanh, tanh);
REGISTER_ACTIVATION_OP_GRAD_MAKER(Ceil, ceil);
REGISTER_ACTIVATION_OP_GRAD_MAKER(Floor, floor);
REGISTER_ACTIVATION_OP_GRAD_MAKER(Sqrt, sqrt);
REGISTER_ACTIVATION_OP_GRAD_MAKER(SoftRelu, soft_relu);
REGISTER_ACTIVATION_OP_GRAD_MAKER(Relu6, relu6);
REGISTER_ACTIVATION_OP_GRAD_MAKER(Reciprocal, reciprocal);
REGISTER_ACTIVATION_OP_GRAD_MAKER(HardSigmoid, hard_sigmoid);
}  // namespace operators
}  // namespace paddle

namespace ops = paddle::operators;

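// X-macro lists: FOR_EACH_INPLACE_OP_FUNCTOR covers activations registered
// with their custom Out-based grad makers, FOR_EACH_OP_FUNCTOR covers the
// rest, which fall back to DefaultGradOpDescMaker.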
#define FOR_EACH_INPLACE_OP_FUNCTOR(__macro) \
  __macro(Sigmoid, sigmoid);                 \
  __macro(Relu, relu);                       \
  __macro(Exp, exp);                         \
  __macro(Tanh, tanh);                       \
  __macro(Ceil, ceil);                       \
  __macro(Floor, floor);                     \
  __macro(Sqrt, sqrt);                       \
  __macro(SoftRelu, soft_relu);              \
  __macro(Relu6, relu6);                     \
  __macro(Reciprocal, reciprocal);           \
  __macro(HardSigmoid, hard_sigmoid);

#define FOR_EACH_OP_FUNCTOR(__macro) \
  __macro(LogSigmoid, logsigmoid);   \
  __macro(SoftShrink, softshrink);   \
  __macro(Abs, abs);                 \
  __macro(Cos, cos);                 \
  __macro(Sin, sin);                 \
  __macro(Round, round);             \
  __macro(Log, log);                 \
  __macro(Square, square);           \
  __macro(BRelu, brelu);             \
  __macro(Pow, pow);                 \
  __macro(STanh, stanh);             \
  __macro(Softplus, softplus);       \
  __macro(Softsign, softsign);       \
  __macro(LeakyRelu, leaky_relu);    \
  __macro(TanhShrink, tanh_shrink);  \
  __macro(ELU, elu);                 \
  __macro(HardShrink, hard_shrink);  \
  __macro(Swish, swish);             \
  __macro(ThresholdedRelu, thresholded_relu);

#define REGISTER_INPLACE_ACTIVATION_OP(OP_NAME, KERNEL_TYPE)        \
  REGISTER_OPERATOR(KERNEL_TYPE, ::paddle::operators::ActivationOp, \
                    ::paddle::operators::OP_NAME##OpMaker,          \
                    ::paddle::operators::ActivationOpInferVarType,  \
                    ::paddle::operators::OP_NAME##GradMaker);       \
  REGISTER_OPERATOR(KERNEL_TYPE##_grad, ::paddle::operators::ActivationOpGrad)

#define REGISTER_ACTIVATION_OP(OP_NAME, KERNEL_TYPE)                    \
  REGISTER_OPERATOR(KERNEL_TYPE, ::paddle::operators::ActivationOp,     \
                    ::paddle::operators::OP_NAME##OpMaker,              \
                    ::paddle::operators::ActivationOpInferVarType,      \
                    ::paddle::framework::DefaultGradOpDescMaker<true>); \
  REGISTER_OPERATOR(KERNEL_TYPE##_grad, ::paddle::operators::ActivationOpGrad)

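// Registers float and double CPU kernels for an activation and its gradient.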
#define REGISTER_ACTIVATION_CPU_KERNEL(act_type, functor, grad_functor)   \
  REGISTER_OP_CPU_KERNEL(                                                 \
      act_type, ops::ActivationKernel<paddle::platform::CPUDeviceContext, \
                                      ops::functor<float>>,               \
      ops::ActivationKernel<paddle::platform::CPUDeviceContext,           \
                            ops::functor<double>>);                       \
  REGISTER_OP_CPU_KERNEL(                                                 \
      act_type##_grad,                                                    \
      ops::ActivationGradKernel<paddle::platform::CPUDeviceContext,       \
                                ops::grad_functor<float>>,                \
      ops::ActivationGradKernel<paddle::platform::CPUDeviceContext,       \
                                ops::grad_functor<double>>);

FOR_EACH_OP_FUNCTOR(REGISTER_ACTIVATION_OP);
FOR_EACH_INPLACE_OP_FUNCTOR(REGISTER_INPLACE_ACTIVATION_OP);
FOR_EACH_KERNEL_FUNCTOR(REGISTER_ACTIVATION_CPU_KERNEL);