/* Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
    http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#pragma once
#include <glog/logging.h>
#include <algorithm>
#include <string>
#include <unordered_set>
#include <utility>
#include <vector>

#include <cmath>
#ifndef _USE_MATH_DEFINES
#define _USE_MATH_DEFINES
#endif

#include "paddle/fluid/framework/eigen.h"
#include "paddle/fluid/framework/op_registry.h"
#include "paddle/fluid/operators/detail/safe_ref.h"
#include "paddle/fluid/operators/math/blas.h"
#include "paddle/fluid/platform/float16.h"

#ifdef PADDLE_WITH_MKLDNN
#include "paddle/fluid/platform/mkldnn_helper.h"
#endif

namespace paddle {
namespace operators {

/* An ugly global variable is used here so that the set of in-place ops can be
   kept in sync with the Python layer side; please refer to layer_helper.py
   for details.
 */
static std::unordered_set<std::string> InplaceOpSet = {
    "sigmoid", "exp",        "relu",  "tanh",      "sqrt",         "ceil",
    "floor",   "reciprocal", "relu6", "soft_relu", "hard_sigmoid",
};

/* The following operators can be used to process SelectedRows, because their
 * output for a zero input is zero as well (e.g. abs(0) == 0, sqrt(0) == 0),
 * so rows that are not stored stay implicitly zero.
 */
static std::unordered_set<std::string> CanBeUsedBySelectedRows = {
    "abs", "abs_grad", "square", "square_grad", "sqrt", "sqrt_grad"};

static bool IsInplace(std::string op) { return InplaceOpSet.count(op); }
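// A gradient functor opts into in-place execution by overriding Inplace(),
// e.g. `bool Inplace() const { return IsInplace("relu"); }`; when it returns
// true, ActivationGradKernel below skips reading X and reuses dX as the x
// placeholder.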

template <typename DeviceContext, typename Functor>
class ActivationKernel
    : public framework::OpKernel<typename Functor::ELEMENT_TYPE> {
 public:
  using T = typename Functor::ELEMENT_TYPE;

  void Compute(const framework::ExecutionContext& context) const override {
    auto x_var = context.InputVar("X");
    auto out_var = context.OutputVar("Out");
    PADDLE_ENFORCE(x_var != nullptr,
                   "Cannot get input Variable X, variable name = %s",
                   context.op().Input("X"));
    PADDLE_ENFORCE(out_var != nullptr,
                   "Cannot get output Variable Out, variable name = %s",
                   context.op().Output("Out"));

    framework::Tensor X, *Out;

    if (CanBeUsedBySelectedRows.count(context.op().Type())) {
      X = detail::Ref(
          paddle::framework::GetLoDTensorOrSelectedRowsValueFromVar(*x_var),
          "Cannot get input Tensor X, variable name = %s",
          context.op().Input("X"));
      Out = paddle::framework::GetMutableLoDTensorOrSelectedRowsValueFromVar(
          out_var);
    } else {
      X = detail::Ref(context.Input<framework::Tensor>("X"),
                      "Cannot get input Tensor X, variable name = %s",
                      context.op().Input("X"));
      Out = context.Output<framework::Tensor>("Out");
    }

    PADDLE_ENFORCE(Out != nullptr,
                   "Cannot get output tensor Out, variable name = %s",
                   context.op().Output("Out"));

    Out->mutable_data<T>(context.GetPlace());
    auto x = framework::EigenVector<T>::Flatten(X);
    auto out = framework::EigenVector<T>::Flatten(*Out);
    auto* place =
        context.template device_context<DeviceContext>().eigen_device();
    Functor functor;

    auto attrs = functor.GetAttrs();
    for (auto& attr : attrs) {
      *attr.second = context.Attr<float>(attr.first);
    }
    functor(*place, x, out);
  }
};

template <typename DeviceContext, typename Functor>
class ActivationGradKernel
    : public framework::OpKernel<typename Functor::ELEMENT_TYPE> {
 public:
  using T = typename Functor::ELEMENT_TYPE;
  void Compute(const framework::ExecutionContext& context) const override {
    auto out_var = context.InputVar("Out");
    auto out_grad_var = context.InputVar(framework::GradVarName("Out"));
    auto x_grad_var = context.OutputVar(framework::GradVarName("X"));
    PADDLE_ENFORCE(out_var != nullptr,
                   "Cannot get input Variable Out, variable name = %s",
                   context.op().Input("Out"));
    PADDLE_ENFORCE(out_grad_var != nullptr,
                   "Cannot get input Variable %s, variable name = %s",
                   framework::GradVarName("Out"),
                   context.op().Input(framework::GradVarName("Out")));
    PADDLE_ENFORCE(x_grad_var != nullptr,
                   "Cannot get output Variable %s, variable name = %s",
                   framework::GradVarName("X"),
                   context.op().Output(framework::GradVarName("X")));

    framework::Tensor Out, dOut, *dX;
    if (CanBeUsedBySelectedRows.count(context.op().Type())) {
      Out = detail::Ref(
          paddle::framework::GetLoDTensorOrSelectedRowsValueFromVar(*out_var),
          "Cannot get input Tensor Out, variable name = %s",
          context.op().Input("Out"));
      dOut =
          detail::Ref(paddle::framework::GetLoDTensorOrSelectedRowsValueFromVar(
                          *out_grad_var),
                      "Cannot get input Tensor %s, variable name = %s",
                      framework::GradVarName("Out"),
                      context.op().Input(framework::GradVarName("Out")));
      dX = paddle::framework::GetMutableLoDTensorOrSelectedRowsValueFromVar(
          x_grad_var);
    } else {
      Out = detail::Ref(context.Input<framework::Tensor>("Out"),
                        "Cannot get input Tensor Out, variable name = %s",
                        context.op().Input("Out"));
      dOut = detail::Ref(
          context.Input<framework::Tensor>(framework::GradVarName("Out")),
          "Cannot get input Tensor %s, variable name = %s",
          framework::GradVarName("Out"),
          context.op().Input(framework::GradVarName("Out")));
      dX = context.Output<framework::Tensor>(framework::GradVarName("X"));
    }
    PADDLE_ENFORCE(dX != nullptr,
                   "Cannot get output tensor %s, variable name = %s",
                   framework::GradVarName("X"),
                   context.op().Output(framework::GradVarName("X")));
    dX->mutable_data<T>(context.GetPlace());

    auto dout = framework::EigenVector<T>::Flatten(dOut);
    auto out = framework::EigenVector<T>::Flatten(Out);
    auto dx = framework::EigenVector<T>::Flatten(*dX);
    auto* place =
        context.template device_context<DeviceContext>().eigen_device();
    Functor functor;
    auto attrs = functor.GetAttrs();
    for (auto& attr : attrs) {
      *attr.second = context.Attr<float>(attr.first);
    }
    bool inplace = functor.Inplace();
    if (!inplace) {
      auto x_var = context.InputVar("X");
      PADDLE_ENFORCE(x_var != nullptr,
                     "Cannot get input tensor X, variable name = %s",
                     context.op().Input("X"));
      framework::Tensor X;
      if (CanBeUsedBySelectedRows.count(context.op().Type())) {
        X = detail::Ref(
            paddle::framework::GetLoDTensorOrSelectedRowsValueFromVar(*x_var));
      } else {
        X = detail::Ref(context.Input<framework::Tensor>("X"));
      }

      auto x = framework::EigenVector<T>::Flatten(X);
      functor(*place, x, out, dout, dx);
    } else {
      VLOG(10) << " Inplace activation ";
      auto x = framework::EigenVector<T>::Flatten(*dX);
      functor(*place, x, out, dout, dx);
    }
  }
};

template <typename T>
struct BaseActivationFunctor {
  using ELEMENT_TYPE = T;

  using AttrPair = std::vector<std::pair<const char*, float*>>;

  AttrPair GetAttrs() { return AttrPair(); }

  /* NOTE(*): Out can reuse the memory of X only if the gradient does not
     depend on X. For example, the sigmoid op's gradient only involves out,
     so its output can reuse the input memory; the abs op's gradient uses x,
     so it cannot be computed in place.
   */
  bool Inplace() const { return false; }
};
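// A functor that takes attributes follows the pattern used throughout this
// file, e.g.
//   float threshold;
//   typename BaseActivationFunctor<T>::AttrPair GetAttrs() {
//     return {{"threshold", &threshold}};
//   }
// The kernels above copy each op attribute into the registered float* before
// invoking operator().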

// sigmoid(x) = 1 / (1 + exp(-x))
template <typename T>
struct SigmoidFunctor : public BaseActivationFunctor<T> {
  template <typename Device, typename X, typename Out>
  void operator()(Device d, X x, Out out) const {
    out.device(d) = static_cast<T>(1) / (static_cast<T>(1) + (-x).exp());
  }
};

template <typename T>
struct SigmoidGradFunctor : public BaseActivationFunctor<T> {
  bool Inplace() const { return IsInplace("sigmoid"); }
  template <typename Device, typename X, typename Out, typename dOut,
            typename dX>
  void operator()(Device d, X x, Out out, dOut dout, dX dx) const {
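    // d/dx sigmoid(x) = sigmoid(x) * (1 - sigmoid(x)) = out * (1 - out)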
    dx.device(d) = dout * out * (static_cast<T>(1) - out);
  }
};

// Originally: logsigmoid(x) = -log (1 + exp(-x))
// For numerical stability, we can use the log-sum-exp trick:
// https://hips.seas.harvard.edu/blog/2013/01/09/computing-log-sum-exp/
// We can rewrite the above equation as:
// out = -log( exp(0) + exp(-x)) [since exp(0) = 1]
//   = -log( exp(max(-x, 0) - max(-x, 0)) + exp(-x + max(-x, 0) - max(-x, 0)))
//   = -log( exp(max(-x, 0)) * exp(-max(-x, 0)) + exp(max(-x, 0)) * exp(-x -
//           max(-x, 0)))
//   = -log( exp(max(-x, 0)) * (exp(-max(-x, 0)) + exp(-x - max(-x, 0))))
//   = -(log(exp(max(-x, 0))) + log(exp(-max(-x, 0)) + exp(-x - max(-x, 0))))
//
// Hence, logsigmoid(x) = - (max(-x, 0) + log(exp(-max(-x, 0))
// + exp(-x - max(-x, 0))))
template <typename T>
struct LogSigmoidFunctor : public BaseActivationFunctor<T> {
  template <typename Device, typename X, typename Out>
  void operator()(Device d, X x, Out out) const {
    auto temp = (-x).cwiseMax(static_cast<T>(0));  // temp = max(-x, 0)
    out.device(d) = -temp - (((-temp).exp() + (-x - temp).exp()).log());
  }
};

// Originally: f' = exp(-x) / (1 + exp(-x))
// For numerical stability: f' = exp(-x - max(-x, 0)) / (exp(-max(-x, 0)) +
// exp(-x - max(-x, 0)))
template <typename T>
struct LogSigmoidGradFunctor : public BaseActivationFunctor<T> {
  template <typename Device, typename X, typename Out, typename dOut,
            typename dX>
  void operator()(Device d, X x, Out out, dOut dout, dX dx) const {
    auto temp = (-x).cwiseMax(static_cast<T>(0));  // temp = max(-x, 0)
    dx.device(d) =
        dout * ((-x - temp).exp() / ((-temp).exp() + (-x - temp).exp()));
  }
};

// exp(x) = e^x
template <typename T>
struct ExpFunctor : public BaseActivationFunctor<T> {
  template <typename Device, typename X, typename Out>
  void operator()(Device d, X x, Out out) const {
    out.device(d) = x.exp();
  }
};

template <typename T>
struct ExpGradFunctor : public BaseActivationFunctor<T> {
  bool Inplace() const { return IsInplace("exp"); }
  template <typename Device, typename X, typename Out, typename dOut,
            typename dX>
  void operator()(Device d, X x, Out out, dOut dout, dX dx) const {
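    // d/dx exp(x) = exp(x) = out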
    dx.device(d) = dout * out;
  }
};

// relu(x) = max(x, 0)
template <typename T>
struct ReluFunctor : public BaseActivationFunctor<T> {
  template <typename Device, typename X, typename Out>
  void operator()(Device d, X x, Out out) const {
    out.device(d) = x.cwiseMax(static_cast<T>(0));
  }
};

template <typename T>
struct ReluGradFunctor : public BaseActivationFunctor<T> {
  bool Inplace() const { return IsInplace("relu"); }
  template <typename Device, typename X, typename Out, typename dOut,
            typename dX>
  void operator()(Device d, X x, Out out, dOut dout, dX dx) const {
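    // relu'(x) = 1 for x > 0 and 0 otherwise; out > 0 exactly where x > 0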
    dx.device(d) = dout * (out > static_cast<T>(0)).template cast<T>();
  }
};

// gelu(x) = 0.5 * x *  (1 + erf(x / sqrt(2)))
template <typename T>
struct GeluFunctor : public BaseActivationFunctor<T> {
  template <typename Device, typename X, typename Out>
  void operator()(Device d, X x, Out out) const {
// Because the execution or device context cannot be delivered here, the macro
// check is kept for NVCC.
#if defined(PADDLE_WITH_MKLML) && !defined(_WIN32) && !defined(__APPLE__) && \
    !defined(__OSX__) && !defined(PADDLE_WITH_CUDA)
    auto x_data = x.data();
    auto out_data = out.data();
    int n = std::min(x.size(), out.size());

    std::memset(out_data, 0, n * sizeof(T));
    math::CBlas<T>::AXPY(n, static_cast<T>(M_SQRT1_2), x_data, 1, out_data, 1);
    math::CBlas<T>::VMERF(n, out_data, out_data, VML_LA);
    for (int i = 0; i < n; i++) {
      out_data[i] += static_cast<T>(1);
    }
    math::CBlas<T>::VMUL(n, x_data, out_data, out_data);
    for (int i = 0; i < n; i++) {
      out_data[i] *= static_cast<T>(0.5);
    }
#else
    auto temp = (x * static_cast<T>(M_SQRT1_2)).erf();
    out.device(d) = x * static_cast<T>(0.5) * (static_cast<T>(1) + temp);
#endif
  }
};

template <typename T>
struct GeluGradFunctor : BaseActivationFunctor<T> {
  template <typename Device, typename X, typename Out, typename dOut,
            typename dX>
  void operator()(Device d, X x, Out out, dOut dout, dX dx) const {
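    // gelu'(x) = 0.5 * (1 + erf(x / sqrt(2))) + x * exp(-x^2 / 2) / sqrt(2 * pi);
    // "first" and "second" below are these two terms
    // (0.5 * M_2_SQRTPI * M_SQRT1_2 == 1 / sqrt(2 * pi)).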
    auto first = static_cast<T>(0.5) *
                 (static_cast<T>(1) + ((x * static_cast<T>(M_SQRT1_2)).erf()));

    auto second = static_cast<T>(0.5 * M_2_SQRTPI * M_SQRT1_2) * x *
                  (-static_cast<T>(0.5) * x.square()).exp();
    dx.device(d) = dout * (first + second);
  }
};

// tanh(x) = (exp(x) - exp(-x)) / (exp(x) + exp(-x))
template <typename T>
struct TanhFunctor : public BaseActivationFunctor<T> {
  template <typename Device, typename X, typename Out>
  void operator()(Device d, X x, Out out) const {
    out.device(d) = x.tanh();
  }
};

template <typename T>
struct TanhGradFunctor : public BaseActivationFunctor<T> {
  bool Inplace() const { return IsInplace("tanh"); }
  template <typename Device, typename X, typename Out, typename dOut,
            typename dX>
  void operator()(Device d, X x, Out out, dOut dout, dX dx) const {
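    // d/dx tanh(x) = 1 - tanh(x)^2 = 1 - out * out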
    dx.device(d) = dout * (static_cast<T>(1) - out * out);
  }
};

// tanhshrink(x) = x - tanh(x)
// where tanh(x) = (exp(x) - exp(-x)) / (exp(x) + exp(-x))
template <typename T>
struct TanhShrinkFunctor : public BaseActivationFunctor<T> {
  template <typename Device, typename X, typename Out>
  void operator()(Device d, X x, Out out) const {
    out.device(d) = x - x.tanh();
  }
};

template <typename T>
struct TanhShrinkGradFunctor : public BaseActivationFunctor<T> {
  template <typename Device, typename X, typename Out, typename dOut,
            typename dX>
  void operator()(Device d, X x, Out out, dOut dout, dX dx) const {
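    // d/dx (x - tanh(x)) = 1 - (1 - tanh(x)^2) = tanh(x)^2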
    dx.device(d) = dout * (x.tanh() * x.tanh());
  }
};

// hard_shrink(x) = x, if x > threshold or x < -threshold; 0 otherwise
template <typename T>
struct HardShrinkFunctor : public BaseActivationFunctor<T> {
  float threshold;

  typename BaseActivationFunctor<T>::AttrPair GetAttrs() {
    return {{"threshold", &threshold}};
  }
  template <typename Device, typename X, typename Out>
  void operator()(Device d, X x, Out out) const {
    auto temp1 = (x < static_cast<T>(threshold * -1)).template cast<T>().eval();
    auto temp2 = (x > static_cast<T>(threshold)).template cast<T>().eval();
    out.device(d) = x * (temp1 + temp2);
  }
};

template <typename T>
struct HardShrinkGradFunctor : public BaseActivationFunctor<T> {
  float threshold;

  typename BaseActivationFunctor<T>::AttrPair GetAttrs() {
    return {{"threshold", &threshold}};
  }

  template <typename Device, typename X, typename Out, typename dOut,
            typename dX>
  void operator()(Device d, X x, Out out, dOut dout, dX dx) const {
    auto temp1 = (x < static_cast<T>(threshold * -1)).template cast<T>().eval();
    auto temp2 = (x > static_cast<T>(threshold)).template cast<T>().eval();
    dx.device(d) = dout * (temp1 + temp2).template cast<T>();
  }
};

// softshrink(x) = x - lambda, if x > lambda; x + lambda, if x < -lambda; 0
// otherwise
template <typename T>
struct SoftShrinkFunctor : public BaseActivationFunctor<T> {
  float lambda;
  typename BaseActivationFunctor<T>::AttrPair GetAttrs() {
    return {{"lambda", &lambda}};
  }

  template <typename Device, typename X, typename Out>
  void operator()(Device d, X x, Out out) const {
    auto lambdaT = static_cast<T>(lambda);
    auto temp1 = (x > lambdaT).template cast<T>().eval();
    auto temp2 = (x < -lambdaT).template cast<T>().eval();
    out.device(d) = temp1 * (x - lambdaT) + temp2 * (x + lambdaT);
  }
};

template <typename T>
struct SoftShrinkGradFunctor : public BaseActivationFunctor<T> {
  float lambda;
  typename BaseActivationFunctor<T>::AttrPair GetAttrs() {
    return {{"lambda", &lambda}};
  }
  template <typename Device, typename X, typename Out, typename dOut,
            typename dX>
  void operator()(Device d, X x, Out out, dOut dout, dX dx) const {
    auto lambdaT = static_cast<T>(lambda);
    auto temp1 = (x > lambdaT).template cast<T>().eval();
    auto temp2 = (x < -lambdaT).template cast<T>().eval();
    dx.device(d) = dout * (temp1 + temp2).template cast<T>();
  }
};

// sqrt(x) = x^(1/2)
template <typename T>
struct SqrtFunctor : public BaseActivationFunctor<T> {
  template <typename Device, typename X, typename Out>
  void operator()(Device d, X x, Out out) const {
    out.device(d) = x.sqrt();
  }
};

template <typename T>
struct SqrtGradFunctor : public BaseActivationFunctor<T> {
  bool Inplace() const { return IsInplace("sqrt"); }
  template <typename Device, typename X, typename Out, typename dOut,
            typename dX>
  void operator()(Device d, X x, Out out, dOut dout, dX dx) const {
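    // d/dx sqrt(x) = 1 / (2 * sqrt(x)) = 0.5 / out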
    dx.device(d) = static_cast<T>(0.5) * dout / out;
  }
};

// ceil(x) = ceiling(x)
template <typename T>
struct CeilFunctor : public BaseActivationFunctor<T> {
  template <typename Device, typename X, typename Out>
  void operator()(Device d, X x, Out out) const {
    out.device(d) = x.ceil();
  }
};

template <typename T>
struct ZeroGradFunctor : public BaseActivationFunctor<T> {
  bool Inplace() const { return IsInplace("ceil"); }
  template <typename Device, typename X, typename Out, typename dOut,
            typename dX>
  void operator()(Device d, X x, Out out, dOut dout, dX dx) const {
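    // ceil/floor/round are piecewise constant, so their gradient is zero
    // almost everywhere.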
    dx.device(d) = static_cast<T>(0) / out;
  }
};

// floor(x) = flooring(x)
template <typename T>
struct FloorFunctor : public BaseActivationFunctor<T> {
  template <typename Device, typename X, typename Out>
  void operator()(Device d, X x, Out out) const {
    out.device(d) = x.floor();
  }
};

template <typename T>
struct Sine {
  HOSTDEVICE T operator()(const T& val) const { return sin(val); }
};

template <>
struct Sine<platform::float16> {
  HOSTDEVICE platform::float16 operator()(const platform::float16& val) const {
    return platform::float16(sin(static_cast<float>(val)));
  }
};

template <typename T>
struct Cosine {
  HOSTDEVICE T operator()(const T& val) const { return cos(val); }
};

template <>
struct Cosine<platform::float16> {
  HOSTDEVICE platform::float16 operator()(const platform::float16& val) const {
    return platform::float16(cos(static_cast<float>(val)));
  }
};

// cosine'(x) = -sin(x)
template <typename T>
struct CosGradFunctor : public BaseActivationFunctor<T> {
  template <typename Device, typename X, typename Out, typename dOut,
            typename dX>
  void operator()(Device d, X x, Out out, dOut dout, dX dx) const {
    dx.device(d) = -dout * x.unaryExpr(Sine<T>());
  }
};

// cosine(x) = cos(x)
template <typename T>
struct CosFunctor : public BaseActivationFunctor<T> {
  template <typename Device, typename X, typename Out>
  void operator()(Device d, X x, Out out) const {
    out.device(d) = x.unaryExpr(Cosine<T>());
  }
};

// sine'(x) = cos(x)
template <typename T>
struct SinGradFunctor : public BaseActivationFunctor<T> {
  template <typename Device, typename X, typename Out, typename dOut,
            typename dX>
  void operator()(Device d, X x, Out out, dOut dout, dX dx) const {
    dx.device(d) = dout * x.unaryExpr(Cosine<T>());
  }
};

// sine(x) = sin(x)
template <typename T>
struct SinFunctor : public BaseActivationFunctor<T> {
  template <typename Device, typename X, typename Out>
  void operator()(Device d, X x, Out out) const {
    out.device(d) = x.unaryExpr(Sine<T>());
  }
};

// round(x) = [x]
template <typename T>
struct RoundFunctor : public BaseActivationFunctor<T> {
  template <typename Device, typename X, typename Out>
  void operator()(Device d, X x, Out out) const {
    out.device(d) = x.round();
  }
};

// abs(x) = |x|
template <typename T>
struct AbsFunctor : public BaseActivationFunctor<T> {
  template <typename Device, typename X, typename Out>
  void operator()(Device d, X x, Out out) const {
    out.device(d) = x.abs();
  }
};

template <typename T>
struct AbsGradFunctor : public BaseActivationFunctor<T> {
  template <typename Device, typename X, typename Out, typename dOut,
            typename dX>
  void operator()(Device d, X x, Out out, dOut dout, dX dx) const {
    dx.device(d) = dout * x.sign();
  }
};

// reciprocal(x) = 1 / x
template <typename T>
struct ReciprocalFunctor : public BaseActivationFunctor<T> {
  template <typename Device, typename X, typename Out>
  void operator()(Device d, X x, Out out) const {
    out.device(d) = static_cast<T>(1) / x;
  }
};

template <typename T>
struct ReciprocalGradFunctor : public BaseActivationFunctor<T> {
  bool Inplace() const { return IsInplace("reciprocal"); }
  template <typename Device, typename X, typename Out, typename dOut,
            typename dX>
  void operator()(Device d, X x, Out out, dOut dout, dX dx) const {
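    // d/dx (1 / x) = -1 / x^2 = -out * out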
    dx.device(d) = dout * static_cast<T>(-1) * out * out;
  }
};

// log(x) = natural logarithm of x
template <typename T>
struct LogFunctor : public BaseActivationFunctor<T> {
  template <typename Device, typename X, typename Out>
  void operator()(Device d, X x, Out out) const {
    out.device(d) = x.log();
  }
};

template <typename T>
struct LogGradFunctor : public BaseActivationFunctor<T> {
  template <typename Device, typename X, typename Out, typename dOut,
            typename dX>
  void operator()(Device d, X x, Out out, dOut dout, dX dx) const {
    dx.device(d) = dout * (static_cast<T>(1) / x);
  }
};

// square(x) = x^2
template <typename T>
struct SquareFunctor : public BaseActivationFunctor<T> {
  template <typename Device, typename X, typename Out>
  void operator()(Device d, X x, Out out) const {
    out.device(d) = x.square();
  }
};

template <typename T>
struct SquareGradFunctor : public BaseActivationFunctor<T> {
  template <typename Device, typename X, typename Out, typename dOut,
            typename dX>
  void operator()(Device d, X x, Out out, dOut dout, dX dx) const {
    dx.device(d) = dout * static_cast<T>(2) * x;
  }
};

template <typename T>
struct BReluFunctor : public BaseActivationFunctor<T> {
  float t_min;
  float t_max;

  // NOTE: GetAttrs here deliberately hides `BaseActivationFunctor<T>::GetAttrs`
  // rather than overriding it polymorphically, for speed.
  typename BaseActivationFunctor<T>::AttrPair GetAttrs() {
    return {{"t_min", &t_min}, {"t_max", &t_max}};
  }

  template <typename Device, typename X, typename Out>
  void operator()(Device d, X x, Out out) const {
    out.device(d) =
        x.cwiseMax(static_cast<T>(t_min)).cwiseMin(static_cast<T>(t_max));
  }
};

template <typename T>
struct BReluGradFunctor : public BaseActivationFunctor<T> {
  float t_min;
  float t_max;
  typename BaseActivationFunctor<T>::AttrPair GetAttrs() {
    return {{"t_min", &t_min}, {"t_max", &t_max}};
  }
  template <typename Device, typename X, typename Out, typename dOut,
            typename dX>
  void operator()(Device d, X x, Out out, dOut dout, dX dx) const {
    dx.device(d) = dout *
                   ((x > static_cast<T>(t_min)) * (x < static_cast<T>(t_max)))
                       .template cast<T>();
  }
};

// relu6(x) = min(max(0, x), 6)
template <typename T>
struct Relu6Functor : public BaseActivationFunctor<T> {
  float threshold;

  typename BaseActivationFunctor<T>::AttrPair GetAttrs() {
    return {{"threshold", &threshold}};
  }

  template <typename Device, typename X, typename Out>
  void operator()(Device d, X x, Out out) const {
    out.device(d) =
        x.cwiseMax(static_cast<T>(0)).cwiseMin(static_cast<T>(threshold));
  }
};

template <typename T>
struct Relu6GradFunctor : public BaseActivationFunctor<T> {
  float threshold;
  typename BaseActivationFunctor<T>::AttrPair GetAttrs() {
    return {{"threshold", &threshold}};
  }
  bool Inplace() const { return IsInplace("relu6"); }
  template <typename Device, typename X, typename Out, typename dOut,
            typename dX>
  void operator()(Device d, X x, Out out, dOut dout, dX dx) const {
    dx.device(d) =
        dout *
        ((out > static_cast<T>(0)) * (out < static_cast<T>(threshold)))
            .template cast<T>();
  }
};

// softplus(x) = log(1 + exp(x))
// When x is a very large positive number, exp(x) may explode to inf,
// so we use the trick below for numerical stability
// https://hips.seas.harvard.edu/blog/2013/01/09/computing-log-sum-exp/
// Then: softplus(x) = max(x, 0) + log(exp(-max(x, 0)) + exp(x - max(x, 0)))
template <typename T>
struct SoftplusFunctor : public BaseActivationFunctor<T> {
  template <typename Device, typename X, typename Out>
  void operator()(Device d, X x, Out out) {
    auto temp = x.cwiseMax(static_cast<T>(0));  // temp = max(x, 0)
    out.device(d) = temp + (((-temp).exp() + (x - temp).exp()).log());
  }
};

// d(softplus(x))/dx = exp(x) / (1 + exp(x))
// For numerical stability:
// d(softplus(x))/dx = exp(x - max(x, 0)) / (exp(-max(x, 0)) +
// exp(x - max(x, 0)))
template <typename T>
struct SoftplusGradFunctor : public BaseActivationFunctor<T> {
  template <typename Device, typename X, typename Out, typename dOut,
            typename dX>
  void operator()(Device d, X x, Out out, dOut dout, dX dx) {
    auto temp = x.cwiseMax(static_cast<T>(0));  // temp = max(x, 0)
    dx.device(d) =
        dout * ((x - temp).exp() / ((-temp).exp() + (x - temp).exp()));
  }
};

// softsign(x) = x / (1 + |x|)
template <typename T>
struct SoftsignFunctor : public BaseActivationFunctor<T> {
  template <typename Device, typename X, typename Out>
  void operator()(Device d, X x, Out out) {
    out.device(d) = x / (static_cast<T>(1) + x.abs());
  }
};

// d(softsign(x))/dx = 1 / (1 + |x|)^2
// Taken from https://en.wikipedia.org/wiki/Activation_function
template <typename T>
struct SoftsignGradFunctor : public BaseActivationFunctor<T> {
  template <typename Device, typename X, typename Out, typename dOut,
            typename dX>
  void operator()(Device d, X x, Out out, dOut dout, dX dx) {
    dx.device(d) =
        dout * (static_cast<T>(1) / (static_cast<T>(1) + x.abs()).square());
  }
};

template <typename T>
struct SoftReluFunctor : public BaseActivationFunctor<T> {
  float threshold;
  typename BaseActivationFunctor<T>::AttrPair GetAttrs() {
    return {{"threshold", &threshold}};
  }

  template <typename Device, typename X, typename Out>
  void operator()(Device d, X x, Out out) const {
    auto tmp = static_cast<T>(threshold);
    auto temp = x.cwiseMax(-tmp).cwiseMin(tmp);
    out.device(d) = (static_cast<T>(1) + temp.exp()).log();
  }
};

template <typename T>
struct SoftReluGradFunctor : public BaseActivationFunctor<T> {
  float threshold;
  typename BaseActivationFunctor<T>::AttrPair GetAttrs() {
    return {{"threshold", &threshold}};
  }
  bool Inplace() const { return IsInplace("soft_relu"); }
  template <typename Device, typename X, typename Out, typename dOut,
            typename dX>
  void operator()(Device d, X x, Out out, dOut dout, dX dx) const {
    auto tmp = static_cast<T>(threshold);
    auto temp = ((out > -tmp) * (out < tmp)).template cast<T>().eval();
    dx.device(d) = dout * (static_cast<T>(1) - (-out).exp()) * temp;
  }
};

template <typename T>
struct LeakyReluFunctor : public BaseActivationFunctor<T> {
  float alpha;
  typename BaseActivationFunctor<T>::AttrPair GetAttrs() {
    return {{"alpha", &alpha}};
  }

  template <typename Device, typename X, typename Out>
  void operator()(Device d, X x, Out out) const {
    out.device(d) = x.cwiseMax(static_cast<T>(alpha) * x);
  }
};

template <typename T>
struct LeakyReluGradFunctor : public BaseActivationFunctor<T> {
  float alpha;
  typename BaseActivationFunctor<T>::AttrPair GetAttrs() {
    return {{"alpha", &alpha}};
  }
  template <typename Device, typename X, typename Out, typename dOut,
            typename dX>
  void operator()(Device d, X x, Out out, dOut dout, dX dx) const {
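    // leaky_relu'(x) = alpha for x < 0 and 1 for x >= 0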
    auto temp1 = static_cast<T>(alpha) *
                 (x < static_cast<T>(0)).template cast<T>().eval();
    auto temp2 = (x >= static_cast<T>(0)).template cast<T>().eval();
    dx.device(d) = dout * (temp1 + temp2).template cast<T>();
  }
};

template <typename T>
struct ELUFunctor : public BaseActivationFunctor<T> {
  float alpha;
  typename BaseActivationFunctor<T>::AttrPair GetAttrs() {
    return {{"alpha", &alpha}};
  }

  template <typename Device, typename X, typename Out>
  void operator()(Device d, X x, Out out) const {
    out.device(d) = x.cwiseMax(static_cast<T>(0)) +
                    (static_cast<T>(alpha) * (x.exp() - static_cast<T>(1)))
                        .cwiseMin(static_cast<T>(0));
  }
};

template <typename T>
struct ELUGradFunctor : public BaseActivationFunctor<T> {
  float alpha;
  typename BaseActivationFunctor<T>::AttrPair GetAttrs() {
    return {{"alpha", &alpha}};
  }
  template <typename Device, typename X, typename Out, typename dOut,
            typename dX>
  void operator()(Device d, X x, Out out, dOut dout, dX dx) const {
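    // elu'(x) = 1 for x > 0 and alpha * exp(x) = out + alpha for x < 0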
    dx.device(d) = dout * (x > static_cast<T>(0)).template cast<T>() +
                   dout * (out + static_cast<T>(alpha)) *
                       (x < static_cast<T>(0)).template cast<T>();
  }
};

// FIXME(qijun) https://github.com/PaddlePaddle/Paddle/issues/5198
template <typename T>
struct PowFunctor : public BaseActivationFunctor<T> {
  float factor;
  typename BaseActivationFunctor<T>::AttrPair GetAttrs() {
    return {{"factor", &factor}};
  }
  template <typename Device, typename X, typename Out>
  void operator()(Device d, X x, Out out) const {
    out.device(d) = x.pow(static_cast<T>(factor));
  }
};

template <typename T>
struct PowGradFunctor : public BaseActivationFunctor<T> {
  float factor;
  typename BaseActivationFunctor<T>::AttrPair GetAttrs() {
    return {{"factor", &factor}};
  }
  template <typename Device, typename X, typename Out, typename dOut,
            typename dX>
  void operator()(Device d, X x, Out out, dOut dout, dX dx) const {
    dx.device(d) = dout * static_cast<T>(factor) *
                   x.pow(static_cast<T>(factor) - static_cast<T>(1));
  }
};

template <typename T>
struct STanhFunctor : public BaseActivationFunctor<T> {
  float scale_a;
  float scale_b;
  typename BaseActivationFunctor<T>::AttrPair GetAttrs() {
    return {{"scale_a", &scale_a}, {"scale_b", &scale_b}};
  }

  template <typename Device, typename X, typename Out>
  void operator()(Device d, X x, Out out) const {
    out.device(d) =
        static_cast<T>(scale_b) * (static_cast<T>(scale_a) * x).tanh();
  }
};

template <typename T>
struct STanhGradFunctor : public BaseActivationFunctor<T> {
  float scale_a;
  float scale_b;
  typename BaseActivationFunctor<T>::AttrPair GetAttrs() {
    return {{"scale_a", &scale_a}, {"scale_b", &scale_b}};
  }

  template <typename Device, typename X, typename Out, typename dOut,
            typename dX>
  void operator()(Device d, X x, Out out, dOut dout, dX dx) const {
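    // d/dx (b * tanh(a * x)) = a * b * (1 - tanh(a * x)^2)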
    auto a = static_cast<T>(scale_a);
    auto b = static_cast<T>(scale_b);
    auto temp = (a * x).tanh() * (a * x).tanh();
    dx.device(d) = dout * a * b * (static_cast<T>(1) - temp);
  }
};

template <typename T>
struct ThresholdedReluFunctor : public BaseActivationFunctor<T> {
  float threshold;
  typename BaseActivationFunctor<T>::AttrPair GetAttrs() {
    return {{"threshold", &threshold}};
  }

  template <typename Device, typename X, typename Out>
  void operator()(Device d, X x, Out out) const {
    auto th = static_cast<T>(threshold);
    out.device(d) = (x > th).template cast<T>() * x;
  }
};

template <typename T>
struct ThresholdedReluGradFunctor : public BaseActivationFunctor<T> {
  float threshold;
  typename BaseActivationFunctor<T>::AttrPair GetAttrs() {
    return {{"threshold", &threshold}};
  }

  template <typename Device, typename X, typename Out, typename dOut,
            typename dX>
  void operator()(Device d, X x, Out out, dOut dout, dX dx) const {
    auto th = static_cast<T>(threshold);
    dx.device(d) = dout * (x > th).template cast<T>();
  }
};

template <typename T>
struct HardSigmoidFunctor : public BaseActivationFunctor<T> {
  float slope;
  float offset;
  typename BaseActivationFunctor<T>::AttrPair GetAttrs() {
    return {{"slope", &slope}, {"offset", &offset}};
  }

  template <typename Device, typename X, typename Out>
  void operator()(Device d, X x, Out out) const {
    auto temp = x * static_cast<T>(slope) + static_cast<T>(offset);
    out.device(d) =
        temp.cwiseMax(static_cast<T>(0)).cwiseMin(static_cast<T>(1));
  }
};

template <typename T>
struct HardSigmoidGradFunctor : public BaseActivationFunctor<T> {
  float slope;
  float offset;
  typename BaseActivationFunctor<T>::AttrPair GetAttrs() {
    return {{"slope", &slope}, {"offset", &offset}};
  }
  bool Inplace() { return IsInplace("hard_sigmoid"); }
  template <typename Device, typename X, typename Out, typename dOut,
            typename dX>
  void operator()(Device d, X x, Out out, dOut dout, dX dx) const {
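    // hard_sigmoid'(x) = slope on the linear segment (where 0 < out < 1) and 0
    // elsewhere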
    dx.device(d) = dout *
                   ((out > static_cast<T>(0)) * (out < static_cast<T>(1)))
                       .template cast<T>() *
                   static_cast<T>(slope);
  }
};

template <typename T>
struct SwishFunctor : public BaseActivationFunctor<T> {
  float beta;
  typename BaseActivationFunctor<T>::AttrPair GetAttrs() {
    return {{"beta", &beta}};
  }

  template <typename Device, typename X, typename Out>
  void operator()(Device d, X x, Out out) const {
    out.device(d) = x / (static_cast<T>(1) + (static_cast<T>(-beta) * x).exp());
  }
};

template <typename T>
struct SwishGradFunctor : public BaseActivationFunctor<T> {
  float beta;
  typename BaseActivationFunctor<T>::AttrPair GetAttrs() {
    return {{"beta", &beta}};
  }

  template <typename Device, typename X, typename Out, typename dOut,
            typename dX>
  void operator()(Device d, X x, Out out, dOut dout, dX dx) const {
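    // d/dx swish(x) = beta * swish(x) + sigmoid(beta * x) * (1 - beta * swish(x)),
    // with out = swish(x) and temp1 = sigmoid(beta * x)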
    auto temp1 = static_cast<T>(1) /
                 (static_cast<T>(1) + (static_cast<T>(-beta) * x).exp());
    auto temp2 = temp1 * (static_cast<T>(1) - (static_cast<T>(beta) * out));
    dx.device(d) = dout * ((static_cast<T>(beta) * out) + temp2);
  }
};

}  // namespace operators
}  // namespace paddle

#define FOR_EACH_KERNEL_FUNCTOR(__macro)                             \
  __macro(sigmoid, SigmoidFunctor, SigmoidGradFunctor);              \
  __macro(logsigmoid, LogSigmoidFunctor, LogSigmoidGradFunctor);     \
  __macro(exp, ExpFunctor, ExpGradFunctor);                          \
  __macro(relu, ReluFunctor, ReluGradFunctor);                       \
  __macro(gelu, GeluFunctor, GeluGradFunctor);                       \
  __macro(tanh, TanhFunctor, TanhGradFunctor);                       \
  __macro(softshrink, SoftShrinkFunctor, SoftShrinkGradFunctor);     \
  __macro(sqrt, SqrtFunctor, SqrtGradFunctor);                       \
  __macro(abs, AbsFunctor, AbsGradFunctor);                          \
  __macro(ceil, CeilFunctor, ZeroGradFunctor);                       \
  __macro(floor, FloorFunctor, ZeroGradFunctor);                     \
  __macro(cos, CosFunctor, CosGradFunctor);                          \
  __macro(sin, SinFunctor, SinGradFunctor);                          \
  __macro(round, RoundFunctor, ZeroGradFunctor);                     \
  __macro(reciprocal, ReciprocalFunctor, ReciprocalGradFunctor);     \
  __macro(log, LogFunctor, LogGradFunctor);                          \
  __macro(square, SquareFunctor, SquareGradFunctor);                 \
  __macro(brelu, BReluFunctor, BReluGradFunctor);                    \
  __macro(soft_relu, SoftReluFunctor, SoftReluGradFunctor);          \
  __macro(pow, PowFunctor, PowGradFunctor);                          \
  __macro(stanh, STanhFunctor, STanhGradFunctor);                    \
  __macro(softplus, SoftplusFunctor, SoftplusGradFunctor);           \
  __macro(softsign, SoftsignFunctor, SoftsignGradFunctor);           \
  __macro(relu6, Relu6Functor, Relu6GradFunctor);                    \
  __macro(leaky_relu, LeakyReluFunctor, LeakyReluGradFunctor);       \
  __macro(tanh_shrink, TanhShrinkFunctor, TanhShrinkGradFunctor);    \
  __macro(elu, ELUFunctor, ELUGradFunctor);                          \
  __macro(hard_shrink, HardShrinkFunctor, HardShrinkGradFunctor);    \
  __macro(hard_sigmoid, HardSigmoidFunctor, HardSigmoidGradFunctor); \
  __macro(swish, SwishFunctor, SwishGradFunctor);                    \
  __macro(thresholded_relu, ThresholdedReluFunctor, ThresholdedReluGradFunctor);
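
// FOR_EACH_KERNEL_FUNCTOR is meant to be instantiated with a registration
// macro. As a hypothetical sketch (the real registration macros live in the
// corresponding .cc/.cu files), a caller could define
//   #define REGISTER_ACTIVATION_KERNEL(act_type, functor, grad_functor) ...
// and then invoke FOR_EACH_KERNEL_FUNCTOR(REGISTER_ACTIVATION_KERNEL) to
// register the forward and gradient kernels for every activation listed above.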