Commit d7f98f37 authored by dzhwinter

more platform is done

Parent efd0884f
@@ -246,16 +246,14 @@ struct OpKernelRegistrarFunctorEx<PlaceType, false, I,
       __use_op_itself_##op_type,                                  \
       "USE_OP_ITSELF must be called in global namespace");        \
   extern int TouchOpRegistrar_##op_type();                        \
-  static int use_op_itself_##op_type##_ __attribute__((unused)) = \
-      TouchOpRegistrar_##op_type()
+  UNUSED static int use_op_itself_##op_type##_ = TouchOpRegistrar_##op_type()
 
 #define USE_OP_DEVICE_KERNEL(op_type, LIBRARY_TYPE)                 \
   STATIC_ASSERT_GLOBAL_NAMESPACE(                                   \
       __use_op_kernel_##op_type##_##LIBRARY_TYPE##__,               \
       "USE_OP_DEVICE_KERNEL must be in global namespace");          \
   extern int TouchOpKernelRegistrar_##op_type##_##LIBRARY_TYPE();   \
-  static int use_op_kernel_##op_type##_##LIBRARY_TYPE##_            \
-      __attribute__((unused)) =                                     \
+  UNUSED static int use_op_kernel_##op_type##_##LIBRARY_TYPE##_ =   \
       TouchOpKernelRegistrar_##op_type##_##LIBRARY_TYPE()
 
 // TODO(fengjiayi): The following macros
......
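The `UNUSED` macro used above is introduced by the platform header later in this commit (see the last hunk). As a minimal sketch of what the registration macro boils down to after this change, assuming an example op name `mul` (the expansion below is illustrative, not the literal preprocessor output, and omits the static-assert part):

```cpp
// Illustrative expansion of USE_OP_ITSELF(mul); "mul" is just an example op name.
extern int TouchOpRegistrar_mul();
// UNUSED resolves to __attribute__((unused)) on gcc/clang and to an MSVC
// pragma on Windows, so the GCC-only attribute is no longer spelled inline here.
UNUSED static int use_op_itself_mul_ = TouchOpRegistrar_mul();
```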
@@ -15,6 +15,7 @@ limitations under the License. */
 #include "paddle/fluid/operators/activation_op.h"
 #include <string>
 #include "paddle/fluid/operators/mkldnn_activation_op.h"
+#include "paddle/fluid/platform/port.h"
 
 namespace paddle {
 namespace operators {
@@ -105,105 +106,105 @@ class ActivationOpGrad : public framework::OperatorWithKernel {
   }
 };
 
-__attribute__((unused)) constexpr char SigmoidDoc[] = R"DOC(
+UNUSED constexpr char SigmoidDoc[] = R"DOC(
 Sigmoid Activation Operator
 $$out = \frac{1}{1 + e^{-x}}$$
 )DOC";
-__attribute__((unused)) constexpr char LogSigmoidDoc[] = R"DOC(
+UNUSED constexpr char LogSigmoidDoc[] = R"DOC(
 Logsigmoid Activation Operator
 $$out = \\log \\frac{1}{1 + e^{-x}}$$
 )DOC";
-__attribute__((unused)) constexpr char ExpDoc[] = R"DOC(
+UNUSED constexpr char ExpDoc[] = R"DOC(
 Exp Activation Operator.
 $out = e^x$
 )DOC";
-__attribute__((unused)) constexpr char ReluDoc[] = R"DOC(
+UNUSED constexpr char ReluDoc[] = R"DOC(
 Relu Activation Operator.
 $out = \max(x, 0)$
 )DOC";
-__attribute__((unused)) constexpr char TanhDoc[] = R"DOC(
+UNUSED constexpr char TanhDoc[] = R"DOC(
 Tanh Activation Operator.
 $$out = \\frac{e^{x} - e^{-x}}{e^{x} + e^{-x}}$$
 )DOC";
-__attribute__((unused)) constexpr char TanhShrinkDoc[] = R"DOC(
+UNUSED constexpr char TanhShrinkDoc[] = R"DOC(
 TanhShrink Activation Operator.
 $$out = x - \\frac{e^{x} - e^{-x}}{e^{x} + e^{-x}}$$
 )DOC";
-__attribute__((unused)) constexpr char SqrtDoc[] = R"DOC(
+UNUSED constexpr char SqrtDoc[] = R"DOC(
 Sqrt Activation Operator.
 $out = \sqrt{x}$
 )DOC";
-__attribute__((unused)) constexpr char AbsDoc[] = R"DOC(
+UNUSED constexpr char AbsDoc[] = R"DOC(
 Abs Activation Operator.
 $out = |x|$
 )DOC";
-__attribute__((unused)) constexpr char CeilDoc[] = R"DOC(
+UNUSED constexpr char CeilDoc[] = R"DOC(
 Ceil Activation Operator.
 $out = ceil(x)$
 )DOC";
-__attribute__((unused)) constexpr char FloorDoc[] = R"DOC(
+UNUSED constexpr char FloorDoc[] = R"DOC(
 Floor Activation Operator.
 $out = floor(x)$
 )DOC";
-__attribute__((unused)) constexpr char CosDoc[] = R"DOC(
+UNUSED constexpr char CosDoc[] = R"DOC(
 Cosine Activation Operator.
 $out = cos(x)$
 )DOC";
-__attribute__((unused)) constexpr char SinDoc[] = R"DOC(
+UNUSED constexpr char SinDoc[] = R"DOC(
 Sine Activation Operator.
 $out = sin(x)$
 )DOC";
-__attribute__((unused)) constexpr char RoundDoc[] = R"DOC(
+UNUSED constexpr char RoundDoc[] = R"DOC(
 Round Activation Operator.
 $out = [x]$
 )DOC";
-__attribute__((unused)) constexpr char ReciprocalDoc[] = R"DOC(
+UNUSED constexpr char ReciprocalDoc[] = R"DOC(
 Reciprocal Activation Operator.
 $$out = \\frac{1}{x}$$
 )DOC";
-__attribute__((unused)) constexpr char LogDoc[] = R"DOC(
+UNUSED constexpr char LogDoc[] = R"DOC(
 Log Activation Operator.
 $out = \ln(x)$
@@ -212,21 +213,21 @@ Natural logarithm of x.
 )DOC";
-__attribute__((unused)) constexpr char SquareDoc[] = R"DOC(
+UNUSED constexpr char SquareDoc[] = R"DOC(
 Square Activation Operator.
 $out = x^2$
 )DOC";
-__attribute__((unused)) constexpr char SoftplusDoc[] = R"DOC(
+UNUSED constexpr char SoftplusDoc[] = R"DOC(
 Softplus Activation Operator.
 $out = \ln(1 + e^{x})$
 )DOC";
-__attribute__((unused)) constexpr char SoftsignDoc[] = R"DOC(
+UNUSED constexpr char SoftsignDoc[] = R"DOC(
 Softsign Activation Operator.
 $$out = \frac{x}{1 + |x|}$$
......
@@ -865,8 +865,8 @@ struct SwishGradFunctor : public BaseActivationFunctor<T> {
   void operator()(Device d, X x, Out out, dOut dout, dX dx) const {
     auto temp1 = static_cast<T>(1) /
                  (static_cast<T>(1) + (static_cast<T>(-beta) * x).exp());
-    auto temp2 = temp1 * (static_cast<T>(1) - (beta * out));
-    dx.device(d) = dout * ((beta * out) + temp2);
+    auto temp2 = temp1 * (static_cast<T>(1) - (static_cast<T>(beta) * out));
+    dx.device(d) = dout * ((static_cast<T>(beta) * out) + temp2);
   }
 };
......
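For reference, the two changed lines only add `static_cast<T>(beta)`; the quantity the functor computes is unchanged. Writing out the swish gradient that the code implements, with `temp1` being the sigmoid term and `temp2` the second summand:

$$\frac{\partial\, out}{\partial x} = \beta\, out + \sigma(\beta x)\,\bigl(1 - \beta\, out\bigr), \qquad \sigma(z) = \frac{1}{1 + e^{-z}}$$

The cast matters because `beta` is held as a `float` attribute while the tensor element type `T` may differ (e.g. `double`); mixing scalar types inside an Eigen expression can fail to compile, so the attribute is converted to `T` before it enters the expression.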
@@ -17,6 +17,11 @@ limitations under the License. */
 #include "paddle/fluid/framework/tensor.h"
 #include "paddle/fluid/platform/device_context.h"
 
+#if defined(_WIN32)
+#include <intrin.h>
+#include <windows.h>
+#endif  // _WIN32
+
 namespace paddle {
 namespace operators {
 namespace math {
@@ -55,12 +60,40 @@ namespace math {
  * FindLastSet(x) = 1 + \floor*{\log_{2}x}
  * \f]
  */
+#if !defined(_WIN32)
 inline constexpr size_t FindLastSet(size_t x) {
   return std::is_same<size_t, unsigned int>::value
              ? (x ? 8 * sizeof(x) - __builtin_clz(x) : 0)
              : (std::is_same<size_t, unsigned long>::value  // NOLINT
                     ? (x ? 8 * sizeof(x) - __builtin_clzl(x) : 0)
                     : (x ? 8 * sizeof(x) - __builtin_clzll(x) : 0));
+#else
+// Windows toolchains don't provide the __builtin_clz/__builtin_ctz
+// intrinsics; emulate them with _BitScanForward/_BitScanReverse.
+template <typename T>
+__inline uint32_t ctz(const T& value) {
+  DWORD trailing_zero = 0;
+  if (_BitScanForward(&trailing_zero, value)) {
+    return static_cast<uint32_t>(trailing_zero);
+  } else {
+    return static_cast<uint32_t>(0);
+  }
+}
+
+template <typename T>
+__inline uint32_t clz(const T& value) {
+  DWORD leading_zero = 0;
+  if (_BitScanReverse(&leading_zero, value)) {
+    // _BitScanReverse reports the index of the highest set bit, so the
+    // leading-zero count is the number of remaining high-order bits.
+    return static_cast<uint32_t>(sizeof(T) * 8 - 1 - leading_zero);
+  } else {
+    // value == 0: define clz as the full bit width so FindLastSet(0) == 0.
+    return static_cast<uint32_t>(sizeof(T) * 8);
+  }
+}
+
+template <typename T>
+inline size_t FindLastSet(const T& x) {
+  return sizeof(T) * 8 - clz(x);
+}
+#endif  // !_WIN32
 }
 
 struct SimpleCode {
......
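The Windows branch above emulates the GCC builtins with the `_BitScan*` intrinsics, and both paths are meant to satisfy the same definition, FindLastSet(x) = 1 + floor(log2 x) for x > 0 and 0 for x == 0. A standalone reference sketch under that definition (this snippet does not include the Paddle header; the function name and test values are illustrative only):

```cpp
#include <cassert>
#include <cstddef>
#include <cstdint>

// Portable reference: position of the highest set bit, counted from 1;
// returns 0 for x == 0. Matches FindLastSet(x) = 1 + floor(log2(x)).
inline size_t FindLastSetRef(uint64_t x) {
  size_t n = 0;
  while (x != 0) {
    ++n;
    x >>= 1;
  }
  return n;
}

int main() {
  assert(FindLastSetRef(0) == 0);
  assert(FindLastSetRef(1) == 1);    // 0b1        -> highest set bit index 0
  assert(FindLastSetRef(8) == 4);    // 0b1000     -> highest set bit index 3
  assert(FindLastSetRef(255) == 8);  // 0b11111111 -> highest set bit index 7
  return 0;
}
```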
@@ -20,6 +20,8 @@
 #include <string>
 
 #if !defined(_WIN32)
+#define UNUSED __attribute__((unused))
 #include <dlfcn.h>     // for dladdr
 #include <execinfo.h>  // for backtrace
 #include <sys/stat.h>
@@ -28,6 +30,9 @@
 #include <io.h>  // _popen, _pclose
 #include <windows.h>
+
+// Windows version of __attribute__((unused))
+#define UNUSED __pragma(warning(suppress : 4100))
 #ifndef S_ISDIR  // windows port for sys/stat.h
 #define S_ISDIR(mode) (((mode)&S_IFMT) == S_IFDIR)
 #endif
......
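With the two definitions above, `UNUSED` becomes the single spelling used by the registration macros and the activation doc strings earlier in this commit. A minimal usage sketch, assuming the header path added in this commit (the function and variable names below are made up for illustration):

```cpp
#include "paddle/fluid/platform/port.h"

static int Touch() { return 0; }

// On gcc/clang, UNUSED expands to __attribute__((unused)); on MSVC it expands
// to __pragma(warning(suppress : 4100)), which suppresses that warning for the
// following line only rather than attaching an attribute to the variable.
UNUSED static int force_link_flag = Touch();
```

Note the asymmetry of the design: the POSIX form marks the declaration itself as intentionally unused, while the MSVC form is a one-line warning suppression placed immediately before the declaration.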