Commit d7f98f37 in 机器未来 / Paddle (fork of PaddlePaddle / Paddle)
Authored on Aug 25, 2018 by dzhwinter
Parent: efd0884f

more platform is done
Showing 5 changed files with 67 additions and 30 deletions (+67 / -30):
paddle/fluid/framework/op_registry.h            +8   -10
paddle/fluid/operators/activation_op.cc         +19  -18
paddle/fluid/operators/activation_op.h          +2   -2
paddle/fluid/operators/math/matrix_bit_code.h   +33  -0
paddle/fluid/platform/port.h                    +5   -0
paddle/fluid/framework/op_registry.h

@@ -246,16 +246,14 @@ struct OpKernelRegistrarFunctorEx<PlaceType, false, I,
       __use_op_itself_##op_type,                                    \
       "USE_OP_ITSELF must be called in global namespace");          \
   extern int TouchOpRegistrar_##op_type();                          \
-  static int use_op_itself_##op_type##_ __attribute__((unused)) =   \
-      TouchOpRegistrar_##op_type()
+  UNUSED static int use_op_itself_##op_type##_ = TouchOpRegistrar_##op_type()

 #define USE_OP_DEVICE_KERNEL(op_type, LIBRARY_TYPE)                 \
   STATIC_ASSERT_GLOBAL_NAMESPACE(                                   \
       __use_op_kernel_##op_type##_##LIBRARY_TYPE##__,               \
       "USE_OP_DEVICE_KERNEL must be in global namespace");          \
   extern int TouchOpKernelRegistrar_##op_type##_##LIBRARY_TYPE();   \
-  static int use_op_kernel_##op_type##_##LIBRARY_TYPE##_            \
-      __attribute__((unused)) =                                     \
+  UNUSED static int use_op_kernel_##op_type##_##LIBRARY_TYPE##_ =   \
       TouchOpKernelRegistrar_##op_type##_##LIBRARY_TYPE()

 // TODO(fengjiayi): The following macros ...
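These macros create a file-scope anchor variable whose initializer calls a Touch* function, which forces the translation unit that registers the operator or kernel into the final link; the anchor itself is never read, so it has to be marked as unused. The commit replaces the GCC-only __attribute__((unused)) spelling with the UNUSED macro defined in paddle/fluid/platform/port.h further below. Below is a minimal, self-contained sketch of the same pattern; names such as TouchOpRegistrar_my_op and USE_OP_ITSELF_DEMO are illustrative, not taken from the commit.

// Illustrative sketch only (not part of the commit): the "touch registrar"
// anchor pattern that USE_OP_ITSELF expands to, using the portable UNUSED macro.
#include <cstdio>

#if !defined(_WIN32)
#define UNUSED __attribute__((unused))
#else
#define UNUSED __pragma(warning(suppress : 4100))
#endif

// Library side: calling this function pulls the registering translation unit
// into the link.
int TouchOpRegistrar_my_op() {
  std::printf("my_op registered\n");
  return 0;
}

// User side: the macro expands to an anchor variable whose initializer runs
// the touch function; the variable is never read afterwards, so UNUSED keeps
// unused-variable warnings quiet.
#define USE_OP_ITSELF_DEMO(op_type)        \
  extern int TouchOpRegistrar_##op_type(); \
  UNUSED static int use_op_itself_##op_type##_ = TouchOpRegistrar_##op_type()

USE_OP_ITSELF_DEMO(my_op);

int main() { return 0; }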
paddle/fluid/operators/activation_op.cc

@@ -15,6 +15,7 @@ limitations under the License. */
 #include "paddle/fluid/operators/activation_op.h"
 #include <string>
 #include "paddle/fluid/operators/mkldnn_activation_op.h"
+#include "paddle/fluid/platform/port.h"

 namespace paddle {
 namespace operators {

@@ -105,105 +106,105 @@ class ActivationOpGrad : public framework::OperatorWithKernel {
   }
 };

-__attribute__((unused)) constexpr char SigmoidDoc[] = R"DOC(
+UNUSED constexpr char SigmoidDoc[] = R"DOC(
 Sigmoid Activation Operator
 $$out = \frac{1}{1 + e^{-x}}$$
 )DOC";

-__attribute__((unused)) constexpr char LogSigmoidDoc[] = R"DOC(
+UNUSED constexpr char LogSigmoidDoc[] = R"DOC(
 Logsigmoid Activation Operator
 $$out = \\log \\frac{1}{1 + e^{-x}}$$
 )DOC";

-__attribute__((unused)) constexpr char ExpDoc[] = R"DOC(
+UNUSED constexpr char ExpDoc[] = R"DOC(
 Exp Activation Operator.
 $out = e^x$
 )DOC";

-__attribute__((unused)) constexpr char ReluDoc[] = R"DOC(
+UNUSED constexpr char ReluDoc[] = R"DOC(
 Relu Activation Operator.
 $out = \max(x, 0)$
 )DOC";

-__attribute__((unused)) constexpr char TanhDoc[] = R"DOC(
+UNUSED constexpr char TanhDoc[] = R"DOC(
 Tanh Activation Operator.
 $$out = \\frac{e^{x} - e^{-x}}{e^{x} + e^{-x}}$$
 )DOC";

-__attribute__((unused)) constexpr char TanhShrinkDoc[] = R"DOC(
+UNUSED constexpr char TanhShrinkDoc[] = R"DOC(
 TanhShrink Activation Operator.
 $$out = x - \\frac{e^{x} - e^{-x}}{e^{x} + e^{-x}}$$
 )DOC";

-__attribute__((unused)) constexpr char SqrtDoc[] = R"DOC(
+UNUSED constexpr char SqrtDoc[] = R"DOC(
 Sqrt Activation Operator.
 $out = \sqrt{x}$
 )DOC";

-__attribute__((unused)) constexpr char AbsDoc[] = R"DOC(
+UNUSED constexpr char AbsDoc[] = R"DOC(
 Abs Activation Operator.
 $out = |x|$
 )DOC";

-__attribute__((unused)) constexpr char CeilDoc[] = R"DOC(
+UNUSED constexpr char CeilDoc[] = R"DOC(
 Ceil Activation Operator.
 $out = ceil(x)$
 )DOC";

-__attribute__((unused)) constexpr char FloorDoc[] = R"DOC(
+UNUSED constexpr char FloorDoc[] = R"DOC(
 Floor Activation Operator.
 $out = floor(x)$
 )DOC";

-__attribute__((unused)) constexpr char CosDoc[] = R"DOC(
+UNUSED constexpr char CosDoc[] = R"DOC(
 Cosine Activation Operator.
 $out = cos(x)$
 )DOC";

-__attribute__((unused)) constexpr char SinDoc[] = R"DOC(
+UNUSED constexpr char SinDoc[] = R"DOC(
 Sine Activation Operator.
 $out = sin(x)$
 )DOC";

-__attribute__((unused)) constexpr char RoundDoc[] = R"DOC(
+UNUSED constexpr char RoundDoc[] = R"DOC(
 Round Activation Operator.
 $out = [x]$
 )DOC";

-__attribute__((unused)) constexpr char ReciprocalDoc[] = R"DOC(
+UNUSED constexpr char ReciprocalDoc[] = R"DOC(
 Reciprocal Activation Operator.
 $$out = \\frac{1}{x}$$
 )DOC";

-__attribute__((unused)) constexpr char LogDoc[] = R"DOC(
+UNUSED constexpr char LogDoc[] = R"DOC(
 Log Activation Operator.
 $out = \ln(x)$
 ...

@@ -212,21 +213,21 @@ Natural logarithm of x.
 )DOC";

-__attribute__((unused)) constexpr char SquareDoc[] = R"DOC(
+UNUSED constexpr char SquareDoc[] = R"DOC(
 Square Activation Operator.
 $out = x^2$
 )DOC";

-__attribute__((unused)) constexpr char SoftplusDoc[] = R"DOC(
+UNUSED constexpr char SoftplusDoc[] = R"DOC(
 Softplus Activation Operator.
 $out = \ln(1 + e^{x})$
 )DOC";

-__attribute__((unused)) constexpr char SoftsignDoc[] = R"DOC(
+UNUSED constexpr char SoftsignDoc[] = R"DOC(
 Softsign Activation Operator.
 $$out = \frac{x}{1 + |x|}$$
 ...
paddle/fluid/operators/activation_op.h

@@ -865,8 +865,8 @@ struct SwishGradFunctor : public BaseActivationFunctor<T> {
   void operator()(Device d, X x, Out out, dOut dout, dX dx) const {
     auto temp1 = static_cast<T>(1) /
                  (static_cast<T>(1) + (static_cast<T>(-beta) * x).exp());
-    auto temp2 = temp1 * (static_cast<T>(1) - (beta * out));
-    dx.device(d) = dout * ((beta * out) + temp2);
+    auto temp2 = temp1 * (static_cast<T>(1) - (static_cast<T>(beta) * out));
+    dx.device(d) = dout * ((static_cast<T>(beta) * out) + temp2);
   }
 };
 ...
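For context (not part of the diff), this functor computes the gradient of swish, $out = x \cdot \sigma(\beta x)$ with $\sigma(z) = \frac{1}{1 + e^{-z}}$. Differentiating gives

$$\frac{\partial out}{\partial x} = \beta \cdot out + \sigma(\beta x)\,(1 - \beta \cdot out)$$

which is what the body evaluates: temp1 is $\sigma(\beta x)$, temp2 is $temp1 \cdot (1 - \beta \cdot out)$, and dx becomes dout times $(\beta \cdot out + temp2)$. The change itself only wraps beta in static_cast<T>, presumably to keep the scalar consistent with the element type T in the Eigen expression.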
paddle/fluid/operators/math/matrix_bit_code.h

@@ -17,6 +17,11 @@ limitations under the License. */
 #include "paddle/fluid/framework/tensor.h"
 #include "paddle/fluid/platform/device_context.h"

+#if defined(_WIN32)
+#include <intrin.h>
+#include <windows.h>
+#endif  // _WIN32
+
 namespace paddle {
 namespace operators {
 namespace math {

@@ -55,12 +60,40 @@ namespace math {
  * FindLastSet(x) = 1 + \floor*{\log_{2}x}
  * \f]
  */
+#if !defined(_WIN32)
 inline constexpr size_t FindLastSet(size_t x) {
   return std::is_same<size_t, unsigned int>::value
              ? (x ? 8 * sizeof(x) - __builtin_clz(x) : 0)
              : (std::is_same<size_t, unsigned long>::value  // NOLINT
                     ? (x ? 8 * sizeof(x) - __builtin_clzl(x) : 0)
                     : (x ? 8 * sizeof(x) - __builtin_clzll(x) : 0));
+#else
+// Windows does not have built-in clz/ctz functions.
+template <typename T>
+uint32_t __inline ctz(const T& value) {
+  DWORD trailing_zero = 0;
+  if (_BitScanForward(&trailing_zero, value)) {
+    return static_cast<uint32_t>(trailing_zero);
+  } else {
+    return static_cast<uint32_t>(0);
+  }
+}
+
+template <typename T>
+uint32_t __inline clz(const T& value) {
+  DWORD leading_zero = 0;
+  if (_BitScanReverse(&leading_zero, value)) {
+    return sizeof(T) * 8 - leading_zero;
+  } else {
+    return static_cast<uint32_t>(0);
+  }
+}
+
+template <typename T>
+inline size_t FindLastSet(const T& x) {
+  return sizeof(T) * 8 - clz(x);
+}
+#endif  // !_WIN32
 }

 struct SimpleCode {
 ...
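FindLastSet returns the 1-based index of the highest set bit, i.e. $1 + \floor*{\log_{2}x}$, and 0 for x = 0; the new Windows branch derives it from _BitScanReverse instead of GCC's __builtin_clz family. Below is a small standalone sketch (illustrative only, GCC/Clang, 64-bit width) that checks the builtin-based formula against a naive shift loop.

// Illustrative check, not part of the commit: compare the builtin-based
// FindLastSet formula with a naive shift loop.
#include <cstddef>
#include <cstdint>
#include <cstdio>

// Reference: count how many right shifts until x becomes 0 (0 -> 0, 1 -> 1, 5 -> 3).
inline size_t FindLastSetNaive(uint64_t x) {
  size_t n = 0;
  while (x) {
    ++n;
    x >>= 1;
  }
  return n;
}

// Same formula as the non-Windows branch, specialised to 64-bit for brevity.
// __builtin_clzll(0) is undefined, hence the "x ? ... : 0" guard.
inline size_t FindLastSetBuiltin(uint64_t x) {
  return x ? 8 * sizeof(x) - __builtin_clzll(x) : 0;
}

int main() {
  const uint64_t samples[] = {0, 1, 2, 5, 255, 256, 1ull << 40};
  for (uint64_t x : samples) {
    std::printf("x=%llu  naive=%zu  builtin=%zu\n",
                static_cast<unsigned long long>(x),
                FindLastSetNaive(x), FindLastSetBuiltin(x));
  }
  return 0;
}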
paddle/fluid/platform/port.h

@@ -20,6 +20,8 @@
 #include <string>

 #if !defined(_WIN32)
+#define UNUSED __attribute__((unused))
 #include <dlfcn.h>     // for dladdr
 #include <execinfo.h>  // for backtrace
 #include <sys/stat.h>
 ...

@@ -28,6 +30,9 @@
 #include <io.h>  // _popen, _pclose
 #include <windows.h>
+// windows version of __attribute__((unused))
+#define UNUSED __pragma(warning(suppress : 4100))
+
 #ifndef S_ISDIR  // windows port for sys/stat.h
 #define S_ISDIR(mode) (((mode)&S_IFMT) == S_IFDIR)
 #endif
 ...
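These two definitions are what the earlier files now rely on: on GCC/Clang UNUSED expands to __attribute__((unused)); on MSVC it expands to a __pragma that suppresses warning C4100 on the following declaration. A minimal usage sketch, mirroring how the activation-op doc strings consume it (kDemoDoc is a made-up name, not from the commit):

// Illustrative usage of the UNUSED macro added in port.h.
#if !defined(_WIN32)
#define UNUSED __attribute__((unused))
#else
#define UNUSED __pragma(warning(suppress : 4100))
#endif

// A file-scope constant that is intentionally never referenced; Clang and
// newer GCC would otherwise flag it (e.g. -Wunused-const-variable).
UNUSED static constexpr char kDemoDoc[] = R"DOC(
Demo Activation Operator.
)DOC";

int main() { return 0; }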