BaiXuePrincess / Paddle (forked from PaddlePaddle / Paddle)

Commit 626227eb: "fix ci"
Authored on April 17, 2018 by dzhwinter
Parent: b92b408e
Showing 2 changed files with 55 additions and 45 deletions (+55 -45):

paddle/fluid/operators/activation_op.cc   +55 -44
paddle/fluid/operators/activation_op.cu    +0  -1
paddle/fluid/operators/activation_op.cc

@@ -32,14 +32,16 @@ namespace operators {
     }                                                                     \
   }

-#define REGISTER_ACTIVATION_OP_GRAD_MAKER(OP_NAME)                        \
+#define REGISTER_ACTIVATION_OP_GRAD_MAKER(OP_NAME, KERNEL_TYPE)           \
   class OP_NAME##GradMaker : public framework::SingleGradOpDescMaker {    \
    public:                                                                \
     using framework::SingleGradOpDescMaker::SingleGradOpDescMaker;        \
                                                                           \
    protected:                                                             \
     std::unique_ptr<framework::OpDesc> Apply() const override {           \
       auto *op = new framework::OpDesc();                                 \
-      op->SetType(#OP_NAME "_grad");                                      \
-      op->SetInput("Out", Input("Out"));                                  \
+      op->SetType(#KERNEL_TYPE "_grad");                                  \
+      op->SetInput("Out", Output("Out"));                                 \
       op->SetInput(framework::GradVarName("Out"), OutputGrad("Out"));     \
                                                                           \
       op->SetAttrMap(Attrs());                                            \
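For reference, the sketch below hand-expands the fixed macro for a single operator, REGISTER_ACTIVATION_OP_GRAD_MAKER(Relu, relu). It is an illustration derived from the macro body above, not code that appears in the commit: the first argument now supplies the CamelCase class-name prefix, the second the lowercase operator type string, so the two no longer have to be spelled the same way.

// Illustrative expansion (assumed) of REGISTER_ACTIVATION_OP_GRAD_MAKER(Relu, relu),
// following the macro body shown in the hunk above.
class ReluGradMaker : public framework::SingleGradOpDescMaker {
 public:
  using framework::SingleGradOpDescMaker::SingleGradOpDescMaker;

 protected:
  std::unique_ptr<framework::OpDesc> Apply() const override {
    auto *op = new framework::OpDesc();
    op->SetType("relu_grad");            // #KERNEL_TYPE "_grad": the lowercase op type string
    op->SetInput("Out", Output("Out"));  // forward Out (no longer Input) feeds the grad op
    op->SetInput(framework::GradVarName("Out"), OutputGrad("Out"));
    op->SetAttrMap(Attrs());
    // ... the remainder of the macro body is collapsed in the diff above ...
    return std::unique_ptr<framework::OpDesc>(op);
  }
};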
@@ -452,56 +454,64 @@ REGISTER_ACTIVATION_OP_MAKER(Softsign, SoftsignDoc);
 // variable
 // is used in gradient operator.
 // The operator name written in lowercase intentionally.
-REGISTER_ACTIVATION_OP_GRAD_MAKER(sigmoid);
-REGISTER_ACTIVATION_OP_GRAD_MAKER(exp);
-REGISTER_ACTIVATION_OP_GRAD_MAKER(relu);
-REGISTER_ACTIVATION_OP_GRAD_MAKER(tanh);
-REGISTER_ACTIVATION_OP_GRAD_MAKER(sqrt);
-REGISTER_ACTIVATION_OP_GRAD_MAKER(ceil);
-REGISTER_ACTIVATION_OP_GRAD_MAKER(floor);
-REGISTER_ACTIVATION_OP_GRAD_MAKER(reciprocal);
-REGISTER_ACTIVATION_OP_GRAD_MAKER(relu6);
-REGISTER_ACTIVATION_OP_GRAD_MAKER(soft_relu);
-REGISTER_ACTIVATION_OP_GRAD_MAKER(hard_sigmoid);
+REGISTER_ACTIVATION_OP_GRAD_MAKER(Sigmoid, sigmoid);
+REGISTER_ACTIVATION_OP_GRAD_MAKER(Exp, exp);
+REGISTER_ACTIVATION_OP_GRAD_MAKER(Relu, relu);
+REGISTER_ACTIVATION_OP_GRAD_MAKER(Tanh, tanh);
+REGISTER_ACTIVATION_OP_GRAD_MAKER(Sqrt, sqrt);
+REGISTER_ACTIVATION_OP_GRAD_MAKER(Ceil, ceil);
+REGISTER_ACTIVATION_OP_GRAD_MAKER(Floor, floor);
+REGISTER_ACTIVATION_OP_GRAD_MAKER(Reciprocal, reciprocal);
+REGISTER_ACTIVATION_OP_GRAD_MAKER(Relu6, relu6);
+REGISTER_ACTIVATION_OP_GRAD_MAKER(SoftRelu, soft_relu);
+REGISTER_ACTIVATION_OP_GRAD_MAKER(HardSigmoid, hard_sigmoid);
 }  // namespace operators
 }  // namespace paddle
 namespace ops = paddle::operators;

+#define REGISTER_INPLACE_ACTIVATION_OP(act_type, op_name)                \
+  REGISTER_OPERATOR(act_type, ops::ActivationOp, ops::op_name##OpMaker,  \
+                    ops::op_name##GradMaker);                            \
+  REGISTER_OPERATOR(act_type##grad, ops::ActivationOpGrad)
+
 #define REGISTER_ACTIVATION_OP(act_type, op_name)                 \
   REGISTER_OP(act_type, ops::ActivationOp, ops::op_name##OpMaker, \
               act_type##_grad, ops::ActivationOpGrad);

-#define FOR_EACH_OP_FUNCTOR(__macro)    \
-  __macro(sigmoid, Sigmoid);            \
-  __macro(logsigmoid, LogSigmoid);      \
-  __macro(exp, Exp);                    \
-  __macro(relu, Relu);                  \
-  __macro(tanh, Tanh);                  \
-  __macro(softshrink, SoftShrink);      \
-  __macro(sqrt, Sqrt);                  \
-  __macro(abs, Abs);                    \
-  __macro(ceil, Ceil);                  \
-  __macro(floor, Floor);                \
-  __macro(cos, Cos);                    \
-  __macro(sin, Sin);                    \
-  __macro(round, Round);                \
-  __macro(reciprocal, Reciprocal);      \
-  __macro(log, Log);                    \
-  __macro(square, Square);              \
-  __macro(brelu, BRelu);                \
-  __macro(soft_relu, SoftRelu);         \
-  __macro(pow, Pow);                    \
-  __macro(stanh, STanh);                \
-  __macro(softplus, Softplus);          \
-  __macro(softsign, Softsign);          \
-  __macro(relu6, Relu6);                \
-  __macro(leaky_relu, LeakyRelu);       \
-  __macro(tanh_shrink, TanhShrink);     \
-  __macro(elu, ELU);                    \
-  __macro(hard_shrink, HardShrink);     \
-  __macro(hard_sigmoid, HardSigmoid);   \
-  __macro(swish, Swish);                \
+#define FOR_EACH_INPLACE_OP_FUNCTOR(__macro) \
+  __macro(sigmoid, Sigmoid);                 \
+  __macro(relu, Relu);                       \
+  __macro(exp, Exp);                         \
+  __macro(tanh, Tanh);                       \
+  __macro(ceil, Ceil);                       \
+  __macro(floor, Floor);                     \
+  __macro(sqrt, Sqrt);                       \
+  __macro(soft_relu, SoftRelu);              \
+  __macro(relu6, Relu6);                     \
+  __macro(reciprocal, Reciprocal);           \
+  __macro(hard_sigmoid, HardSigmoid);
+
+#define FOR_EACH_OP_FUNCTOR(__macro)    \
+  __macro(logsigmoid, LogSigmoid);      \
+  __macro(softshrink, SoftShrink);      \
+  __macro(abs, Abs);                    \
+  __macro(cos, Cos);                    \
+  __macro(sin, Sin);                    \
+  __macro(round, Round);                \
+  __macro(log, Log);                    \
+  __macro(square, Square);              \
+  __macro(brelu, BRelu);                \
+  __macro(pow, Pow);                    \
+  __macro(stanh, STanh);                \
+  __macro(softplus, Softplus);          \
+  __macro(softsign, Softsign);          \
+  __macro(leaky_relu, LeakyRelu);       \
+  __macro(tanh_shrink, TanhShrink);     \
+  __macro(elu, ELU);                    \
+  __macro(hard_shrink, HardShrink);     \
+  __macro(swish, Swish);                \
   __macro(thresholded_relu, ThresholdedRelu);

 #define REGISTER_ACTIVATION_CPU_KERNEL(act_type, functor, grad_functor) \
...
...
@@ -518,4 +528,5 @@ namespace ops = paddle::operators;
                                   ops::grad_functor<double>>);

 FOR_EACH_OP_FUNCTOR(REGISTER_ACTIVATION_OP);
+FOR_EACH_INPLACE_OP_FUNCTOR(REGISTER_INPLACE_ACTIVATION_OP);
 FOR_EACH_KERNEL_FUNCTOR(REGISTER_ACTIVATION_CPU_KERNEL);
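To see how the renamed GradMaker classes and the split FOR_EACH lists fit together, here is a hand expansion of the two registration paths for one operator each, following the REGISTER_INPLACE_ACTIVATION_OP and REGISTER_ACTIVATION_OP macros above. The chosen operators (sigmoid, logsigmoid) and the expansion itself are illustrative assumptions, not lines from the commit.

// Illustrative expansion (assumed): in-place path,
// FOR_EACH_INPLACE_OP_FUNCTOR(REGISTER_INPLACE_ACTIVATION_OP) applied to (sigmoid, Sigmoid):
REGISTER_OPERATOR(sigmoid, ops::ActivationOp, ops::SigmoidOpMaker,
                  ops::SigmoidGradMaker);  // requires the CamelCase GradMaker class
// ... plus the companion REGISTER_OPERATOR for the gradient op, per the macro above ...

// Illustrative expansion (assumed): regular path,
// FOR_EACH_OP_FUNCTOR(REGISTER_ACTIVATION_OP) applied to (logsigmoid, LogSigmoid):
REGISTER_OP(logsigmoid, ops::ActivationOp, ops::LogSigmoidOpMaker,
            logsigmoid_grad, ops::ActivationOpGrad);

Note that the two-argument form REGISTER_ACTIVATION_OP_GRAD_MAKER(Sigmoid, sigmoid) generates a class named SigmoidGradMaker, which is exactly what ops::op_name##GradMaker resolves to in REGISTER_INPLACE_ACTIVATION_OP, while the old single-argument form invoked with lowercase names would have produced sigmoidGradMaker instead.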
paddle/fluid/operators/activation_op.cu
...
...
@@ -9,7 +9,6 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */
#define EIGEN_USE_GPU
#include "paddle/fluid/operators/activation_op.h"
#include "paddle/fluid/platform/float16.h"
...
...