BaiXuePrincess / Paddle
Forked from PaddlePaddle / Paddle
Commit e782b54b
Authored May 07, 2019 by baojun; committed by tensor-tang, May 07, 2019

update sofmax with axis arg test=develop (#17190)

Parent: 71f0c6d5

Showing 2 changed files with 24 additions and 22 deletions (+24 -22)
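In short: the nGraph softmax bridge used to flatten its input to 2-D and always apply softmax over axis 1; this commit instead threads the operator's axis attribute (negative values count back from the last dimension) through GetSoftmax and GetSoftmaxGrad, and extends the unit test to the remaining softmax test cases.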
Changed files:
  paddle/fluid/operators/ngraph/ops/softmax_op.h  (+23 -21)
  python/paddle/fluid/tests/unittests/ngraph/test_softmax_ngraph_op.py  (+1 -1)
paddle/fluid/operators/ngraph/ops/softmax_op.h

@@ -27,38 +27,38 @@ namespace paddle {
 namespace operators {
 namespace ngraphs {

-std::shared_ptr<ngraph::Node> GetSoftmax(std::shared_ptr<ngraph::Node> x) {
+std::shared_ptr<ngraph::Node> GetSoftmax(std::shared_ptr<ngraph::Node> x,
+                                         int axis = -1) {
   auto x_shape = x->get_shape();
-  int rank = x_shape.size();
-  auto x_2d_shape = paddle::platform::FlattenTo2d(x_shape, rank - 1);
-  x = paddle::platform::NgReshaper(x, x_2d_shape);
+  size_t rank = x_shape.size();
+  size_t softmax_axis = axis;
+  if (axis < 0) softmax_axis = rank + axis;
-  auto x_max = std::make_shared<ngraph::op::Max>(x, ngraph::AxisSet{1});
+  auto x_max =
+      std::make_shared<ngraph::op::Max>(x, ngraph::AxisSet{softmax_axis});
   auto x_max_bcast = std::make_shared<ngraph::op::Broadcast>(
-      x_max, x_2d_shape, ngraph::AxisSet{1});
+      x_max, x_shape, ngraph::AxisSet{softmax_axis});
   auto x_shifted = x - x_max_bcast;
   auto x_clipped = paddle::operators::ngraphs::ElementwiseScalar<
       ngraph::op::Maximum>(-64., x_shifted);
-  auto softmax =
-      std::make_shared<ngraph::op::Softmax>(x_clipped, ngraph::AxisSet{1});
+  auto softmax = std::make_shared<ngraph::op::Softmax>(
+      x_clipped, ngraph::AxisSet{softmax_axis});
   return softmax;
 }
 std::shared_ptr<ngraph::Node> GetSoftmaxGrad(
-    std::shared_ptr<ngraph::Node> out, std::shared_ptr<ngraph::Node> dout) {
+    std::shared_ptr<ngraph::Node> out, std::shared_ptr<ngraph::Node> dout,
+    int axis = -1) {
   auto out_shape = out->get_shape();
-  int rank = out_shape.size();
-  auto out_2d_shape = paddle::platform::FlattenTo2d(out_shape, rank - 1);
-  auto dout_2d_shape =
-      paddle::platform::FlattenTo2d(dout->get_shape(), rank - 1);
-  out = paddle::platform::NgReshaper(out, out_2d_shape);
-  dout = paddle::platform::NgReshaper(dout, dout_2d_shape);
+  size_t rank = out_shape.size();
+  size_t softmax_axis = axis;
+  if (axis < 0) softmax_axis = rank + axis;
-  auto node_sum =
-      std::make_shared<ngraph::op::Sum>(out * dout, ngraph::AxisSet{1});
+  auto node_sum = std::make_shared<ngraph::op::Sum>(
+      out * dout, ngraph::AxisSet{softmax_axis});
   auto node_bcast = std::make_shared<ngraph::op::Broadcast>(
-      node_sum, out_2d_shape, ngraph::AxisSet{1});
+      node_sum, out_shape, ngraph::AxisSet{softmax_axis});
   auto dx = (dout - node_bcast) * out;
   return dx;
 }
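The backward helper is the standard softmax Jacobian-vector product, now reduced along softmax_axis instead of a hard-coded axis 1. Written out, with y = softmax(x) along the chosen axis and L the loss, the identity the graph encodes is

  \frac{\partial L}{\partial x_i}
    = y_i \left( \frac{\partial L}{\partial y_i}
        - \sum_j y_j \, \frac{\partial L}{\partial y_j} \right)

node_sum is the inner sum (out * dout reduced over softmax_axis), node_bcast stretches it back to the full shape, and dx = (dout - node_bcast) * out is exactly this expression.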
@@ -68,8 +68,9 @@ void BuildSoftmaxNode(
     std::shared_ptr<
         std::unordered_map<std::string, std::shared_ptr<ngraph::Node>>>
         ngb_node_map) {
+  auto op_attrs = framework::AttrReader(op->Attrs());
   auto x = paddle::platform::GetInputNode(op, "X", ngb_node_map);
-  auto softmax = GetSoftmax(x);
+  auto softmax = GetSoftmax(x, op_attrs.Get<int>("axis"));
   paddle::platform::SetOutputNode(op, "Out", softmax, ngb_node_map);
 }
@@ -78,9 +79,10 @@ void BuildSoftmaxGradNode(
     std::shared_ptr<
         std::unordered_map<std::string, std::shared_ptr<ngraph::Node>>>
         ngb_node_map) {
+  auto op_attrs = framework::AttrReader(op->Attrs());
   auto out = paddle::platform::GetInputNode(op, "Out", ngb_node_map);
   auto dout = paddle::platform::GetInputNode(op, "Out@GRAD", ngb_node_map);
-  auto dx = GetSoftmaxGrad(out, dout);
+  auto dx = GetSoftmaxGrad(out, dout, op_attrs.Get<int>("axis"));
   paddle::platform::SetOutputNode(op, "X@GRAD", dx, ngb_node_map);
 }
 }  // namespace ngraphs
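Two details of the rewrite are easy to miss. First, the FlattenTo2d/NgReshaper pre-reshape is gone: ngraph's Max, Broadcast, and Softmax all take an AxisSet over the original shape, so reducing along softmax_axis directly replaces the old flatten-to-2D trick. Second, the builders fetch the attribute with framework::AttrReader, while the axis = -1 default on the helpers keeps any caller that omits the argument on the previous last-axis behavior. As a sanity check on the numerics the graph encodes, here is a minimal standalone sketch in plain C++ (no nGraph involved; NormalizeAxis, SoftmaxRows, and main are illustrative, not part of the patch):

#include <algorithm>
#include <cmath>
#include <cstddef>
#include <vector>

// Mirrors the patch's axis handling: a negative axis counts back from the
// last dimension, e.g. axis = -1 with rank 4 selects axis 3.
size_t NormalizeAxis(int axis, size_t rank) {
  return static_cast<size_t>(axis < 0 ? static_cast<int>(rank) + axis : axis);
}

// Numerically stable softmax over each row of a row-major rows x cols matrix
// (the axis = 1 case), using the same trick the nGraph graph builds: subtract
// the row max, clip the shifted values at -64 so exp() cannot underflow to
// zero across the whole row, exponentiate, then normalize.
void SoftmaxRows(std::vector<float>* data, size_t rows, size_t cols) {
  for (size_t r = 0; r < rows; ++r) {
    float* row = data->data() + r * cols;
    float mx = *std::max_element(row, row + cols);
    float sum = 0.f;
    for (size_t c = 0; c < cols; ++c) {
      row[c] = std::exp(std::max(row[c] - mx, -64.f));
      sum += row[c];
    }
    for (size_t c = 0; c < cols; ++c) row[c] /= sum;
  }
}

int main() {
  std::vector<float> x = {1.f, 2.f, 3.f, 1.f, 2.f, 3.f};
  SoftmaxRows(&x, /*rows=*/2, /*cols=*/3);   // each row now sums to 1
  return NormalizeAxis(-1, 2) == 1 ? 0 : 1;  // axis -1 of a rank-2 tensor
}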
python/paddle/fluid/tests/unittests/ngraph/test_softmax_ngraph_op.py

@@ -14,7 +14,7 @@
 from __future__ import print_function
 import unittest
-from paddle.fluid.tests.unittests.test_softmax_op import TestSoftmaxOp
+from paddle.fluid.tests.unittests.test_softmax_op import TestSoftmaxOp, TestSoftmaxOp2, TestSoftmaxOp3, TestSoftmaxOp4, TestSoftmaxOp5

 if __name__ == "__main__":
     unittest.main()
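Importing the extra subclasses is all it takes to run them: unittest.main() collects every TestCase visible in the module namespace, so the nGraph build now executes TestSoftmaxOp2 through TestSoftmaxOp5 as well. Judging by the commit's subject, these presumably cover non-default shapes and axis values; see the upstream test_softmax_op.py for their exact configurations.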