Unverified commit b2034c28
Authored on Aug 11, 2020 by zhupengyang; committed by GitHub on Aug 11, 2020
softmax: imperative->static; fix doc examples (#26134)
Parent: b6d14d9d
Showing 3 changed files with 22 additions and 22 deletions (+22 −22)
python/paddle/fluid/tests/unittests/test_softmax_op.py   +17 −17
python/paddle/nn/functional/activation.py                +2 −2
python/paddle/nn/layer/activation.py                     +3 −3
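All three files apply the same rename from the old imperative-mode helpers to the static/dynamic-graph toggles. A rough mapping inferred from the hunks below (a reviewer's note, not part of the commit):

    # old spelling                            new spelling used in this commit
    # paddle.enable_imperative()          ->  paddle.disable_static()
    # paddle.imperative.guard(place)      ->  paddle.disable_static(place) ... paddle.enable_static()
    # paddle.imperative.to_variable(x)    ->  paddle.to_variable(x)
    # paddle.Executor(place)              ->  paddle.static.Executor(place)
    # paddle.nn.functional.softmax(x)     ->  F.softmax(x), with import paddle.nn.functional as F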
python/paddle/fluid/tests/unittests/test_softmax_op.py
@@ -21,6 +21,7 @@ import paddle.fluid.core as core
 import paddle.fluid as fluid
 from paddle.fluid import compiler, Program, program_guard
 import paddle
+import paddle.nn.functional as F

 np.random.seed(10)
@@ -231,34 +232,33 @@ class TestNnFunctionalSoftmaxApi(unittest.TestCase):
         self.out_ref = np.apply_along_axis(stable_softmax, -1, self.x_np)

     def test_api_static(self):
-        train_program = Program()
-        startup_program = Program()
-        with program_guard(train_program, startup_program):
+        with program_guard(Program()):
             x = paddle.data('X', self.x_np.shape, 'float32')
-            out = paddle.nn.functional.softmax(x)
-
-        exe = paddle.Executor(self.place)
-        res = exe.run(train_program, feed={'X': self.x_np}, fetch_list=[out])
-        assert np.allclose(self.out_ref, res[0])
+            out = F.softmax(x)
+
+            exe = paddle.static.Executor(self.place)
+            res = exe.run(feed={'X': self.x_np}, fetch_list=[out])
+        self.assertEqual(np.allclose(self.out_ref, res[0]), True)

     def test_api_imperative(self):
-        with paddle.imperative.guard(self.place):
-            x = paddle.imperative.to_variable(self.x_np)
-            out = paddle.nn.functional.softmax(x)
-            assert np.allclose(self.out_ref, out.numpy())
-
-            out = paddle.nn.functional.softmax(x, axis=0)
+        paddle.disable_static(self.place)
+
+        x = paddle.to_variable(self.x_np)
+        out = F.softmax(x)
+        self.assertEqual(np.allclose(self.out_ref, out.numpy()), True)
+
+        out = F.softmax(x, axis=0)
         out_ref = np.apply_along_axis(stable_softmax, 0, self.x_np)
-        assert np.allclose(out_ref, out.numpy())
+        self.assertEqual(np.allclose(out_ref, out.numpy()), True)
+
+        paddle.enable_static()

     def test_error(self):
         with program_guard(Program(), Program()):
             # The x should be variable and its dtype should be float32, float64.
-            self.assertRaises(TypeError, paddle.nn.functional.softmax, [1])
+            self.assertRaises(TypeError, F.softmax, [1])

             x = paddle.data(name='x', shape=[2, 3], dtype='int32')
-            self.assertRaises(TypeError, paddle.nn.functional.softmax, x)
+            self.assertRaises(TypeError, F.softmax, x)

 if __name__ == "__main__":
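For context, the rewritten test_api_imperative exercises the explicit dygraph toggle instead of the old guard context manager. A minimal self-contained sketch of that pattern, using only calls that appear in the diff (the input shape here is illustrative):

    import numpy as np
    import paddle
    import paddle.nn.functional as F

    paddle.disable_static()                # switch to dynamic-graph (imperative) mode
    x = paddle.to_variable(np.random.rand(2, 3).astype('float32'))
    out = F.softmax(x, axis=-1)            # normalize over the last axis
    print(out.numpy().sum(axis=-1))        # each row sums to ~1.0
    paddle.enable_static()                 # restore static-graph mode afterwards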
python/paddle/nn/functional/activation.py
@@ -401,7 +401,7 @@ def softmax(x, axis=-1, name=None):
             import paddle.nn.functional as F
             import numpy as np

-            paddle.enable_imperative()
+            paddle.disable_static()

             x = np.array([[[2.0, 3.0, 4.0, 5.0],
                            [3.0, 4.0, 5.0, 6.0],
@@ -409,7 +409,7 @@ def softmax(x, axis=-1, name=None):
                           [[1.0, 2.0, 3.0, 4.0],
                            [5.0, 6.0, 7.0, 8.0],
                            [6.0, 7.0, 8.0, 9.0]]], 'float32')
-            x = paddle.imperative.to_variable(x)
+            x = paddle.to_variable(x)
             out = F.softmax(x)
             # [[[0.0320586 , 0.08714432, 0.23688282, 0.64391426],
             #   [0.0320586 , 0.08714432, 0.23688282, 0.64391426],
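As a quick check of the updated docstring API, a minimal runnable sketch (smaller input than the docstring's; the values are chosen so the output matches the commented result shown above):

    import numpy as np
    import paddle
    import paddle.nn.functional as F

    paddle.disable_static()
    x = paddle.to_variable(np.array([[2.0, 3.0, 4.0, 5.0],
                                     [1.0, 2.0, 3.0, 4.0]], 'float32'))
    out = F.softmax(x)       # axis=-1 by default
    print(out.numpy())
    # [[0.0320586  0.08714432 0.23688282 0.64391426]
    #  [0.0320586  0.08714432 0.23688282 0.64391426]]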
python/paddle/nn/layer/activation.py
@@ -232,11 +232,11 @@ class LeakyReLU(layers.Layer):
             import paddle
             import numpy as np

-            paddle.enable_imperative()
+            paddle.disable_static()

             lrelu = paddle.nn.LeakyReLU()
-            x = paddle.imperative.to_variable(np.array([-2, 0, 1], 'float32'))
-            out = lrelu(x)  # [-0.02, 0, 1]
+            x = paddle.to_variable(np.array([-2, 0, 1], 'float32'))
+            out = lrelu(x)  # [-0.02, 0., 1.]
     """

     def __init__(self, alpha=1e-2, name=None):
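The layer-style example follows the same update; a self-contained sketch that reproduces the commented output above (LeakyReLU's negative slope defaults to alpha=1e-2, per the __init__ signature shown):

    import numpy as np
    import paddle

    paddle.disable_static()
    lrelu = paddle.nn.LeakyReLU()          # alpha defaults to 1e-2
    x = paddle.to_variable(np.array([-2, 0, 1], 'float32'))
    out = lrelu(x)
    print(out.numpy())                     # [-0.02  0.    1.  ]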