BaiXuePrincess / Paddle (forked from PaddlePaddle / Paddle)
Commit 6164b898
Authored September 29, 2017 by Yu Yang
Parent: 762a99cc

Fix CI

Showing 3 changed files with 4 additions and 50 deletions (+4 -50):

python/paddle/v2/framework/tests/test_gradient_checker.py   +0  -46
python/paddle/v2/framework/tests/test_net.py                 +2  -2
python/paddle/v2/framework/tests/test_operator.py            +2  -2
python/paddle/v2/framework/tests/test_gradient_checker.py (deleted, 100644 → 0)
import unittest
import numpy as np
import paddle.v2.framework.core as core
from op_test import get_numeric_gradient
from op_test import create_op


class GetNumericGradientTest(unittest.TestCase):
    def test_add_op(self):
        x = np.random.random((10, 1)).astype("float32")
        y = np.random.random((10, 1)).astype("float32")
        z = x + y
        scope = core.Scope()
        add_op = create_op(scope, "add", {'X': x, 'Y': y}, {'Out': z}, dict())
        arr = get_numeric_gradient(scope, add_op, {'X': x, 'Y': y}, 'X', ['Out'])
        self.assertAlmostEqual(arr.mean(), 1.0, delta=1e-4)

    def test_softmax_op(self):
        def stable_softmax(x):
            """Compute the softmax of vector x in a numerically stable way."""
            shiftx = x - np.max(x)
            exps = np.exp(shiftx)
            return exps / np.sum(exps)

        def label_softmax_grad(Y, dY):
            dX = Y * 0.0
            for i in range(Y.shape[0]):
                d = np.dot(Y[i, :], dY[i, :])
                dX[i, :] = Y[i, :] * (dY[i, :] - d)
            return dX

        X = np.random.random((2, 2)).astype("float32")
        Y = np.apply_along_axis(stable_softmax, 1, X)
        dY = np.ones(Y.shape)
        dX = label_softmax_grad(Y, dY)

        scope = core.Scope()
        softmax_op = create_op(scope, "softmax", {"X": X}, {"Y": Y}, dict())

        arr = get_numeric_gradient(scope, softmax_op, {"X": X}, "X", "Y")
        np.testing.assert_almost_equal(arr, dX, decimal=1e-2)


if __name__ == "__main__":
    unittest.main()
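For context, the deleted test exercised numerical gradient checking: it compared a finite-difference estimate from get_numeric_gradient against a known analytic gradient. The sketch below is a minimal, self-contained illustration of that idea using central differences in plain NumPy; the function name numeric_gradient and the callable f are illustrative assumptions, not the actual helper that lived in op_test.

import numpy as np

def numeric_gradient(f, x, eps=1e-3):
    """Estimate d f(x) / d x element-wise with central differences.

    `f` maps an ndarray to a scalar; `x` is perturbed one element at a
    time, so the cost is O(x.size) evaluations of `f`.
    """
    x = x.astype("float64").copy()
    grad = np.zeros_like(x)
    it = np.nditer(x, flags=["multi_index"])
    while not it.finished:
        idx = it.multi_index
        orig = x[idx]
        x[idx] = orig + eps
        f_plus = f(x)
        x[idx] = orig - eps
        f_minus = f(x)
        x[idx] = orig  # restore the perturbed element
        grad[idx] = (f_plus - f_minus) / (2.0 * eps)
        it.iternext()
    return grad

# Example mirroring test_add_op: d(sum(x + y)) / dx should be all ones.
x = np.random.random((10, 1))
y = np.random.random((10, 1))
g = numeric_gradient(lambda v: np.sum(v + y), x)
assert abs(g.mean() - 1.0) < 1e-4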
python/paddle/v2/framework/tests/test_net.py

@@ -15,7 +15,7 @@ def fc(X, W, Y):
 class TestNet(unittest.TestCase):
     def test_net_all(self):
         net = core.Net.create()
-        op1 = Operator("add", X="X", Y="Y", Out="Out")
+        op1 = Operator("sum", X=["X", "Y"], Out="Out")
         net.append_op(op1)

         net2 = core.Net.create()
...
@@ -26,7 +26,7 @@ class TestNet(unittest.TestCase):
         expected = '''
 Op(plain_net), inputs:{all[W, X, Y]}, outputs:{all[Out, fc.out, pre_activation]}.
-    Op(add), inputs:{X[X], Y[Y]}, outputs:{Out[Out]}.
+    Op(sum), inputs:{X[X, Y]}, outputs:{Out[Out]}.
     Op(plain_net), inputs:{all[W, X]}, outputs:{all[fc.out, pre_activation]}.
         Op(plain_net), inputs:{all[W, X]}, outputs:{all[fc.out, pre_activation]}.
             Op(mul), inputs:{X[X], Y[W]}, outputs:{Out[pre_activation]}.
...
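The substance of the change in both remaining files is swapping the two-input "add" operator (separate X and Y inputs) for the variadic "sum" operator, which takes a list of inputs under the single slot X. As a framework-free illustration of why the test behavior is preserved (plain NumPy only, not Paddle's Operator API; the helper names below are assumptions for this sketch), the two forms compute the same result for exactly two inputs:

import numpy as np

def add(X, Y):
    """Two-input elementwise add, as the old "add" op exposed it."""
    return X + Y

def variadic_sum(inputs):
    """Elementwise sum over a list of inputs, as the "sum" op exposes it."""
    out = np.zeros_like(inputs[0])
    for x in inputs:
        out = out + x
    return out

X = np.random.random((10, 1)).astype("float32")
Y = np.random.random((10, 1)).astype("float32")

# For exactly two inputs the variadic sum reduces to the old add.
np.testing.assert_allclose(variadic_sum([X, Y]), add(X, Y))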
python/paddle/v2/framework/tests/test_operator.py

@@ -193,10 +193,10 @@ class TestOpDescCreationMethod(unittest.TestCase):
 class TestOpCreations(unittest.TestCase):
     def test_all(self):
-        add_op = op.Operator("add", X="a", Y="b", Out="z")
+        add_op = op.Operator("sum", X=["a", "b"], Out="z")
         self.assertIsNotNone(add_op)
         # Invoke C++ DebugString()
-        self.assertEqual('Op(add), inputs:{X[a], Y[b]}, outputs:{Out[z]}.',
+        self.assertEqual('Op(sum), inputs:{X[a, b]}, outputs:{Out[z]}.',
                          str(add_op))
...
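The assertion above pins down the operator description format the tests rely on: Op(type), inputs:{slot[name, ...]}, outputs:{slot[name, ...]}. A rough Python sketch of that formatting rule follows; it is an assumption for illustration only (the real string comes from the C++ DebugString(), and debug_string here is a made-up helper), but it reproduces the expected string in the updated test.

def debug_string(op_type, inputs, outputs):
    """Render an operator description in the format the test asserts on.

    `inputs` and `outputs` are lists of (slot, variable-name-list) pairs,
    e.g. [("X", ["a", "b"])] renders as "X[a, b]".
    """
    def render(slots):
        return ", ".join(
            "{}[{}]".format(slot, ", ".join(names)) for slot, names in slots)

    return "Op({}), inputs:{{{}}}, outputs:{{{}}}.".format(
        op_type, render(inputs), render(outputs))

# Matches the expected string in the updated test:
assert debug_string("sum", [("X", ["a", "b"])], [("Out", ["z"])]) == \
    "Op(sum), inputs:{X[a, b]}, outputs:{Out[z]}."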