BaiXuePrincess / Paddle (forked from PaddlePaddle / Paddle)
Commit 1e6c917e
Authored Jan 10, 2017 by hedaoyuan

fix unit test of paramRelu

Parent: 7df67bae
Showing 3 changed files with 24 additions and 10 deletions (+24 −10):

paddle/math/Matrix.cpp                     +18 −6
paddle/math/tests/test_Matrix.cpp           +3 −2
paddle/math/tests/test_matrixCompare.cpp    +3 −2
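Each hunk below makes the same change: rather than deriving partial_sum (the number of consecutive input elements that share one PReLU weight) by unchecked integer division, the code first names the divisor paraSize and CHECKs that it divides numElements evenly. A minimal standalone sketch of the sharing scheme, with hypothetical names and a plain assert standing in for Paddle's CHECK:

#include <cassert>
#include <cstddef>
#include <vector>

// Hypothetical PReLU forward pass mirroring the CpuMatrix::paramReluForward loop:
// each of the paraSize weights covers partial_sum consecutive elements of a row.
std::vector<float> preluForward(const std::vector<float>& input,
                                const std::vector<float>& w) {
  size_t numElements = input.size();
  size_t paraSize = w.size();
  assert(numElements % paraSize == 0);  // the invariant the new CHECK enforces
  size_t partial_sum = numElements / paraSize;
  std::vector<float> output(numElements);
  for (size_t i = 0; i < numElements; ++i) {
    // i / partial_sum stays in [0, paraSize) only when the division is exact.
    output[i] = input[i] > 0 ? input[i] : input[i] * w[i / partial_sum];
  }
  return output;
}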
paddle/math/Matrix.cpp
@@ -1311,7 +1311,9 @@ void GpuMatrix::paramReluForward(Matrix& data, Matrix& W) {
   real* w = W.getData();
   size_t numElements = data.getWidth();
   size_t numSamples = data.getHeight();
-  size_t partial_sum = numElements / (W.getHeight() * W.getWidth());
+  size_t paraSize = W.getHeight() * W.getWidth();
+  CHECK(!(numElements % paraSize));  // this check from ParameterReluLayer::init
+  size_t partial_sum = numElements / paraSize;
   real* output = getData();
   hl_param_relu_forward(output, input, w, numElements, numSamples, partial_sum);
 }
@@ -1324,7 +1326,9 @@ void GpuMatrix::paramReluBackwardW(Matrix& oGrad, Matrix& data) {
   real* wgrad = data_;
   size_t numElements = data.getWidth();
   size_t numSamples = data.getHeight();
-  size_t partial_sum = numElements / (this->getHeight() * this->getWidth());
+  size_t paraSize = this->getHeight() * this->getWidth();
+  CHECK(!(numElements % paraSize));  // this check from ParameterReluLayer::init
+  size_t partial_sum = numElements / paraSize;
   hl_param_relu_backward_w(wgrad, ograd, input, numElements, numSamples, partial_sum);
 }
@@ -1336,7 +1340,9 @@ void GpuMatrix::paramReluBackwardDiff(Matrix& oGrad, Matrix& data, Matrix& W) {
   real* w = W.getData();
   size_t numElements = data.getWidth();
   size_t numSamples = data.getHeight();
-  size_t partial_sum = numElements / (W.getHeight() * W.getWidth());
+  size_t paraSize = W.getHeight() * W.getWidth();
+  CHECK(!(numElements % paraSize));  // this check from ParameterReluLayer::init
+  size_t partial_sum = numElements / paraSize;
   hl_param_relu_backward_diff(ograd, input, w, diff, numElements, numSamples, partial_sum);
 }
@@ -3764,7 +3770,9 @@ void CpuMatrix::paramReluForward(Matrix& data, Matrix& W) {
   real* w = W.getData();
   size_t numElements = data.getWidth();
   size_t numSamples = data.getHeight();
-  size_t partial_sum = numElements / (W.getHeight() * W.getWidth());
+  size_t paraSize = W.getHeight() * W.getWidth();
+  CHECK(!(numElements % paraSize));  // this check from ParameterReluLayer::init
+  size_t partial_sum = numElements / paraSize;
   for (size_t n = 0, k = 0; n < numSamples; ++n) {
     for (size_t i = 0; i < numElements; ++i, ++k) {
       data_[k] = input[k] > 0 ? input[k] : input[k] * w[i / partial_sum];
@@ -3778,7 +3786,9 @@ void CpuMatrix::paramReluBackwardW(Matrix& oGrad, Matrix& data) {
   real* wgrad = data_;
   size_t numElements = data.getWidth();
   size_t numSamples = data.getHeight();
-  size_t partial_sum = numElements / (this->getHeight() * this->getWidth());
+  size_t paraSize = this->getHeight() * this->getWidth();
+  CHECK(!(numElements % paraSize));  // this check from ParameterReluLayer::init
+  size_t partial_sum = numElements / paraSize;
   for (size_t n = 0, k = 0; n < numSamples; ++n) {
     for (size_t i = 0; i < numElements; ++i, ++k) {
       wgrad[i / partial_sum] += ograd[k] * (input[k] > 0 ? 0 : input[k]);
@@ -3793,7 +3803,9 @@ void CpuMatrix::paramReluBackwardDiff(Matrix& oGrad, Matrix& data, Matrix& W) {
   real* w = W.getData();
   size_t numElements = data.getWidth();
   size_t numSamples = data.getHeight();
-  size_t partial_sum = numElements / (W.getHeight() * W.getWidth());
+  size_t paraSize = W.getHeight() * W.getWidth();
+  CHECK(!(numElements % paraSize));  // this check from ParameterReluLayer::init
+  size_t partial_sum = numElements / paraSize;
   for (size_t n = 0, k = 0; n < numSamples; ++n) {
     for (size_t i = 0; i < numElements; ++i, ++k) {
       diff[k] += ograd[k] * (input[k] > 0 ? 1 : w[i / partial_sum]);
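Why the new CHECK matters, with a worked example (not taken from the commit): for numElements = 10 and a 2x2 weight matrix, paraSize = 4 and the old code silently truncated partial_sum to 10 / 4 = 2, so the loop's w[i / partial_sum] reads w[4] at i = 8 and i = 9, one element past the end of W. The CHECK rejects such shapes up front, mirroring the constraint ParameterReluLayer::init already imposes, and the test grids below gain a matching divisibility skip for the same reason.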
paddle/math/tests/test_Matrix.cpp
@@ -224,10 +224,11 @@ void testParamReluBackwardW(int height, int width, int w_height, int w_width) {
 }
 
 TEST(Matrix, paramRelu) {
-  for (auto height : {10, 100}) {
-    for (auto width : {10, 100}) {
+  for (auto height : {10, 40, 100}) {
+    for (auto width : {10, 40, 100}) {
       for (auto w_height : {1, 2}) {
         for (auto w_width : {1, 2}) {
+          if (width % (w_height * w_width)) continue;
           testParamReluForward(height, width, w_height, w_width);
           testParamReluBackwardW(height, width, w_height, w_width);
         }
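The added skip keeps the test grid away from the shapes the new CHECK rejects: a weight shape now only runs against widths divisible by w_height * w_width. The second test file below gets the identical change; a small enumeration sketch after it shows which grid points are dropped.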
paddle/math/tests/test_matrixCompare.cpp
@@ -773,10 +773,11 @@ void testParamReluBackwardDiff(int height,
 }
 
 TEST(Matrix, paramReluBackwardDiff) {
-  for (auto height : {10, 100}) {
-    for (auto width : {10, 100}) {
+  for (auto height : {10, 40, 100}) {
+    for (auto width : {10, 40, 100}) {
       for (auto w_height : {1, 2}) {
         for (auto w_width : {1, 2}) {
+          if (width % (w_height * w_width)) continue;
           testParamReluBackwardDiff(height, width, w_height, w_width);
         }
       }
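A throwaway sketch (hypothetical, but using the same arithmetic as the tests) that enumerates which grid points the new guard drops:

#include <cstdio>
#include <initializer_list>

// Enumerate the (width, w_height, w_width) combinations the new guard skips.
int main() {
  for (int width : {10, 40, 100}) {
    for (int w_height : {1, 2}) {
      for (int w_width : {1, 2}) {
        if (width % (w_height * w_width)) {
          std::printf("skipped: width=%d w_height=%d w_width=%d\n",
                      width, w_height, w_width);
        }
      }
    }
  }
  return 0;
}

Only width = 10 with w_height = w_width = 2 (paraSize = 4) is skipped; every other combination still runs, so the newly added width 40 keeps the 2x2 weight shape covered alongside width 100.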