BaiXuePrincess / Paddle (forked from PaddlePaddle / Paddle)
Commit 7ed6463e

Authored May 24, 2017 by yangyaming; committed by yangyaming on Jul 05, 2017.
fix bugs for CrossChannelNormLayer
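
In brief, reading from the diffs below, the commit does four things:

- Moves CrossChannelNormLayer::init() out of NormLayer.cpp and into CrossChannelNormLayer.cpp, alongside the rest of the layer.
- In forward(), adds *normTmp into the per-position column sums before the square root is taken.
- In backward(), assembles the input gradient in a new scratch buffer inGBuffer and accumulates it into inGTmp with add(), instead of writing into inGTmp in place; the sumCols() accumulation factor changes from 1. to 0., and the scale-weight gradient switches from copyFrom() to add().
- Extends the gradient-check utilities with paramInitialMean, paramInitialStd, and hasParamInitialValue so a test can pin parameter initialization; the CrossChannelNormLayer test uses mean 1 and std 0.

Judging from the forward pass (an inference from the code, not something the commit states), the layer L2-normalizes its input across channels at each spatial position and rescales by a learned per-channel weight: roughly y(c, s) = scale(c) * x(c, s) / ||x(., s)||_2.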
Parent: 98378968

Showing 5 changed files with 40 additions and 20 deletions (+40, -20)
paddle/gserver/layers/CrossChannelNormLayer.cpp   +24  -8
paddle/gserver/layers/NormLayer.cpp                +0  -10
paddle/gserver/tests/LayerGradUtil.cpp             +6  -1
paddle/gserver/tests/LayerGradUtil.h               +6  -0
paddle/gserver/tests/test_LayerGrad.cpp            +4  -1
paddle/gserver/layers/CrossChannelNormLayer.cpp

@@ -36,6 +36,16 @@ MatrixPtr CrossChannelNormLayer::createSpatialMatrix(MatrixPtr data,
       data->getData() + iter * spatialDim, 1, spatialDim, false, useGpu_);
 }
 
+bool CrossChannelNormLayer::init(const LayerMap& layerMap,
+                                 const ParameterMap& parameterMap) {
+  Layer::init(layerMap, parameterMap);
+  CHECK(parameters_[0]);
+  const NormConfig& conf = config_.inputs(0).norm_conf();
+  channels_ = conf.channels();
+  scale_.reset(new Weight(channels_, 1, parameters_[0]));
+  return true;
+}
+
 void CrossChannelNormLayer::forward(PassType passType) {
   Layer::forward(passType);
   MatrixPtr inV = getInputValue(0);
@@ -63,6 +73,7 @@ void CrossChannelNormLayer::forward(PassType passType) {
     // compute norm.
     spatialBuffer_->sumCols(*dataTmp, 1, 0);
+    spatialBuffer_->add(*normTmp);
     spatialBuffer_->sqrt2(*spatialBuffer_);
     normTmp->copyFrom(*spatialBuffer_);
     outVTmp->copyFrom(*inVTmp);
@@ -82,6 +93,9 @@ void CrossChannelNormLayer::backward(const UpdateCallback& callback) {
   size_t dataDim = inG->getWidth();
   size_t spatialDim = dataDim / channels_;
 
+  MatrixPtr inGBuffer;
+  Matrix::resizeOrCreate(inGBuffer, channels_, spatialDim, false, useGpu_);
+
   dataBuffer_->dotMul(*outG, *outV);
   Matrix::resizeOrCreate(scaleDiff_, channels_, 1, false, useGpu_);
   Matrix::resizeOrCreate(channelBuffer_, channels_, 1, false, useGpu_);
@@ -100,22 +114,24 @@ void CrossChannelNormLayer::backward(const UpdateCallback& callback) {
     scaleDiff_->add(*channelBuffer_, 1.);
 
     sampleBuffer_->dotMul(*inVTmp, *outGTmp);
-    spatialBuffer_->sumCols(*sampleBuffer_, 1., 1.);
+    spatialBuffer_->sumCols(*sampleBuffer_, 1., 0.);
     // scale the grad
-    inGTmp->copyFrom(*inVTmp);
-    inGTmp->mulRowVector(*spatialBuffer_);
+    inGBuffer->copyFrom(*inVTmp);
+    inGBuffer->mulRowVector(*spatialBuffer_);
     // divide by square of norm
     spatialBuffer_->dotMul(*normTmp, *normTmp);
-    inGTmp->divRowVector(*spatialBuffer_);
+    inGBuffer->divRowVector(*spatialBuffer_);
     // subtract
-    inGTmp->add(*outGTmp, -1, 1);
+    inGBuffer->add(*outGTmp, -1, 1);
     // divide by norm
-    inGTmp->divRowVector(*normTmp);
+    inGBuffer->divRowVector(*normTmp);
     // scale the diff
-    inGTmp->mulColVector(*scale_->getW());
+    inGBuffer->mulColVector(*scale_->getW());
+
+    inGTmp->add(*inGBuffer);
   }
   // updata scale
-  if (scale_->getWGrad()) scale_->getWGrad()->copyFrom(*scaleDiff_);
+  if (scale_->getWGrad()) scale_->getWGrad()->add(*scaleDiff_);
   scale_->getParameterPtr()->incUpdate(callback);
 }
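
The heart of the backward fix is the overwrite-versus-accumulate distinction: the old code wrote each freshly computed gradient straight into inGTmp (and copyFrom'd into the scale gradient), clobbering anything already accumulated there; the new code builds the result in inGBuffer and adds it in. A minimal standalone sketch of the pattern, using plain std::vector instead of Paddle's Matrix API (hypothetical names, illustration only):

#include <cassert>
#include <cstddef>
#include <vector>

using Buffer = std::vector<float>;  // stand-in for a gradient matrix

// The old shape of the code: overwrites grad, losing whatever other
// backward paths have already accumulated into it.
void overwriteGrad(Buffer& grad, const Buffer& delta) {
  grad = delta;
}

// The fixed shape: compute delta into a scratch buffer elsewhere,
// then add it into the destination gradient.
void accumulateGrad(Buffer& grad, const Buffer& delta) {
  assert(grad.size() == delta.size());
  for (std::size_t i = 0; i < grad.size(); ++i) {
    grad[i] += delta[i];  // add, not copy
  }
}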
paddle/gserver/layers/NormLayer.cpp

@@ -56,14 +56,4 @@ bool ResponseNormLayer::init(const LayerMap& layerMap,
   return true;
 }
 
-bool CrossChannelNormLayer::init(const LayerMap& layerMap,
-                                 const ParameterMap& parameterMap) {
-  Layer::init(layerMap, parameterMap);
-  CHECK(parameters_[0]);
-  const NormConfig& conf = config_.inputs(0).norm_conf();
-  channels_ = conf.channels();
-  scale_.reset(new Weight(channels_, 1, parameters_[0]));
-  return true;
-}
-
 }  // namespace paddle
paddle/gserver/tests/LayerGradUtil.cpp

@@ -465,7 +465,6 @@ void initTestLayer(TestConfig testConf,
                            ParameterConfig paraConfig) {
     paraConfig.set_name(paraName);
     paraConfig.set_size(paraSize);
-    paraConfig.set_initial_std(1);
    paraConfig.set_is_static(isStatic);
     auto para =
         std::make_shared<Parameter>(paraConfig, FLAGS_use_gpu, initialize);
@@ -499,6 +498,12 @@ void initTestLayer(TestConfig testConf,
         paraConfig.add_dims((*layerMap)[input.input_layer_name()]->getSize());
         paraConfig.add_dims(testConf.layerConfig.size());
       }
+      if (testConf.hasParamInitialValue) {
+        paraConfig.set_initial_mean(testConf.paramInitialMean);
+        paraConfig.set_initial_std(testConf.paramInitialStd);
+      } else {
+        paraConfig.set_initial_std(1);
+      }
       initParameter(paraName, paraSize, inputDef.isStatic, false, paraConfig);
     }
   }
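
The effect of this hunk: a parameter is now initialized with the mean and standard deviation the test asks for when it opts in via hasParamInitialValue, and falls back to the old default (set_initial_std(1)) otherwise; whether those values feed a Gaussian draw depends on Paddle's initialization strategy, which this diff does not show. A std of 0 makes the parameter a constant, which the CrossChannelNormLayer test below relies on.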
paddle/gserver/tests/LayerGradUtil.h

@@ -125,12 +125,18 @@ struct TestConfig {
   LayerConfig layerConfig;
   std::vector<InputDef> inputDefs;
   size_t biasSize;
+  real paramInitialMean;
+  real paramInitialStd;
+  bool hasParamInitialValue;
   bool testAccumulate;
   bool testState;
   bool staticBias;
   bool testBatchState;
   TestConfig()
       : biasSize(0),
+        paramInitialMean(0),
+        paramInitialStd(1),
+        hasParamInitialValue(false),
         testAccumulate(true),
         testState(false),
         staticBias(false),
paddle/gserver/tests/test_LayerGrad.cpp

@@ -1661,6 +1661,9 @@ TEST(Layer, PadLayer) {
 TEST(Layer, CrossChannelNormLayer) {
   TestConfig config;
+  config.hasParamInitialValue = true;
+  config.paramInitialMean = 1.;
+  config.paramInitialStd = 0.;
   config.layerConfig.set_type("norm");
   config.layerConfig.set_size(100);
   LayerInputConfig* input = config.layerConfig.add_inputs();
@@ -1674,7 +1677,7 @@ TEST(Layer, CrossChannelNormLayer) {
   config.inputDefs.push_back({INPUT_DATA, "layer_0", 100, 10});
 
   for (auto useGpu : {false, true}) {
-    testLayerGrad(config, "cross-channel-norm", 10, false, useGpu, false, 5);
+    testLayerGrad(config, "cross-channel-norm", 10, false, useGpu, false);
   }
 }
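
Read together with the LayerGradUtil changes, the test now runs its gradient check with the scale parameter pinned: hasParamInitialValue opts in to explicit initialization, mean 1. centers the scale weights at one, and std 0. removes all spread, so every scale weight starts exactly at 1 (assuming the initializer honors the mean/std as shown above). The testLayerGrad() call also drops its trailing argument (the literal 5), falling back to that parameter's default.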