BaiXuePrincess / Paddle (forked from PaddlePaddle / Paddle)
Commit 70394792
Authored Oct 25, 2017 by tensor-tang

refine comment and code

Parent: 88452186

Showing 2 changed files with 12 additions and 18 deletions (+12 -18)
  paddle/gserver/layers/MKLDNNBatchNormLayer.cpp  +8 -16
  paddle/gserver/layers/MKLDNNBatchNormLayer.h    +4 -2

paddle/gserver/layers/MKLDNNBatchNormLayer.cpp
@@ -109,19 +109,10 @@ void MKLDNNBatchNormLayer::convertWeightsFromPaddle() {
 void MKLDNNBatchNormLayer::calMovingMeanAndVar() {
   // calculating and saving moving mean and variance
   CHECK_EQ(useGlobalStats_, false);
-  MatrixPtr movingMean = movingMean_->getW();
-  MatrixPtr movingVar = movingVar_->getW();
-  if (FLAGS_trainer_count > 1) {
-    auto mvMean = std::dynamic_pointer_cast<SharedCpuMatrix>(movingMean);
-    auto mvVar = std::dynamic_pointer_cast<SharedCpuMatrix>(movingVar);
-    CHECK(mvMean && mvVar);
-    mvMean->add(*mean_, movingAvgFraction_, 1.0 - movingAvgFraction_);
-    mvVar->add(*var_, movingAvgFraction_, 1.0 - movingAvgFraction_);
-  } else {
-    movingMean->add(*mean_, movingAvgFraction_, 1.0 - movingAvgFraction_);
-    // here var is v^2
-    movingVar->add(*var_, movingAvgFraction_, 1.0 - movingAvgFraction_);
-  }
+  movingMean_->getW()->add(*mean_, movingAvgFraction_, 1.0 - movingAvgFraction_);
+  // here var is v^2
+  movingVar_->getW()->add(*var_, movingAvgFraction_, 1.0 - movingAvgFraction_);
 }

 void MKLDNNBatchNormLayer::reshape(
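Assuming Paddle's Matrix::add(b, p1, p2) computes this = p1 * this + p2 * b (the reading the surrounding code suggests), the update calMovingMeanAndVar() performs is an ordinary exponential moving average of the batch statistics. A minimal standalone sketch on plain arrays, with illustrative names rather than Paddle APIs:

#include <cstddef>

// Sketch only: updateMovingStat is a hypothetical helper, not Paddle code.
// It applies moving = fraction * moving + (1 - fraction) * batchStat,
// element-wise, which is what the add() calls in the hunk above do.
void updateMovingStat(float* moving, const float* batchStat, size_t len,
                      float movingAvgFraction) {
  for (size_t i = 0; i < len; ++i) {
    moving[i] = movingAvgFraction * moving[i] +
                (1.0f - movingAvgFraction) * batchStat[i];
  }
}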
@@ -142,8 +133,9 @@ void MKLDNNBatchNormLayer::resetFwd(std::vector<primitive>& pipeline,
                                     MKLDNNMatrixPtr& wgt,
                                     MKLDNNMatrixPtr& bias,
                                     MKLDNNMatrixPtr& out) {
-  // in training always calculate mean and var, so useGlobalStats must be false
-  // in test depends on useGlobalStats
+  // In training phase, it will always calculate mean and var,
+  // so useGlobalStats must be false.
+  // In scoring phase, it depends on useGlobalStats choice.
   if (passType_ != PASS_TEST && useGlobalStats_ == true) {
     LOG(WARNING) << "use_global_stats is invalid setting in training phase";
     useGlobalStats_ = false;
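Read together with the forward() hunk below, the guard amounts to a small decision rule for which statistics the layer normalizes with. A hedged sketch (illustrative helper, not Paddle code):

// Returns whether to normalize with the moving (global) statistics
// instead of the current mini-batch statistics.
bool useMovingStats(bool isTestPass, bool useGlobalStats) {
  if (!isTestPass && useGlobalStats) {
    // Training: the flag is ignored, mirroring the LOG(WARNING) branch above.
    useGlobalStats = false;
  }
  return isTestPass && useGlobalStats;
}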
@@ -173,7 +165,7 @@ void MKLDNNBatchNormLayer::resetBwd(std::vector<primitive>& pipeline,
 void MKLDNNBatchNormLayer::forward(PassType passType) {
   MKLDNNLayer::forward(passType);

-  // calculating and saving moving mean and variance
+  // calculate and save moving mean and variance
   if (passType_ != PASS_TEST) {
     calMovingMeanAndVar();
   }
paddle/gserver/layers/MKLDNNBatchNormLayer.h
@@ -56,8 +56,10 @@ protected:
   bool hasInitedWgt_;

   // local mean and variance
-  MKLDNNMatrixPtr mean_;  // output of mkldnn: m
-  MKLDNNMatrixPtr var_;   // output of mkldnn: v^2
+  // when useGlobalStats_ they are loaded from moving mean and variance
+  // when do not useGlobalStats_ they are calculated from this mini-batch
+  MKLDNNMatrixPtr mean_;
+  MKLDNNMatrixPtr var_;

 public:
   explicit MKLDNNBatchNormLayer(const LayerConfig& config)
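For reference, the statistics these header comments describe feed the standard batch-normalization transform (a textbook sketch, not text from the patch). With learned scale \gamma, shift \beta, and a small \epsilon:

\[ y = \gamma \, \frac{x - \mu}{\sqrt{\sigma^2 + \epsilon}} + \beta \]

During training, \mu and \sigma^2 are the mini-batch mean and variance (the v^2 the old comment referred to); with useGlobalStats_ set, the moving averages maintained by calMovingMeanAndVar() are used instead.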