Skip to content
体验新版
项目
组织
正在加载...
登录
切换导航
打开侧边栏
Greenplum
Opencv
提交
df305e83
O
Opencv
项目概览
Greenplum
/
Opencv
大约 1 年 前同步成功
通知
7
Star
0
Fork
0
代码
文件
提交
分支
Tags
贡献者
分支图
Diff
Issue
0
列表
看板
标记
里程碑
合并请求
0
DevOps
流水线
流水线任务
计划
Wiki
0
Wiki
分析
仓库
DevOps
项目成员
Pages
O
Opencv
项目概览
项目概览
详情
发布
仓库
仓库
文件
提交
分支
标签
贡献者
分支图
比较
Issue
0
Issue
0
列表
看板
标记
里程碑
合并请求
0
合并请求
0
Pages
DevOps
DevOps
流水线
流水线任务
计划
分析
分析
仓库分析
DevOps
Wiki
0
Wiki
成员
成员
收起侧边栏
关闭侧边栏
动态
分支图
创建新Issue
流水线任务
提交
Issue看板
体验新版 GitCode,发现更多精彩内容 >>
提交
df305e83
编写于
5月 13, 2020
作者:
D
Dmitry Kurtaev
浏览文件
操作
浏览文件
下载
电子邮件补丁
差异文件
Fix BatchNorm reinitialization after fusion
上级
fd06139c
变更
2
隐藏空白更改
内联
并排
Showing
2 changed files
with
66 additions
and
0 deletions
+66
-0
modules/dnn/src/layers/batch_norm_layer.cpp
modules/dnn/src/layers/batch_norm_layer.cpp
+9
-0
modules/dnn/test/test_layers.cpp
modules/dnn/test/test_layers.cpp
+57
-0
未找到文件。
modules/dnn/src/layers/batch_norm_layer.cpp
浏览文件 @
df305e83
...
...
@@ -94,6 +94,15 @@ public:
dstWeightsData
[
i
]
=
w
;
dstBiasData
[
i
]
=
(
hasBias
?
biasData
[
i
]
:
0.0
f
)
-
w
*
meanData
[
i
]
*
varMeanScale
;
}
// We will use blobs to store origin weights and bias to restore them in case of reinitialization.
weights_
.
copyTo
(
blobs
[
0
].
reshape
(
1
,
1
));
bias_
.
copyTo
(
blobs
[
1
].
reshape
(
1
,
1
));
}
// Called whenever the network is (re)initialized, e.g. after the input batch
// size changes. Restores the layer's working weights/bias from the pristine
// copies kept in blobs[0]/blobs[1], undoing any previous in-place fusion.
virtual void finalize(InputArrayOfArrays, OutputArrayOfArrays) CV_OVERRIDE
{
    Mat storedWeights = blobs[0].reshape(1, 1);
    Mat storedBias = blobs[1].reshape(1, 1);
    storedWeights.copyTo(weights_);
    storedBias.copyTo(bias_);
}
void
getScaleShift
(
Mat
&
scale
,
Mat
&
shift
)
const
CV_OVERRIDE
...
...
modules/dnn/test/test_layers.cpp
浏览文件 @
df305e83
...
...
@@ -1780,4 +1780,61 @@ TEST_P(Layer_Test_Slice, variable_input_shape)
// Instantiate Layer_Test_Slice across every available DNN backend/target pair.
INSTANTIATE_TEST_CASE_P(/**/, Layer_Test_Slice, dnnBackendsAndTargets());
// Parameterized test fixture: each instance receives a (backend, target) pair.
typedef testing::TestWithParam<tuple<Backend, Target> > Layer_Test_BatchNorm;
TEST_P(Layer_Test_BatchNorm, fusion)
{
    // This tests reinitializes network by forwarding different batch size input.
    // We check BatchNorm layer weights restoring after fusion.
    int backendId = get<0>(GetParam());
    int targetId = get<1>(GetParam());

    const int numChannels = 4;
    Mat meanBlob(1, numChannels, CV_32F);
    Mat varBlob(1, numChannels, CV_32F);
    Mat scaleWeights(1, numChannels, CV_32F);
    randu(meanBlob, 0, 1);
    randu(varBlob, 0, 1);
    randu(scaleWeights, 0, 1);

    // Build a two-layer network: BatchNorm followed by Scale. This pairing is
    // a candidate for layer fusion inside the DNN engine.
    Net net;
    {
        LayerParams bnParams;
        bnParams.type = "BatchNorm";
        bnParams.name = "bn";
        bnParams.set("has_weight", false);
        bnParams.set("has_bias", false);
        bnParams.blobs.push_back(meanBlob);
        bnParams.blobs.push_back(varBlob);
        net.addLayerToPrev(bnParams.name, bnParams.type, bnParams);
    }
    {
        LayerParams scaleParams;
        scaleParams.type = "Scale";
        scaleParams.name = "scale";
        scaleParams.set("has_bias", false);
        scaleParams.blobs.push_back(scaleWeights);
        net.addLayerToPrev(scaleParams.name, scaleParams.type, scaleParams);
    }

    Mat input(4, 5, CV_32FC(numChannels));
    randu(input, 0, 1);

    net.setPreferableBackend(backendId);
    net.setPreferableTarget(targetId);

    // First pass: batch of one image — reference output (fusion may occur).
    net.setInput(blobFromImage(input));
    Mat ref = net.forward();

    // Second pass: batch of two copies of the same image. The changed batch
    // size triggers network reinitialization, so BatchNorm must restore its
    // original (pre-fusion) weights before running again.
    net.setInput(blobFromImages(std::vector<Mat>(2, input)));
    Mat out = net.forward();

    // Each sample in the batched output must match the single-image reference.
    for (int sampleIdx = 0; sampleIdx < 2; ++sampleIdx)
    {
        std::vector<Range> ranges(4, Range::all());
        ranges[0].start = sampleIdx;
        ranges[0].end = sampleIdx + 1;
        normAssert(out(ranges), ref);
    }
}
// Instantiate Layer_Test_BatchNorm across every available DNN backend/target pair.
INSTANTIATE_TEST_CASE_P(/**/, Layer_Test_BatchNorm, dnnBackendsAndTargets());
}}
// namespace
编辑
预览
Markdown
is supported
0%
请重试
或
添加新附件
.
添加附件
取消
You are about to add
0
people
to the discussion. Proceed with caution.
先完成此消息的编辑!
取消
想要评论请
注册
或
登录