Crayon鑫 / Paddle (forked from PaddlePaddle / Paddle, in sync with the upstream project)
Commit 1eab8cce
Authored June 21, 2017 by zlx
modify the annotations of HookAttribute, Variable declaration
Parent 15bf6e05
Showing 2 changed files with 29 additions and 22 deletions (+29 −22).

paddle/parameter/ParameterUpdaterHook.cpp       +16 −15
python/paddle/trainer_config_helpers/attrs.py   +13 −7
paddle/parameter/ParameterUpdaterHook.cpp
@@ -31,9 +31,9 @@ namespace paddle {
 /**
  * The static pruning hook
- * Static means user specific a sparsity_ratio before training start, and the
+ * Static means user specify a sparsity_ratio before training started, and the
  * network will prune the parameters based on the sparsity_ratio. More deatils
- * can see https://arxiv.org/pdf/1506.02626.pdf.
+ * can be found https://arxiv.org/pdf/1506.02626.pdf.
  */

 class StaticPruningHook : public IParameterUpdaterHook {

@@ -57,29 +57,31 @@ public:
   }

   void generateMask(Parameter *para) {
-    VectorPtr vec = para->getBuf(PARAMETER_VALUE);
-    maskTemp_ = Vector::create(para->getSize(), false);
-    maskTemp_->zeroMem();
-    real *dataPtr = maskTemp_->getData();
+    VectorPtr maskTemp = Vector::create(para->getSize(), false);
+    maskTemp->zeroMem();
+    real *maskTempData = maskTemp->getData();
     size_t nonZeroNum = para->getSize() * (1 - sparsityRatio_);

-    VectorPtr vecCpu = Vector::create(para->getSize(), false);
-    vecCpu->copyFrom(*vec);
+    VectorPtr paraVec = para->getBuf(PARAMETER_VALUE);
+    VectorPtr paraCpuCopy = Vector::create(para->getSize(), false);
+    paraCpuCopy->copyFrom(*paraVec);
     std::vector<std::pair<real, size_t>> param;

     for (size_t i = 0; i < para->getSize(); i++)
-      param.push_back(std::make_pair(fabs(vecCpu->getData()[i]), i));
+      param.push_back(std::make_pair(fabs(paraCpuCopy->getData()[i]), i));

     std::partial_sort(
         param.begin(), param.begin() + nonZeroNum, param.end(), sortPairAscend);
     for (size_t i = 0; i < nonZeroNum; i++)
-      dataPtr[param[i].second] = 1.0;
+      maskTempData[param[i].second] = 1.0;

     // Currently just use a mask vector for hack.
     if (para->useGpu()) {
       maskVec_ = Vector::create(para->getSize(), para->useGpu());
-      maskVec_->copyFrom(*maskTemp_);
+      maskVec_->copyFrom(*maskTemp);
     } else {
-      maskVec_ = maskTemp_;
+      maskVec_ = maskTemp;
     }
   }

@@ -91,15 +93,14 @@ public:
     VLOG(3) << "Initialize Parameter " << para;
     SetDevice device(para->getDeviceId());

-    auto &vec = para->getBuf(PARAMETER_VALUE);
-    vec->dotMul(*maskVec_);
+    auto &paraVec = para->getBuf(PARAMETER_VALUE);
+    paraVec->dotMul(*maskVec_);
   }

 private:
   SameThreadChecker updateThreadChecker_;
   std::atomic<size_t> initCount_;
   VectorPtr maskVec_;
-  VectorPtr maskTemp_;
   real sparsityRatio_;
 };
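For orientation, here is a minimal NumPy sketch (not Paddle code; the function name and all variables are illustrative) of the mask that generateMask is meant to build according to the docstring and the referenced paper: keep the (1 - sparsity_ratio) fraction of largest-magnitude parameter values and zero out the rest, after which the hook applies the mask to PARAMETER_VALUE with dotMul.

import numpy as np

def make_pruning_mask(weights, sparsity_ratio):
    """Return a 0/1 mask that zeroes roughly `sparsity_ratio` of the weights."""
    flat = np.abs(weights).ravel()
    non_zero_num = int(flat.size * (1.0 - sparsity_ratio))   # mirrors nonZeroNum
    mask = np.zeros(flat.size, dtype=weights.dtype)
    keep = np.argsort(flat)[::-1][:non_zero_num]             # largest-|w| entries survive
    mask[keep] = 1.0
    return mask.reshape(weights.shape)

w = np.random.randn(4, 4).astype(np.float32)
mask = make_pruning_mask(w, sparsity_ratio=0.6)
pruned = w * mask          # the hook applies its mask via vec->dotMul(*maskVec_)
print(mask.mean())         # ~0.4: roughly 40% of the weights are kept

With sparsity_ratio = 0.6, as in the example added to the docstring below, about 60% of the mask entries are zero.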
python/paddle/trainer_config_helpers/attrs.py
@@ -58,11 +58,17 @@ def is_compatible_with(x, Type):
 class HookAttribute(object):
     """
-    Hook Attribute object. The hook is an auxiliary operation that occurs
-    during network propagation.
+    Hook Attribute object. As a member of ParameterAttribute class, the hook is an auxiliary operation that occurs
+    during training process of a layer with parameters, such as img_conv layer, fc layer.
+
     NOTE: IT IS A HIGH LEVEL USER INTERFACE.

-    :param type: Hook type, eg: 'pruning'
+    :param type: Hook type, currently supported types:
+                 'pruning' : user specify a sparsity_ratio before training started, and the
+                             network will prune the parameters based on the sparsity_ratio.
+                             eg: The definition of Hook object can be hk = HookAttribute('pruning', 0.6)
+                             The specific usage can be paddle.layer.img_conv(input=img, filter_size=3,
+                                                       num_channels=3, num_filters=64,
+                                                       param_attr=ParameterAttribute(update_hooks=hk) )
+                             The pruning deatils can be found https://arxiv.org/pdf/1506.02626.pdf
     :type type: string

     :param sparsity_ratio: Must be specified if hook type is 'pruning',

@@ -78,7 +84,7 @@ class HookAttribute(object):
             assert is_compatible_with(
                 self.sparsity_ratio,
                 float), 'sparisity_ratio must be float type'
-            assert self.sparsity_ratio <= 1 and self.sparsity_ratio >= 0, 'sparisity must be a flaot between [0, 1] '
+            assert self.sparsity_ratio <= 1 and self.sparsity_ratio >= 0, 'sparisity_ratio must be a float between [0, 1] '

     def __call__(self):
         return ParameterHook(self.type, sparsity_ratio=self.sparsity_ratio)
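To show what the validation in the second hunk accepts, here is a small stand-alone sketch (hypothetical and simplified: Paddle's is_compatible_with and ParameterHook are replaced with a plain isinstance check and a dict) of how a 'pruning' hook's sparsity_ratio is checked and then handed on:

class PruningHookSketch(object):
    """Illustrative stand-in for HookAttribute's 'pruning' validation."""

    def __init__(self, type, sparsity_ratio=None):
        self.type = type
        self.sparsity_ratio = sparsity_ratio
        if self.type == 'pruning':
            assert isinstance(self.sparsity_ratio, float), \
                'sparsity_ratio must be float type'
            assert 0.0 <= self.sparsity_ratio <= 1.0, \
                'sparsity_ratio must be a float between [0, 1]'

    def __call__(self):
        # The real class returns a ParameterHook config; a dict stands in here.
        return dict(type=self.type, sparsity_ratio=self.sparsity_ratio)

hk = PruningHookSketch('pruning', 0.6)   # the docstring's example value
print(hk())                              # {'type': 'pruning', 'sparsity_ratio': 0.6}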