Commit 6605f3f3
Authored June 14, 2017 by emailweixu; committed via GitHub on June 14, 2017.
Merge pull request #2400 from emailweixu/param_initializer
Parameter initializer in V2 API
Parents: 91f82aba, dd2cbb63
Showing 4 changed files with 64 additions and 20 deletions.
python/paddle/trainer/config_parser.py            +30  −19
python/paddle/trainer_config_helpers/attrs.py      +8   −1
python/paddle/v2/parameters.py                     +4   −0
python/paddle/v2/tests/test_parameters.py         +22   −0
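Before the per-file hunks, here is a minimal usage sketch of what this merge enables, modeled on the new test_initializer test at the end of this diff. It is a sketch only: it assumes the 2017-era V2 API used throughout the changed files and an already-initialized py_paddle runtime, and the parameter name "fc.w" and the (3, 2) shape are simply the values the test uses.

    import numpy
    import paddle.v2.layer as layer
    import paddle.v2.data_type as data_type
    import paddle.v2.parameters as parameters
    from paddle.v2.attr import ParamAttr

    # An initializer is any callable that takes the parameter name and returns
    # a numpy array holding that parameter's initial value (shape must match).
    def ones_initializer(name):
        return numpy.ones((3, 2), dtype=numpy.float32)

    x = layer.data(name="x", type=data_type.dense_vector(3))
    y = layer.fc(x,
                 size=2,
                 bias_attr=False,
                 param_attr=ParamAttr(name="fc.w", initializer=ones_initializer))

    # create() looks up the initializer registered for "fc.w" and seeds its value.
    params = parameters.create(y)
    print(params["fc.w"])  # a 3x2 array of ones

The callable travels from ParamAttr into the config parser's registry and is finally invoked by parameters.create(), which is exactly the chain the four files below wire up.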
python/paddle/trainer/config_parser.py

@@ -126,6 +126,7 @@ def init_config_environment(
         g_config=TrainerConfig(),
         g_layer_map={},
         g_parameter_map={},
+        g_parameter_initializer_map={},
         g_extended_config_funcs={},
         # store command args of paddle_trainer

@@ -439,22 +440,22 @@ def model_type(name):
 @config_class
 class Bias(Cfg):
-    def __init__(self,
-                 parameter_name=None,
-                 learning_rate=None,
-                 momentum=None,
-                 decay_rate=None,
-                 decay_rate_l1=None,
-                 initial_mean=None,
-                 initial_std=None,
-                 initial_strategy=None,
-                 initial_smart=None,
-                 num_batches_regularization=None,
-                 sparse_remote_update=None,
-                 gradient_clipping_threshold=None,
-                 is_static=None,
-                 is_shared=None, ):
+    def __init__(self,
+                 parameter_name=None,
+                 learning_rate=None,
+                 momentum=None,
+                 decay_rate=None,
+                 decay_rate_l1=None,
+                 initial_mean=None,
+                 initial_std=None,
+                 initial_strategy=None,
+                 initial_smart=None,
+                 num_batches_regularization=None,
+                 sparse_remote_update=None,
+                 gradient_clipping_threshold=None,
+                 is_static=None,
+                 is_shared=None,
+                 initializer=None):
         self.add_keys(locals())

@@ -465,6 +466,7 @@ class Input(Cfg):
             self,
             input_layer_name,
             parameter_name=None,
+            initializer=None,
             learning_rate=None,
             momentum=None,
             decay_rate=None,

@@ -521,6 +523,7 @@ class Projection(Input):
             initial_std=None,
             initial_strategy=None,
             initial_smart=None,
+            initializer=None,
             num_batches_regularization=None,
             sparse_remote_update=None,
             sparse_update=None,

@@ -1479,7 +1482,8 @@ class LayerBase(object):
                 gradient_clipping_threshold=bias.gradient_clipping_threshold,
                 is_static=bias.is_static,
-                is_shared=bias.is_shared, )
+                is_shared=bias.is_shared,
+                initializer=bias.initializer)
         if for_self:
             self.config.bias_parameter_name = bias.parameter_name
         else:

@@ -1536,7 +1540,8 @@ class LayerBase(object):
             format=format,
             is_static=input_config.is_static,
             is_shared=input_config.is_shared,
-            update_hooks=input_config.update_hooks)
+            update_hooks=input_config.update_hooks,
+            initializer=input_config.initializer)

     def set_layer_size(self, size):
         if self.config.size == 0:

@@ -3221,7 +3226,8 @@ def Parameter(name,
               need_compact=None,
               is_static=None,
               is_shared=None,
-              update_hooks=None):
+              update_hooks=None,
+              initializer=None):
     config_assert(name not in g_parameter_map,
                   'Duplicated parameter name: ' + name)

@@ -3309,6 +3315,11 @@ def Parameter(name,
             para.update_hooks.extend(update_hooks)

     g_parameter_map[name] = para
+    if initializer is not None:
+        config_assert(
+            callable(initializer),
+            "parameter initializer should be a callable object")
+        g_parameter_initializer_map[name] = initializer


 @config_func
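Taken together, the config_parser.py hunks add one thing: a module-level name-to-callable registry (g_parameter_initializer_map) that Parameter() fills in after checking the value is callable, with the initializer keyword threaded through Bias, Input, Projection, and the two LayerBase call sites so it can reach Parameter() at all. A standalone sketch of that registry pattern, in plain Python rather than the Paddle functions themselves (register_parameter is a made-up name for illustration):

    import numpy

    # Analogous to the new g_parameter_initializer_map above.
    g_parameter_initializer_map = {}

    def register_parameter(name, initializer=None):
        # Mirrors the new check in Parameter(): only callables are accepted,
        # and they are stored under the parameter's name for later lookup.
        if initializer is not None:
            assert callable(initializer), \
                "parameter initializer should be a callable object"
            g_parameter_initializer_map[name] = initializer

    register_parameter("fc.w",
                       lambda name: numpy.zeros((3, 2), dtype=numpy.float32))
    assert "fc.w" in g_parameter_initializer_map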
python/paddle/trainer_config_helpers/attrs.py

@@ -95,6 +95,10 @@ class ParameterAttribute(object):
     :param sparse_update: Enable sparse update for this parameter. It will
                           enable both local and remote sparse update.
     :type sparse_update: bool
+    :param initializer: If not None, it should be a callable object which accepts
+                        a parameter name and returns numpy array for the initial
+                        value of the parameter
+    :param initializer: callable object
     """

     def __init__(self,

@@ -109,7 +113,8 @@ class ParameterAttribute(object):
                  learning_rate=None,
                  momentum=None,
                  gradient_clipping_threshold=None,
-                 sparse_update=False):
+                 sparse_update=False,
+                 initializer=None):
         self.attr = {}

         if is_static:

@@ -161,6 +166,8 @@ class ParameterAttribute(object):
            is_compatible_with(gradient_clipping_threshold, float):
             self.attr['gradient_clipping_threshold'] = \
                 gradient_clipping_threshold
+        if initializer is not None:
+            self.attr['initializer'] = initializer

     def set_default_parameter_name(self, name):
         """
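The docstring added above defines the contract: an initializer is a callable that receives the parameter name and returns a numpy array with the initial value; ParameterAttribute simply stores it under attr['initializer']. A hedged sketch of such a callable, where the Gaussian scale and the (784, 100) shape are arbitrary placeholders and must match the real parameter's shape:

    import numpy as np
    from paddle.trainer_config_helpers.attrs import ParameterAttribute

    def gaussian_initializer(name):
        # Per the docstring: take the parameter name, return a numpy array
        # holding the initial value for that parameter.
        return np.random.normal(
            loc=0.0, scale=0.01, size=(784, 100)).astype(np.float32)

    attr = ParameterAttribute(name="hidden.w", initializer=gaussian_initializer)
    # As the second hunk shows, the callable is kept in the attr dict.
    assert attr.attr['initializer'] is gaussian_initializer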
python/paddle/v2/parameters.py

 import numpy as np
 import py_paddle.swig_paddle as api
 from paddle.proto.ParameterConfig_pb2 import ParameterConfig
+import paddle.trainer.config_parser as cp
 import struct
 import tarfile
 import cStringIO

@@ -18,8 +19,11 @@ def create(layers):
     """
     topology = Topology(layers)
     pool = Parameters()
+    initializers = cp.g_parameter_initializer_map
     for param in topology.proto().parameters:
         pool.__append_config__(param)
+        if param.name in initializers:
+            pool[param.name] = initializers[param.name](param.name)
     return pool
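So create() now consults the registry once per parameter and calls the matching initializer with the parameter's name; parameters with no registered initializer are left to Paddle's usual initialization. A small standalone sketch of that lookup, with plain dicts standing in for the registry and the Parameters pool:

    import numpy

    initializers = {"fc.w": lambda name: numpy.ones((3, 2), dtype=numpy.float32)}
    param_names = ["fc.w", "fc.bias"]

    pool = {}
    for name in param_names:
        if name in initializers:
            # Matches the new branch in create(): invoke the callable with the name.
            pool[name] = initializers[name](name)

    # "fc.bias" has no registered initializer, so it would keep the default init.
    assert "fc.bias" not in pool and pool["fc.w"].shape == (3, 2)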
python/paddle/v2/tests/test_parameters.py

@@ -11,6 +11,9 @@ except ImportError:
     sys.exit(0)

 import paddle.v2.parameters as parameters
+import paddle.v2.data_type as data_type
+import paddle.v2.layer as layer
+from paddle.v2.attr import ParamAttr
 from paddle.proto.ParameterConfig_pb2 import ParameterConfig
 import random
 import cStringIO

@@ -55,6 +58,25 @@ class TestParameters(unittest.TestCase):
             p1 = params_dup.get(name)
             self.assertTrue(numpy.isclose(p0, p1).all())

+    def test_initializer(self):
+        def initializer(name):
+            assert name == "fc.w"
+            mat = numpy.ones((3, 2), dtype=numpy.float32)
+            mat[1, 1] = 2
+            return mat
+
+        x = layer.data(name="x", type=data_type.dense_vector(3))
+        y = layer.fc(x,
+                     size=2,
+                     bias_attr=False,
+                     param_attr=ParamAttr(
+                         name="fc.w", initializer=initializer))
+        params = parameters.create(y)
+        val = params["fc.w"]
+        assert val.shape == (3, 2)
+        expected = numpy.array([[1, 1], [1, 2], [1, 1]], numpy.float32)
+        assert numpy.logical_and.reduce(numpy.reshape(val == expected, 6))
+

 if __name__ == '__main__':
     unittest.main()