BaiXuePrincess / Paddle (forked from PaddlePaddle / Paddle)
Commit 6248e566
Authored June 15, 2017 by zlx

merge from remote

Parents: 5405dc0a, f2da10bf
Showing 6 changed files with 70 additions and 26 deletions (+70 -26)
go/pserver/cclient/CMakeLists.txt              +2  -2
go/pserver/cclient/test/CMakeLists.txt         +4  -4
python/paddle/trainer/config_parser.py         +30 -19
python/paddle/trainer_config_helpers/attrs.py  +8  -1
python/paddle/v2/parameters.py                 +4  -0
python/paddle/v2/tests/test_parameters.py      +22 -0
未找到文件。
go/pserver/cclient/CMakeLists.txt

@@ -13,8 +13,8 @@ go_library(paddle_pserver_cclient STATIC)
 if(PROJ_ROOT)
   add_custom_command(OUTPUT ${PROJ_ROOT}/paddle/trainer/libpaddle_pserver_cclient.a
-    COMMAND cp ${CMAKE_BINARY_DIR}/go/pserver/cclient/libpaddle_pserver_cclient.h ${PROJ_ROOT}/paddle/trainer/
-    COMMAND cp ${CMAKE_BINARY_DIR}/go/pserver/cclient/libpaddle_pserver_cclient.a ${PROJ_ROOT}/paddle/trainer/
+    COMMAND cp ${CMAKE_CURRENT_BINARY_DIR}/libpaddle_pserver_cclient.h ${PROJ_ROOT}/paddle/trainer/
+    COMMAND cp ${CMAKE_CURRENT_BINARY_DIR}/libpaddle_pserver_cclient.a ${PROJ_ROOT}/paddle/trainer/
     WORKING_DIRECTORY ${PROJ_ROOT}/paddle
     DEPENDS paddle_pserver_cclient)
   add_custom_target(paddle_pserver_cclient_lib ALL DEPENDS ${PROJ_ROOT}/paddle/trainer/libpaddle_pserver_cclient.a)
go/pserver/cclient/test/CMakeLists.txt

@@ -12,9 +12,9 @@ else()
 endif()

 if(PROJ_ROOT)
-  include_directories(${CMAKE_BINARY_DIR}/go/pserver/cclient/)
-  target_link_libraries(main ${CMAKE_BINARY_DIR}/go/pserver/cclient/libpaddle_pserver_cclient.a pthread)
-  target_link_libraries(test_cclient ${CMAKE_BINARY_DIR}/go/pserver/cclient/libpaddle_pserver_cclient.a pthread)
+  include_directories(${CMAKE_CURRENT_BINARY_DIR}/..)
+  target_link_libraries(main ${CMAKE_CURRENT_BINARY_DIR}/../libpaddle_pserver_cclient.a pthread)
+  target_link_libraries(test_cclient ${CMAKE_CURRENT_BINARY_DIR}/../libpaddle_pserver_cclient.a pthread)
 else(PROJ_ROOT)
   include_directories(${CMAKE_BINARY_DIR})
   target_link_libraries(main ${CMAKE_BINARY_DIR}/libpaddle_pserver_cclient.a pthread)
python/paddle/trainer/config_parser.py

@@ -126,6 +126,7 @@ def init_config_environment(
         g_config=TrainerConfig(),
         g_layer_map={},
         g_parameter_map={},
+        g_parameter_initializer_map={},
         g_extended_config_funcs={},

         # store command args of paddle_trainer
@@ -439,8 +440,7 @@ def model_type(name):
 @config_class
 class Bias(Cfg):
-    def __init__(
-            self,
+    def __init__(self,
                  parameter_name=None,
                  learning_rate=None,
                  momentum=None,
@@ -454,7 +454,8 @@ class Bias(Cfg):
                  sparse_remote_update=None,
                  gradient_clipping_threshold=None,
                  is_static=None,
-                 is_shared=None, ):
+                 is_shared=None,
+                 initializer=None):
         self.add_keys(locals())
@@ -465,6 +466,7 @@ class Input(Cfg):
             self,
             input_layer_name,
             parameter_name=None,
+            initializer=None,
             learning_rate=None,
             momentum=None,
             decay_rate=None,
@@ -521,6 +523,7 @@ class Projection(Input):
             initial_std=None,
             initial_strategy=None,
             initial_smart=None,
+            initializer=None,
             num_batches_regularization=None,
             sparse_remote_update=None,
             sparse_update=None,
@@ -1479,7 +1482,8 @@ class LayerBase(object):
                 gradient_clipping_threshold=bias.gradient_clipping_threshold,
                 is_static=bias.is_static,
-                is_shared=bias.is_shared, )
+                is_shared=bias.is_shared,
+                initializer=bias.initializer)
             if for_self:
                 self.config.bias_parameter_name = bias.parameter_name
             else:
@@ -1536,7 +1540,8 @@ class LayerBase(object):
                 format=format,
                 is_static=input_config.is_static,
                 is_shared=input_config.is_shared,
-                update_hooks=input_config.update_hooks)
+                update_hooks=input_config.update_hooks,
+                initializer=input_config.initializer)

     def set_layer_size(self, size):
         if self.config.size == 0:
@@ -3221,7 +3226,8 @@ def Parameter(name,
               need_compact=None,
               is_static=None,
               is_shared=None,
-              update_hooks=None):
+              update_hooks=None,
+              initializer=None):
     config_assert(name not in g_parameter_map,
                   'Duplicated parameter name: ' + name)
@@ -3309,6 +3315,11 @@ def Parameter(name,
         para.update_hooks.extend([update_hooks])

     g_parameter_map[name] = para
+    if initializer is not None:
+        config_assert(
+            callable(initializer),
+            "parameter initializer should be a callable object")
+        g_parameter_initializer_map[name] = initializer


 @config_func
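For reference, the initializer argument threaded through Bias, Input, Projection, and Parameter() above is expected to be a callable that maps a parameter name to a numpy array holding that parameter's initial value; Parameter() rejects anything else via the added config_assert and records accepted callables in g_parameter_initializer_map. A minimal sketch of a conforming initializer (the shape, dtype, and uniform range below are illustrative assumptions, not taken from this diff):

import numpy as np

def scaled_uniform_initializer(name):
    # Contract from the hunks above: receive the parameter name,
    # return a numpy array with that parameter's initial value.
    # The (3, 2) shape and the value range are illustrative assumptions.
    return np.random.uniform(-0.1, 0.1, size=(3, 2)).astype(np.float32)

# Parameter() only registers callables; this mirrors the added config_assert.
assert callable(scaled_uniform_initializer)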
python/paddle/trainer_config_helpers/attrs.py

@@ -123,6 +123,10 @@ class ParameterAttribute(object):
     :param sparse_update: Enable sparse update for this parameter. It will
                           enable both local and remote sparse update.
     :type sparse_update: bool
+    :param initializer: If not None, it should be a callable object which accepts
+                        a parameter name and returns numpy array for the initial
+                        value of the parameter
+    :param initializer: callable object
     """

     def __init__(self,
@@ -138,7 +142,8 @@ class ParameterAttribute(object):
                  momentum=None,
                  gradient_clipping_threshold=None,
                  sparse_update=False,
-                 update_hooks=None):
+                 update_hooks=None,
+                 initializer=None):
         self.attr = {}

         if is_static:
@@ -190,6 +195,8 @@ class ParameterAttribute(object):
                 is_compatible_with(gradient_clipping_threshold, float):
             self.attr['gradient_clipping_threshold'] = \
                 gradient_clipping_threshold
+        if initializer is not None:
+            self.attr['initializer'] = initializer

         if update_hooks:
             self.attr['update_hooks'] = update_hooks
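On the user-facing side, the hunks above mean ParameterAttribute simply stores the supplied callable under self.attr['initializer'] when one is given. A minimal sketch, assuming ParameterAttribute's other arguments can be left at their defaults and using numpy only for the example array:

import numpy as np
from paddle.trainer_config_helpers.attrs import ParameterAttribute

def ones_initializer(name):
    # Callable object per the docstring: parameter name in, numpy array out.
    return np.ones((3, 2), dtype=np.float32)

attr = ParameterAttribute(name="fc.w", initializer=ones_initializer)
# Per the last hunk above, the callable is kept in the attribute dict:
print('initializer' in attr.attr)  # True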
python/paddle/v2/parameters.py

 import numpy as np
 import py_paddle.swig_paddle as api
 from paddle.proto.ParameterConfig_pb2 import ParameterConfig
+import paddle.trainer.config_parser as cp
 import struct
 import tarfile
 import cStringIO
@@ -18,8 +19,11 @@ def create(layers):
     """
     topology = Topology(layers)
     pool = Parameters()
+    initializers = cp.g_parameter_initializer_map
     for param in topology.proto().parameters:
         pool.__append_config__(param)
+        if param.name in initializers:
+            pool[param.name] = initializers[param.name](param.name)
     return pool
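End to end, the lookup added to create() means any parameter whose name was registered in g_parameter_initializer_map comes back from create() pre-filled with the callable's output. A hedged usage sketch with the paddle.v2 API, mirroring the new unit test that follows (the names x, y, and "fc.w" are taken from that test; the fill value is an illustrative assumption):

import numpy
import paddle.v2.layer as layer
import paddle.v2.data_type as data_type
import paddle.v2.parameters as parameters
from paddle.v2.attr import ParamAttr

def init_fc_w(name):
    # Return the initial value for the "fc.w" parameter of the layer below.
    return numpy.full((3, 2), 0.5, dtype=numpy.float32)

x = layer.data(name="x", type=data_type.dense_vector(3))
y = layer.fc(x,
             size=2,
             bias_attr=False,
             param_attr=ParamAttr(name="fc.w", initializer=init_fc_w))

params = parameters.create(y)  # create() consults the initializer map
print(params["fc.w"])          # 3x2 array filled with 0.5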
python/paddle/v2/tests/test_parameters.py

@@ -11,6 +11,9 @@ except ImportError:
     sys.exit(0)

 import paddle.v2.parameters as parameters
+import paddle.v2.data_type as data_type
+import paddle.v2.layer as layer
+from paddle.v2.attr import ParamAttr
 from paddle.proto.ParameterConfig_pb2 import ParameterConfig
 import random
 import cStringIO
@@ -55,6 +58,25 @@ class TestParameters(unittest.TestCase):
             p1 = params_dup.get(name)
             self.assertTrue(numpy.isclose(p0, p1).all())

+    def test_initializer(self):
+        def initializer(name):
+            assert name == "fc.w"
+            mat = numpy.ones((3, 2), dtype=numpy.float32)
+            mat[1, 1] = 2
+            return mat
+
+        x = layer.data(name="x", type=data_type.dense_vector(3))
+        y = layer.fc(x,
+                     size=2,
+                     bias_attr=False,
+                     param_attr=ParamAttr(
+                         name="fc.w", initializer=initializer))
+        params = parameters.create(y)
+        val = params["fc.w"]
+        assert val.shape == (3, 2)
+        expected = numpy.array([[1, 1], [1, 2], [1, 1]], numpy.float32)
+        assert numpy.logical_and.reduce(numpy.reshape(val == expected, 6))
+
 if __name__ == '__main__':
     unittest.main()