BaiXuePrincess / Paddle (forked from PaddlePaddle / Paddle)

Commit 7d0355cd, authored on May 26, 2017 by xuwei06

Fix V2 API

Parent: da83d286
Showing 11 changed files with 278 additions and 623 deletions (+278 −623):
    paddle/parameter/Parameter.h                                   +1    -0
    python/paddle/trainer/config_parser.py                         +17   -14
    python/paddle/trainer_config_helpers/config_parser_utils.py    +18   -4
    python/paddle/trainer_config_helpers/layers.py                 +6    -0
    python/paddle/v2/evaluator.py                                  +1    -13
    python/paddle/v2/layer.py                                      +204  -542
    python/paddle/v2/networks.py                                   +1    -14
    python/paddle/v2/tests/test_layer.py                           +9    -9
    python/paddle/v2/tests/test_rnn_layer.py                       +7    -0
    python/paddle/v2/tests/test_topology.py                        +6    -6
    python/paddle/v2/topology.py                                   +8    -21
paddle/parameter/Parameter.h

@@ -324,6 +324,7 @@ protected:
   std::vector<std::shared_ptr<IParameterUpdaterHook>> updaterHooks_;

 public:
   void setSharedCount(int cnt) { sharedCount_ = cnt; }
   int getSharedCount() { return sharedCount_; }

+  bool isSparse() { return config_.is_sparse(); }
python/paddle/trainer/config_parser.py

@@ -3371,7 +3371,7 @@ def make_importer(config_dir, config_args):
     return Import


-settings = dict(
+default_settings = dict(
     batch_size=None,
     mini_batch_size=None,
     algorithm='async_sgd',

@@ -3404,6 +3404,8 @@ settings = dict(
     adam_beta2=0.999,
     adam_epsilon=1e-8, )

+settings = copy.deepcopy(default_settings)
+
 settings_deprecated = dict(usage_ratio=1., )

 trainer_settings = dict(

@@ -3544,10 +3546,8 @@ def update_g_config():
     return g_config


-def parse_config(trainer_config, config_arg_str):
+def begin_parse(config_arg_str=''):
     '''
-    @param trainer_config: can be a string of config file name or a function name
-        with config logic
     @param config_arg_str: a string of the form var1=val1,var2=val2. It will be
         passed to config script as a dictionary CONFIG_ARGS
     '''

@@ -3555,12 +3555,23 @@ def parse_config(trainer_config, config_arg_str):
     for hook in _parse_config_hooks:
         hook()

-    config_args = {}
-
     logger.findCaller = find_caller
     logger.fatal = my_fatal

+    g_config.model_config.type = "nn"
+
+    global g_current_submodel, g_root_submodel
+    g_root_submodel = g_config.model_config.sub_models.add()
+    g_root_submodel.name = 'root'
+    g_root_submodel.is_recurrent_layer_group = False
+    g_current_submodel = g_root_submodel
+
+
+def parse_config(trainer_config, config_arg_str):
+    begin_parse(config_arg_str)
+
+    config_args = {}
+
     if config_arg_str:
         config_args = dict([f.split('=') for f in config_arg_str.split(',')])

@@ -3573,14 +3584,6 @@ def parse_config(trainer_config, config_arg_str):
         extension_module = importlib(extension_module_name)
         g_extended_config_funcs = extension_module.get_config_funcs(g_config)

-    g_config.model_config.type = 'nn'
-
-    global g_current_submodel, g_root_submodel
-    g_root_submodel = g_config.model_config.sub_models.add()
-    g_root_submodel.name = 'root'
-    g_root_submodel.is_recurrent_layer_group = False
-    g_current_submodel = g_root_submodel
-
     if hasattr(trainer_config, '__call__'):
         trainer_config.func_globals.update(
             make_config_environment("", config_args))
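These hunks split the old parse_config into two steps: begin_parse(), which resets the global parser state (parse hooks, logger overrides, model type, the root submodel) and can now be called on its own, and parse_config(), which invokes begin_parse() before evaluating the trainer config. A minimal sketch of the resulting call flow, assuming this era of Paddle (Python 2); the config function my_network is hypothetical:

    # Sketch only: parse_config() now always runs begin_parse() first, so the
    # global parser state is rebuilt for every parse.
    from paddle.trainer.config_parser import parse_config
    from paddle.trainer_config_helpers import data_layer, fc_layer, outputs

    def my_network():                       # hypothetical config function
        ipt = data_layer(name='x', size=8)
        hid = fc_layer(input=ipt, size=4)
        outputs(hid)

    proto = parse_config(my_network, '')    # begin_parse() happens inside
    print(proto.model_config.type)          # "nn", set by begin_parse()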
python/paddle/trainer_config_helpers/config_parser_utils.py

@@ -12,15 +12,19 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

+import copy
 import paddle.trainer.config_parser as config_parser
+from paddle.proto.TrainerConfig_pb2 import OptimizationConfig
 '''
 This file is a wrapper of formal config_parser. The main idea of this file is to
 separete different config logic into different function, such as network configuration
 and optimizer configuration.
 '''

 __all__ = [
-    "parse_trainer_config", "parse_network_config", "parse_optimizer_config"
+    "parse_trainer_config", "parse_network_config", "parse_optimizer_config",
+    "reset_parser"
 ]

@@ -34,5 +38,15 @@ def parse_network_config(network_conf, config_arg_str=''):
 def parse_optimizer_config(optimizer_conf, config_arg_str=''):
-    config = config_parser.parse_config(optimizer_conf, config_arg_str)
-    return config.opt_config
+    config_parser.settings = copy.deepcopy(config_parser.default_settings)
+    optimizer_conf()
+    opt_config = OptimizationConfig()
+    for k, v in config_parser.settings.iteritems():
+        if v is None:
+            continue
+        opt_config.__setattr__(k, v)
+    return opt_config
+
+
+def reset_parser():
+    config_parser.begin_parse()
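The rewritten parse_optimizer_config no longer runs a full parse_config pass: it copies default_settings into config_parser.settings, calls the optimizer config callable so it can fill that dict, and then copies every non-None entry onto an OptimizationConfig proto. reset_parser() is a thin alias for config_parser.begin_parse(). A hedged sketch of how a caller might exercise the new function; the callable my_optimizer_conf is hypothetical:

    # Sketch only: the optimizer callable mutates the module-level settings
    # dict, which is what parse_optimizer_config now reads back.
    import paddle.trainer.config_parser as config_parser
    from paddle.trainer_config_helpers.config_parser_utils import (
        parse_optimizer_config, reset_parser)

    def my_optimizer_conf():
        config_parser.settings['batch_size'] = 128
        config_parser.settings['algorithm'] = 'sgd'

    opt_proto = parse_optimizer_config(my_optimizer_conf)  # OptimizationConfig proto
    print(opt_proto.batch_size)                            # 128

    reset_parser()   # equivalent to config_parser.begin_parse()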
python/paddle/trainer_config_helpers/layers.py

@@ -285,6 +285,7 @@ class LayerOutput(object):
         assert size is not None
         assert LayerType.is_layer_type(layer_type)
         self.name = name
+        self.full_name = MakeLayerNameInSubmodel(name)
         self.layer_type = layer_type
         if parents is not None and type(parents) != list:
             parents = [parents]

@@ -3489,6 +3490,11 @@ def recurrent_group(step,
     RecurrentLayerGroupEnd(name=name)

+    for layer_out in layer_outs:
+        # The previous full_name is the name inside the rnn group;
+        # we need a full_name outside the rnn group.
+        layer_out.full_name = MakeLayerNameInSubmodel(layer_out.name)
+
     if len(layer_outs) == 1:
         return layer_outs[0]
     else:
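LayerOutput now records full_name, the submodel-qualified name produced by MakeLayerNameInSubmodel, and recurrent_group re-computes full_name for its outputs outside the rnn group so the enclosing network can refer to them. A small sketch, assuming a plain config function parsed through parse_network_config; the network and layer names below are illustrative only:

    # Sketch: inspect the new LayerOutput.full_name attribute while a config
    # is being parsed.
    from paddle.trainer_config_helpers import data_layer, fc_layer, outputs
    from paddle.trainer_config_helpers.config_parser_utils import parse_network_config

    def network():                          # hypothetical config function
        ipt = data_layer(name='x', size=8)
        hid = fc_layer(input=ipt, size=4, name='hid')
        print(hid.full_name)  # submodel-qualified; plain 'hid' in the root submodel
        outputs(hid)

    parse_network_config(network)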
python/paddle/v2/evaluator.py

@@ -25,21 +25,9 @@ def initialize():
     for __ev_name__ in filter(lambda x: x.endswith('_evaluator'), evs.__all__):
         __ev__ = getattr(evs, __ev_name__)
-        if hasattr(__ev__, 'argspec'):
-            argspec = __ev__.argspec
-        else:
-            argspec = inspect.getargspec(__ev__)
-        parent_names = filter(lambda x: x in ['input', 'label', 'weight'],
-                              argspec.args)
-        v2_ev = __convert_to_v2__(
-            __ev_name__,
-            parent_names=parent_names,
-            is_default_name='name' in argspec.args,
-            attach_parent=True)
-
         __new_name__ = convert_to_new_name(__ev_name__)
-        globals()[__new_name__] = v2_ev
+
+        globals()[__new_name__] = __ev__
         globals()[__new_name__].__name__ = __new_name__
         __all__.append(__new_name__)
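With the __convert_to_v2__ wrapping gone, each paddle.v2.evaluator symbol is the underlying trainer_config_helpers evaluator re-exported under its converted name, and it is applied to v2 layers directly, as the updated EvaluatorTest in test_layer.py below does. A hedged usage sketch mirroring that test; only the layer names 'img_demo' and 'lbl_demo' are invented here:

    import paddle.v2.activation as activation
    import paddle.v2.data_type as data_type
    import paddle.v2.evaluator as evaluator
    import paddle.v2.layer as layer

    img = layer.data(name='img_demo', type=data_type.dense_vector(784))
    out = layer.fc(input=img, size=10, act=activation.Softmax())
    lbl = layer.data(name='lbl_demo', type=data_type.integer_value(10))
    cost = layer.cross_entropy_cost(input=out, label=lbl)
    evaluator.classification_error(input=out, label=lbl)
    print(layer.parse_network(cost))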
python/paddle/v2/layer.py

(This diff is collapsed in the web view and not expanded here: +204 −542.)
python/paddle/v2/networks.py

@@ -24,20 +24,7 @@ def __initialize__():
         if each_subnetwork in ['inputs', 'outputs']:
             continue
         func = getattr(conf_nw, each_subnetwork)
-        if hasattr(func, 'argspec'):
-            argspec = func.argspec
-        else:
-            argspec = inspect.getargspec(func)
-        if each_subnetwork == 'simple_attention':
-            parents = ['encoded_sequence', 'encoded_proj', 'decoder_state']
-        else:
-            parents = filter(lambda x: x.startswith('input'), argspec.args)
-        assert len(parents) != 0, each_subnetwork
-        v2_subnet = __convert_to_v2__(
-            each_subnetwork,
-            parent_names=parents,
-            is_default_name='name' in argspec.args)
-        globals()[each_subnetwork] = v2_subnet
+        globals()[each_subnetwork] = func
         globals()[each_subnetwork].__name__ = each_subnetwork
         global __all__
         __all__.append(each_subnetwork)
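networks.py now simply re-binds each conf_nw subnetwork function into the v2 namespace (only adjusting its __name__) instead of regenerating it through __convert_to_v2__. A quick sanity-check sketch, under the assumption that conf_nw refers to paddle.trainer_config_helpers.networks:

    # Sketch: the v2 symbol should now be the very same function object as the
    # trainer_config_helpers implementation ('simple_attention' is one of the
    # subnetworks named in the removed code above).
    import paddle.v2.networks as networks
    import paddle.trainer_config_helpers.networks as conf_nw

    assert networks.simple_attention is conf_nw.simple_attention
    print(networks.simple_attention.__name__)   # 'simple_attention'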
python/paddle/v2/tests/test_layer.py

@@ -173,9 +173,9 @@ class OtherLayerTest(unittest.TestCase):
 class ProjOpTest(unittest.TestCase):
     def test_projection(self):
-        input = layer.data(name='data', type=data_type.dense_vector(784))
+        input = layer.data(name='data2', type=data_type.dense_vector(784))
         word = layer.data(
-            name='word', type=data_type.integer_value_sequence(10000))
+            name='word2', type=data_type.integer_value_sequence(10000))
         fc0 = layer.fc(input=input, size=100, act=activation.Sigmoid())
         fc1 = layer.fc(input=input, size=200, act=activation.Sigmoid())
         mixed0 = layer.mixed(

@@ -204,8 +204,8 @@ class ProjOpTest(unittest.TestCase):
         dotmul1 += dotmul

         context = layer.context_projection(input=fc0, context_len=5)
-        context0 = layer.mixed(size=100, input=context)
-        with layer.mixed(size=100) as context1:
+        context0 = layer.mixed(size=500, input=context)
+        with layer.mixed(size=500) as context1:
             context1 += context

         conv = layer.conv_projection(

@@ -231,8 +231,8 @@ class ProjOpTest(unittest.TestCase):
         print layer.parse_network(conv1)

     def test_operator(self):
-        ipt0 = layer.data(name='data', type=data_type.dense_vector(784))
-        ipt1 = layer.data(name='word', type=data_type.dense_vector(128))
+        ipt0 = layer.data(name='data1', type=data_type.dense_vector(784))
+        ipt1 = layer.data(name='word1', type=data_type.dense_vector(128))
         fc0 = layer.fc(input=ipt0, size=100, act=activation.Sigmoid())
         fc1 = layer.fc(input=ipt0, size=100, act=activation.Sigmoid())

@@ -261,7 +261,7 @@ class ProjOpTest(unittest.TestCase):
 class NetworkTests(unittest.TestCase):
     def test_vgg(self):
-        img = layer.data(name='pixel', type=data_type.dense_vector(784))
+        img = layer.data(name='pixel1', type=data_type.dense_vector(784))
         vgg_out = networks.small_vgg(input_image=img, num_channels=1, num_classes=2)
         print layer.parse_network(vgg_out)

@@ -269,12 +269,12 @@ class NetworkTests(unittest.TestCase):
 class EvaluatorTest(unittest.TestCase):
     def test_evaluator(self):
-        img = layer.data(name='pixel', type=data_type.dense_vector(784))
+        img = layer.data(name='pixel2', type=data_type.dense_vector(784))
         output = layer.fc(input=img,
                           size=10,
                           act=activation.Softmax(),
                           name='fc_here')
-        lbl = layer.data(name='label', type=data_type.integer_value(10))
+        lbl = layer.data(name='label2', type=data_type.integer_value(10))
         cost = layer.cross_entropy_cost(input=output, label=lbl)

         evaluator.classification_error(input=output, label=lbl)
python/paddle/v2/tests/test_rnn_layer.py

@@ -20,6 +20,8 @@ import paddle.v2.data_type as data_type
 import paddle.v2.layer as layer
 from paddle.trainer_config_helpers.config_parser_utils import \
     parse_network_config as parse_network
+from paddle.trainer_config_helpers.config_parser_utils import \
+    reset_parser


 class RNNTest(unittest.TestCase):

@@ -29,6 +31,7 @@ class RNNTest(unittest.TestCase):
         hidden_dim = 8

         def parse_old_rnn():
+            reset_parser()
             def step(y):
                 mem = conf_helps.memory(name="rnn_state", size=hidden_dim)
                 out = conf_helps.fc_layer(

@@ -48,6 +51,7 @@ class RNNTest(unittest.TestCase):
             return str(parse_network(test))

         def parse_new_rnn():
+            reset_parser()
             def new_step(y):
                 mem = layer.memory(name="rnn_state", size=hidden_dim)
                 out = layer.fc(input=[y, mem],

@@ -68,6 +72,7 @@ class RNNTest(unittest.TestCase):
                                 parse_new_rnn().splitlines(1))
         print ''.join(diff)

     def test_sequence_rnn_multi_input(self):
         dict_dim = 10
         word_dim = 8

@@ -75,6 +80,7 @@ class RNNTest(unittest.TestCase):
         label_dim = 3

         def parse_old_rnn():
+            reset_parser()
             def test():
                 data = conf_helps.data_layer(name="word", size=dict_dim)
                 label = conf_helps.data_layer(name="label", size=label_dim)

@@ -114,6 +120,7 @@ class RNNTest(unittest.TestCase):
             return str(parse_network(test))

         def parse_new_rnn():
+            reset_parser()
             data = layer.data(
                 name="word", type=data_type.dense_vector(dict_dim))
             label = layer.data(
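Each parse_old_rnn / parse_new_rnn helper now begins with reset_parser(), so the global config_parser state built by one parse does not leak into the next parse within the same test process. The pattern, sketched with a hypothetical pair of config functions:

    # Sketch of the test pattern: reset_parser() before each independent parse
    # discards layer/submodel state left over from the previous parse_network().
    # `old_config` and `new_config` are hypothetical config functions.
    from paddle.trainer_config_helpers.config_parser_utils import (
        parse_network_config as parse_network, reset_parser)

    def parse_old(old_config):
        reset_parser()
        return str(parse_network(old_config))

    def parse_new(new_config):
        reset_parser()
        return str(parse_network(new_config))

    # The test then diffs the two generated protobuf texts, e.g. with
    # difflib.unified_diff(parse_old(...).splitlines(1), parse_new(...).splitlines(1)).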
python/paddle/v2/tests/test_topology.py

@@ -46,8 +46,8 @@ class TestTopology(unittest.TestCase):
         self.assertEqual(label_data_type[1].dim, 10)

     def test_get_layer(self):
-        pixel = layer.data(name='pixel', type=data_type.dense_vector(784))
-        label = layer.data(name='label', type=data_type.integer_value(10))
+        pixel = layer.data(name='pixel2', type=data_type.dense_vector(784))
+        label = layer.data(name='label2', type=data_type.integer_value(10))
         hidden = layer.fc(input=pixel, size=100, act=conf_helps.SigmoidActivation())

@@ -56,14 +56,14 @@ class TestTopology(unittest.TestCase):
                               act=conf_helps.SoftmaxActivation())
         cost = layer.classification_cost(input=inference, label=label)
         topo = topology.Topology(cost)
-        pixel_layer = topo.get_layer("pixel")
-        label_layer = topo.get_layer("label")
+        pixel_layer = topo.get_layer("pixel2")
+        label_layer = topo.get_layer("label2")
         self.assertEqual(pixel_layer, pixel)
         self.assertEqual(label_layer, label)

     def test_parse(self):
-        pixel = layer.data(name='pixel', type=data_type.dense_vector(784))
-        label = layer.data(name='label', type=data_type.integer_value(10))
+        pixel = layer.data(name='pixel3', type=data_type.dense_vector(784))
+        label = layer.data(name='label3', type=data_type.integer_value(10))
         hidden = layer.fc(input=pixel, size=100, act=conf_helps.SigmoidActivation())
python/paddle/v2/topology.py

@@ -15,7 +15,7 @@
 import collections

 from paddle.proto.ModelConfig_pb2 import ModelConfig
+import paddle.trainer_config_helpers as conf_helps

 import layer as v2_layer

 __all__ = ['Topology']

@@ -94,31 +94,18 @@ class Topology(object):
         :param name:
         :return:
         """
-        result_layer = [None]
-
-        def __impl__(l):
-            if l.name == name:
-                result_layer[0] = l
-                return True  # break
-            return False
-
-        __bfs_travel__(__impl__, *self.layers)
-        if result_layer[0] is None:
-            raise ValueError("No such layer %s" % name)
-        return result_layer[0]
+        return v2_layer.get_layer(name)

     def data_layers(self):
         """
         get all data layer
         :return:
         """
-        data_layers = dict()
-
-        def __impl__(l):
-            if isinstance(l, v2_layer.DataLayerV2):
-                data_layers[l.name] = l
-
-        __bfs_travel__(__impl__, *self.layers)
+        data_layers = {}
+        for layer in self.proto().layers:
+            l = v2_layer.get_layer(layer.name)
+            if l and l.layer_type == conf_helps.LayerType.DATA:
+                data_layers[layer.name] = l
         return data_layers

     def data_type(self):

@@ -127,7 +114,7 @@ class Topology(object):
         [('image', dense_vector(768)), ('label', integer_value(10))]
         """
         data_layers = self.data_layers()
-        return [(nm, data_layers[nm].type)
+        return [(nm, data_layers[nm].data_type)
                 for nm in self.proto().input_layer_names]

     def get_layer_proto(self, name):
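Topology.get_layer and Topology.data_layers now defer to v2_layer.get_layer (provided by the rewritten python/paddle/v2/layer.py, whose diff is collapsed above) instead of breadth-first traversal over parent links, and data_type() reads each data layer's data_type attribute. A short usage sketch, mirroring test_topology.py; only the names 'img_demo' and 'lbl_demo' are invented here:

    import paddle.v2.activation as activation
    import paddle.v2.data_type as data_type
    import paddle.v2.layer as layer
    import paddle.v2.topology as topology

    img = layer.data(name='img_demo', type=data_type.dense_vector(784))
    lbl = layer.data(name='lbl_demo', type=data_type.integer_value(10))
    hidden = layer.fc(input=img, size=100, act=activation.Sigmoid())
    cost = layer.classification_cost(input=hidden, label=lbl)

    topo = topology.Topology(cost)
    print(topo.get_layer("img_demo") == img)   # looked up via v2_layer.get_layer
    print(topo.data_type())                    # [('img_demo', dense_vector(784)), ...]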