Commit ce939b30
Authored Dec 28, 2016 by caoying03

enable dropout rate in several computation layers.

Parent: 15a6252f
Showing 1 changed file with 42 additions and 32 deletions.

python/paddle/trainer/config_parser.py  (+42, -32)
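The change is mechanical: each of these layer-config classes previously declared a closed parameter list, so keyword options that LayerBase.__init__ already understands, such as drop_rate, could not be passed through them. Adding **xargs to each __init__ and forwarding it to super(...).__init__ lets those options reach the base class. Below is a minimal sketch of the pattern; LayerBase here is a simplified stand-in, not the real class from config_parser.py.

# Minimal sketch of the forwarding pattern this commit applies.
# LayerBase is a stand-in: only the drop_rate handling is mimicked.
class LayerBase(object):
    def __init__(self, name, type, size, inputs, device=None, drop_rate=0.0):
        # The base class already accepts dropout-related options.
        self.name = name
        self.drop_rate = drop_rate


class NormLayer(LayerBase):
    # Before this commit the signature ended at device=None, so passing
    # drop_rate raised a TypeError; **xargs now forwards it to the base.
    def __init__(self, name, inputs, device=None, **xargs):
        super(NormLayer, self).__init__(
            name, 'norm', 0, inputs=inputs, device=device, **xargs)


layer = NormLayer('norm1', inputs=[], drop_rate=0.5)
assert layer.drop_rate == 0.5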
@@ -1803,9 +1803,9 @@ class ConvTransLayer(ConvTransLayerBase):
 
 @config_layer('norm')
 class NormLayer(LayerBase):
-    def __init__(self, name, inputs, device=None):
+    def __init__(self, name, inputs, device=None, **xargs):
         super(NormLayer, self).__init__(
-            name, 'norm', 0, inputs=inputs, device=device)
+            name, 'norm', 0, inputs=inputs, device=device, **xargs)
         for input_index in xrange(len(self.inputs)):
             input_layer = self.get_input_layer(input_index)
             norm_conf = self.config.inputs[input_index].norm_conf
@@ -1817,9 +1817,9 @@ class NormLayer(LayerBase):
 
 @config_layer('pool')
 class PoolLayer(LayerBase):
-    def __init__(self, name, inputs, device=None):
+    def __init__(self, name, inputs, device=None, **xargs):
         super(PoolLayer, self).__init__(
-            name, 'pool', 0, inputs=inputs, device=device)
+            name, 'pool', 0, inputs=inputs, device=device, **xargs)
         for input_index in xrange(len(self.inputs)):
             input_layer = self.get_input_layer(input_index)
             pool_conf = self.config.inputs[input_index].pool_conf
@@ -1927,9 +1927,9 @@ class BatchNormLayer(LayerBase):
 
 @config_layer('trans')
 class TransLayer(LayerBase):
-    def __init__(self, name, inputs, device=None):
+    def __init__(self, name, inputs, device=None, **xargs):
         super(TransLayer, self).__init__(
-            name, 'trans', 0, inputs=inputs, device=device)
+            name, 'trans', 0, inputs=inputs, device=device, **xargs)
         config_assert(
             len(self.inputs) == 1,
             'TransLayer must have one and only one input')
@@ -1938,9 +1938,9 @@ class TransLayer(LayerBase):
 
 @config_layer('resize')
 class ResizeLayer(LayerBase):
-    def __init__(self, name, size, inputs, device=None):
+    def __init__(self, name, size, inputs, device=None, **xargs):
         super(ResizeLayer, self).__init__(
-            name, 'resize', size=size, inputs=inputs, device=device)
+            name, 'resize', size=size, inputs=inputs, device=device, **xargs)
         config_assert(
             len(self.inputs) == 1,
             'ResizeLayer must have one and only one input')
@@ -2270,9 +2270,10 @@ class ExpandLayer(LayerBase):
                  inputs,
                  trans_type='non-seq',
                  device=None,
-                 bias=False):
+                 bias=False,
+                 **xargs):
         super(ExpandLayer, self).__init__(
-            name, 'expand', 0, inputs=inputs, device=device)
+            name, 'expand', 0, inputs=inputs, device=device, **xargs)
         config_assert(
             len(self.inputs) == 2, 'ExpandLayer takes 2 and only 2 inputs')
         self.config.trans_type = trans_type
@@ -2356,14 +2357,16 @@ class SequenceLastInstanceLayer(LayerBase):
                  active_type='linear',
                  trans_type='non-seq',
                  device=None,
-                 bias=False):
+                 bias=False,
+                 **xargs):
         super(SequenceLastInstanceLayer, self).__init__(
             name,
             'seqlastins',
             0,
             inputs=inputs,
             device=device,
-            active_type=active_type)
+            active_type=active_type,
+            **xargs)
         config_assert(
             len(inputs) == 1, 'SequenceLastInstanceLayer must have 1 input')
         self.config.trans_type = trans_type
@@ -2400,14 +2403,16 @@ class SequenceConcatLayer(LayerBase):
                  inputs,
                  active_type='linear',
                  device=None,
-                 bias=False):
+                 bias=False,
+                 **xargs):
         super(SequenceConcatLayer, self).__init__(
             name,
             'seqconcat',
             0,
             inputs=inputs,
             device=device,
-            active_type=active_type)
+            active_type=active_type,
+            **xargs)
         config_assert(
             len(inputs) == 2, 'SequenceConcatLayer must have 2 inputs')
         for input_index in xrange(len(self.inputs)):
@@ -2424,14 +2429,16 @@ class SequenceReshapeLayer(LayerBase):
                  inputs,
                  active_type='linear',
                  device=None,
-                 bias=False):
+                 bias=False,
+                 **xargs):
         super(SequenceReshapeLayer, self).__init__(
             name,
             'seqreshape',
             size,
             inputs=inputs,
             device=device,
-            active_type=active_type)
+            active_type=active_type,
+            **xargs)
         config_assert(
             len(inputs) == 1, 'SequenceReshapeLayer must have 1 inputs')
         self.set_layer_size(size)
@@ -2445,14 +2452,16 @@ class SubSequenceLayer(LayerBase):
                  inputs,
                  active_type='linear',
                  device=None,
-                 bias=False):
+                 bias=False,
+                 **xargs):
         super(SubSequenceLayer, self).__init__(
             name,
             'subseq',
             0,
             inputs=inputs,
             device=device,
-            active_type=active_type)
+            active_type=active_type,
+            **xargs)
         config_assert(len(inputs) == 3, 'SubSequenceLayer must have 3 inputs')
         input_layer0 = self.get_input_layer(0)
         size = input_layer0.size
@@ -2462,9 +2471,9 @@ class SubSequenceLayer(LayerBase):
 
 @config_layer('out_prod')
 class OuterProdLayer(LayerBase):
-    def __init__(self, name, inputs, device=None):
+    def __init__(self, name, inputs, device=None, **xargs):
         super(OuterProdLayer, self).__init__(
-            name, 'out_prod', 0, inputs=inputs, device=device)
+            name, 'out_prod', 0, inputs=inputs, device=device, **xargs)
         config_assert(len(inputs) == 2, 'OuterProdLayer must have 2 inputs')
         input_layer0 = self.get_input_layer(0)
         input_layer1 = self.get_input_layer(1)
@@ -2473,9 +2482,9 @@ class OuterProdLayer(LayerBase):
 
 @config_layer('power')
 class PowerLayer(LayerBase):
-    def __init__(self, name, inputs, device=None):
+    def __init__(self, name, inputs, device=None, **xargs):
         super(PowerLayer, self).__init__(
-            name, 'power', 0, inputs=inputs, device=device)
+            name, 'power', 0, inputs=inputs, device=device, **xargs)
         config_assert(len(inputs) == 2, 'PowerLayer must have 2 inputs')
         input_layer1 = self.get_input_layer(1)
         self.set_layer_size(input_layer1.size)
@@ -2486,9 +2495,10 @@ class PowerLayer(LayerBase):
 
 @config_layer('slope_intercept')
 class SlopeInterceptLayer(LayerBase):
-    def __init__(self, name, inputs, slope=1.0, intercept=0.0, device=None):
+    def __init__(self, name, inputs, slope=1.0, intercept=0.0, device=None,
+                 **xargs):
         super(SlopeInterceptLayer, self).__init__(
-            name, 'slope_intercept', 0, inputs=inputs, device=device)
+            name, 'slope_intercept', 0, inputs=inputs, device=device, **xargs)
         self.config.slope = slope
         self.config.intercept = intercept
         config_assert(len(inputs) == 1, 'SlopeInterceptLayer must have 1 input')
@@ -2498,9 +2508,9 @@ class SlopeInterceptLayer(LayerBase):
 
 @config_layer('scaling')
 class ScalingLayer(LayerBase):
-    def __init__(self, name, inputs, device=None):
+    def __init__(self, name, inputs, device=None, **xargs):
         super(ScalingLayer, self).__init__(
-            name, 'scaling', 0, inputs=inputs, device=device)
+            name, 'scaling', 0, inputs=inputs, device=device, **xargs)
         config_assert(len(inputs) == 2, 'ScalingLayer must have 2 inputs')
         input_layer1 = self.get_input_layer(1)
         self.set_layer_size(input_layer1.size)
@@ -2511,9 +2521,9 @@ class ScalingLayer(LayerBase):
 
 @config_layer('conv_shift')
 class ConvShiftLayer(LayerBase):
-    def __init__(self, name, inputs, device=None):
+    def __init__(self, name, inputs, device=None, **xargs):
         super(ConvShiftLayer, self).__init__(
-            name, 'conv_shift', 0, inputs=inputs, device=device)
+            name, 'conv_shift', 0, inputs=inputs, device=device, **xargs)
         config_assert(len(inputs) == 2, 'ConvShiftLayer must have 2 inputs')
         input_layer0 = self.get_input_layer(0)
         self.set_layer_size(input_layer0.size)
@@ -2521,9 +2531,9 @@ class ConvShiftLayer(LayerBase):
 
 @config_layer('convex_comb')
 class ConvexCombinationLayer(LayerBase):
-    def __init__(self, name, size, inputs, device=None):
+    def __init__(self, name, size, inputs, device=None, **xargs):
         super(ConvexCombinationLayer, self).__init__(
-            name, 'convex_comb', size, inputs=inputs, device=device)
+            name, 'convex_comb', size, inputs=inputs, device=device, **xargs)
         config_assert(
             len(self.inputs) == 2, 'ConvexCombinationLayer must have 2 inputs')
         config_assert(
@@ -2562,9 +2572,9 @@ class BilinearInterpLayer(LayerBase):
 
 @config_layer('sum_to_one_norm')
 class SumToOneNormLayer(LayerBase):
-    def __init__(self, name, inputs, device=None):
+    def __init__(self, name, inputs, device=None, **xargs):
         super(SumToOneNormLayer, self).__init__(
-            name, 'sum_to_one_norm', 0, inputs=inputs, device=device)
+            name, 'sum_to_one_norm', 0, inputs=inputs, device=device, **xargs)
         config_assert(
             len(self.inputs) == 1, 'SumToOneNormLayer must have 1 input')
         input_layer0 = self.get_input_layer(0)
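With the forwarding in place, a dropout rate set in a trainer configuration can reach these layers. A sketch of how that is typically written with the trainer_config_helpers API, assuming last_seq and ExtraLayerAttribute behave as in the helper library of this era; adjust the names to your own config:

# Hypothetical trainer-config snippet: request dropout on a
# 'seqlastins' layer (SequenceLastInstanceLayer) via layer_attr.
from paddle.trainer_config_helpers import *

data = data_layer(name='input', size=128)
seq = last_seq(input=data,
               layer_attr=ExtraLayerAttribute(drop_rate=0.5))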