Commit ac383dd0
Authored Oct 24, 2016 by luotao1; committed by GitHub on Oct 24, 2016
Parent: e387cdba

    add missing layer_attr (#234)

Showing 1 changed file with 88 additions and 18 deletions:
python/paddle/trainer_config_helpers/layers.py (+88, −18)
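For orientation, this is how a configuration can exercise the new parameter once this change lands. A minimal usage sketch, not taken from the commit: data_layer and fc_layer are the usual trainer_config_helpers entry points, and the ExtraLayerAttribute constructor argument shown (error_clipping_threshold) is an assumption about that helper, not something this diff touches.

# Minimal usage sketch (assumptions flagged above; not part of this commit).
from paddle.trainer_config_helpers import *

pixel = data_layer(name="pixel", size=128)
value = data_layer(name="value", size=10)
pred = fc_layer(input=pixel, size=10)

# After this commit, regression_cost accepts layer_attr and forwards it
# to the underlying Layer(...) call via ExtraLayerAttribute.to_kwargs:
cost = regression_cost(
    input=pred,
    label=value,
    layer_attr=ExtraLayerAttribute(error_clipping_threshold=10.0))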
@@ -2799,7 +2799,9 @@ def __cost_input__(input, label, weight=None):
 @wrap_name_default()
-def regression_cost(input, label, weight=None, name=None):
+@layer_support()
+def regression_cost(input, label, weight=None, name=None,
+                    layer_attr=None):
     """
     Regression Layer.
@@ -2814,12 +2816,15 @@ def regression_cost(input, label, weight=None, name=None):
     :param weight: The weight affects the cost, namely the scale of cost.
                    It is an optional argument.
     :type weight: LayerOutput
+    :param layer_attr: layer's extra attribute.
+    :type layer_attr: ExtraLayerAttribute
     :return: LayerOutput object.
     :rtype: LayerOutput
     """
     ipts, parents = __cost_input__(input, label, weight)

-    Layer(inputs=ipts, type="square_error", name=name)
+    Layer(inputs=ipts, type="square_error", name=name,
+          **ExtraLayerAttribute.to_kwargs(layer_attr))
     return LayerOutput(name, LayerType.COST, parents=parents)
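Every hunk in this commit repeats the same `**ExtraLayerAttribute.to_kwargs(layer_attr)` expansion. The helper lives in trainer_config_helpers/attrs.py; its contract is roughly the sketch below (illustrative, the real method may differ in detail): a None attribute must expand to an empty keyword set, so the default layer_attr=None leaves every existing call site unchanged.

# Rough sketch of the to_kwargs contract relied on throughout this diff:
@staticmethod
def to_kwargs(attr):
    if attr is None:
        return dict()   # **{} adds no keyword arguments to Layer(...)
    return attr.attr    # dict of extra settings carried by the attribute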
@@ -2948,7 +2953,8 @@ def conv_operator(img, filter, filter_size, num_filters,
 @wrap_name_default()
-def conv_shift_layer(a, b, name=None):
+@layer_support()
+def conv_shift_layer(a, b, name=None, layer_attr=None):
     """
     This layer performs cyclic convolution for two input. For example:
       - a[in]: contains M elements.
@@ -2977,6 +2983,8 @@ def conv_shift_layer(a, b, name=None):
     :type a: LayerOutput
     :param b: input layer b
     :type b: LayerOutput
+    :param layer_attr: layer's extra attribute.
+    :type layer_attr: ExtraLayerAttribute
     :return: LayerOutput object.
     :rtype: LayerOutput
     """
@@ -2986,6 +2994,7 @@ def conv_shift_layer(a, b, name=None):
         name=name,
         type=LayerType.CONV_SHIFT_LAYER,
         inputs=[a.name, b.name],
+        **ExtraLayerAttribute.to_kwargs(layer_attr)
     )

     return LayerOutput(name, LayerType.CONV_SHIFT_LAYER, parents=[a, b],
@@ -3059,6 +3068,7 @@ def tensor_layer(a, b, size, act=None, name=None,
 @wrap_param_attr_default()
 @wrap_bias_attr_default()
 @wrap_act_default()
+@layer_support()
 def selective_fc_layer(input, select, size, act=None, name=None,
                        pass_generation=False,
                        has_selected_colums=True,
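selective_fc_layer only gains the decorator in this hunk; the rest of its signature is unchanged context. For readers unfamiliar with @layer_support(): it marks a layer helper as able to honor extra attributes. A rough illustration of that decorator pattern follows, assuming a hypothetical check() validation hook; this is not Paddle's actual implementation.

import functools

# Illustrative stand-in for @layer_support(): remember which extra
# attributes a helper supports, and give config time a chance to reject
# an ExtraLayerAttribute the layer cannot apply.
def layer_support_sketch(*supported):
    def decorator(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            attr = kwargs.get('layer_attr')
            if attr is not None and hasattr(attr, 'check'):
                attr.check(func.__name__)  # hypothetical validation hook
            return func(*args, **kwargs)
        wrapper.supported_attrs = list(supported)
        return wrapper
    return decorator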
@@ -3131,7 +3141,8 @@ def selective_fc_layer(input, select, size, act=None, name=None,
 @wrap_name_default()
-def sampling_id_layer(input, name=None):
+@layer_support()
+def sampling_id_layer(input, name=None, layer_attr=None):
     """
     A layer for sampling id from multinomial distribution from the input layer.
     Sampling one id for one sample.
@@ -3146,6 +3157,8 @@ def sampling_id_layer(input, name=None):
     :type input: LayerOutput
     :param name: The Layer Name.
     :type name: basestring
+    :param layer_attr: Extra Layer config.
+    :type layer_attr: ExtraLayerAttribute|None
     :return: LayerOutput object.
     :rtype: LayerOutput
     """
@@ -3153,12 +3166,15 @@ def sampling_id_layer(input, name=None):
         name=name,
         type=LayerType.SAMPLING_ID_LAYER,
         inputs=[Input(input.name)],
+        **ExtraLayerAttribute.to_kwargs(layer_attr)
     )
     return LayerOutput(name, LayerType.SAMPLING_ID_LAYER, input)


 @wrap_name_default()
-def slope_intercept_layer(input, name=None, slope=1.0, intercept=0.0):
+@layer_support()
+def slope_intercept_layer(input, name=None, slope=1.0, intercept=0.0,
+                          layer_attr=None):
     """
     This layer for applying a slope and an intercept to the input
     element-wise. There is no activation and weight.
@@ -3180,6 +3196,8 @@ def slope_intercept_layer(input, name=None, slope=1.0, intercept=0.0):
     :type slope: float.
     :param intercept: the offset.
     :type intercept: float.
+    :param layer_attr: Extra Layer config.
+    :type layer_attr: ExtraLayerAttribute|None
     :return: LayerOutput object.
     :rtype: LayerOutput
     """
@@ -3189,12 +3207,15 @@ def slope_intercept_layer(input, name=None, slope=1.0, intercept=0.0):
         slope=slope,
         intercept=intercept,
         inputs=[Input(input.name)],
+        **ExtraLayerAttribute.to_kwargs(layer_attr)
     )
     return LayerOutput(name, LayerType.SLOPE_INTERCEPT_LAYER, input)


 @wrap_name_default()
-def linear_comb_layer(weights, vectors, size=None, name=None):
+@layer_support()
+def linear_comb_layer(weights, vectors, size=None, name=None,
+                      layer_attr=None):
     """
     A layer for weighted sum of vectors takes two inputs.
       - Input: size of weights is M
@@ -3235,6 +3256,8 @@ def linear_comb_layer(weights, vectors, size=None, name=None):
     :type size: int
     :param name: The Layer Name.
     :type name: basestring
+    :param layer_attr: Extra Layer config.
+    :type layer_attr: ExtraLayerAttribute|None
     :return: LayerOutput object.
     :rtype: LayerOutput
     """
@@ -3250,6 +3273,7 @@ def linear_comb_layer(weights, vectors, size=None, name=None):
         type=LayerType.LINEAR_COMBINATION_LAYER,
         size=size,
         inputs=[Input(weights.name), Input(vectors.name)],
+        **ExtraLayerAttribute.to_kwargs(layer_attr)
     )
     return LayerOutput(name, LayerType.LINEAR_COMBINATION_LAYER,
                        [weights, vectors], size=size)
@@ -3259,6 +3283,7 @@ convex_comb_layer = linear_comb_layer
 @wrap_name_default()
+@layer_support()
 def block_expand_layer(input,
                        channel=0,
                        block_x=0,
@@ -3267,7 +3292,8 @@ def block_expand_layer(input,
                        stride_y=0,
                        padding_x=0,
                        padding_y=0,
-                       name=None):
+                       name=None,
+                       layer_attr=None):
     """
     Expand feature map to minibatch matrix.
        - matrix width is: block_y * block_x * channel
@@ -3314,6 +3340,8 @@ def block_expand_layer(input,
     :type padding_y: int
     :param name: The name of this layer, which can not specify.
     :type name: None|basestring.
+    :param layer_attr: Extra Layer config.
+    :type layer_attr: ExtraLayerAttribute|None
     :return: LayerOutput object.
     :rtype: LayerOutput
     """
@@ -3328,13 +3356,16 @@ def block_expand_layer(input,
                                padding_y=padding_y)
         ),
         type=LayerType.BLOCK_EXPAND,
+        **ExtraLayerAttribute.to_kwargs(layer_attr)
     )
     return LayerOutput(name, LayerType.BLOCK_EXPAND, parents=[input])


 @wrap_name_default()
-def ctc_layer(input, label, size=None, name=None, norm_by_times=False):
+@layer_support()
+def ctc_layer(input, label, size=None, name=None, norm_by_times=False,
+              layer_attr=None):
     """
     Connectionist Temporal Classification (CTC) is designed for temporal
     classication task. That is, for sequence labeling problems where the
@@ -3371,6 +3402,8 @@ def ctc_layer(input, label, size=None, name=None, norm_by_times=False):
     :type name: basestring|None
     :param norm_by_times: Whether to normalization by times. False by default.
     :type norm_by_times: bool
+    :param layer_attr: Extra Layer config.
+    :type layer_attr: ExtraLayerAttribute|None
     :return: LayerOutput object.
     :rtype: LayerOutput
     """
@@ -3386,14 +3419,17 @@ def ctc_layer(input, label, size=None, name=None, norm_by_times=False):
         type=LayerType.CTC_LAYER,
         size=size,
         norm_by_times=norm_by_times,
-        inputs=[input.name, label.name]
+        inputs=[input.name, label.name],
+        **ExtraLayerAttribute.to_kwargs(layer_attr)
     )
     return LayerOutput(name, LayerType.CTC_LAYER, [input, label], size=size)


 @wrap_name_default()
 @wrap_param_attr_default()
-def crf_layer(input, label, size=None, weight=None, param_attr=None, name=None):
+@layer_support()
+def crf_layer(input, label, size=None, weight=None, param_attr=None,
+              name=None, layer_attr=None):
     """
     A layer for calculating the cost of sequential conditional random
     field model.
@@ -3419,6 +3455,8 @@ def crf_layer(input, label, size=None, weight=None, param_attr=None, name=None):
     :type param_attr: ParameterAttribute
     :param name: The name of this layers. It is not necessary.
     :type name: None|basestring
+    :param layer_attr: Extra Layer config.
+    :type layer_attr: ExtraLayerAttribute|None
     :return: LayerOutput object.
     :rtype: LayerOutput
     """
@@ -3442,6 +3480,7 @@ def crf_layer(input, label, size=None, weight=None, param_attr=None, name=None):
         type=LayerType.CRF_LAYER,
         size=size,
         inputs=ipts,
+        **ExtraLayerAttribute.to_kwargs(layer_attr)
     )
     parents = [input, label]
     if weight is not None:
@@ -3451,7 +3490,9 @@ def crf_layer(input, label, size=None, weight=None, param_attr=None, name=None):
 @wrap_name_default()
 @wrap_param_attr_default()
-def crf_decoding_layer(input, size, label=None, param_attr=None, name=None):
+@layer_support()
+def crf_decoding_layer(input, size, label=None, param_attr=None, name=None,
+                       layer_attr=None):
     """
     A layer for calculating the decoding sequence of sequential conditional
     random field model. The decoding sequence is stored in output.ids.
@@ -3469,6 +3510,8 @@ def crf_decoding_layer(input, size, label=None, param_attr=None, name=None):
     :type param_attr: ParameterAttribute
     :param name: The name of this layers. It is not necessary.
     :type name: None|basestring
+    :param layer_attr: Extra Layer config.
+    :type layer_attr: ExtraLayerAttribute|None
     :return: LayerOutput object.
     :rtype: LayerOutput
     """
@@ -3485,6 +3528,7 @@ def crf_decoding_layer(input, size, label=None, param_attr=None, name=None):
         type=LayerType.CRF_DECODING_LAYER,
         size=size,
         inputs=ipts,
+        **ExtraLayerAttribute.to_kwargs(layer_attr)
     )
     parents = [input]
     if label is not None:
@@ -3575,7 +3619,8 @@ following are cost Layers.
 @wrap_name_default()
-def rank_cost(left, right, label, weight=None, name=None, coeff=1.0):
+@layer_support()
+def rank_cost(left, right, label, weight=None, name=None, coeff=1.0, layer_attr=None):
     """
     A cost Layer for learning to rank using gradient descent. Details can refer
     to `papers <http://research.microsoft.com/en-us/um/people/cburges/papers/
@@ -3619,6 +3664,8 @@ def rank_cost(left, right, label, weight=None, name=None, coeff=1.0):
     :type name: None|basestring
     :param coeff: The coefficient affects the gradient in the backward.
     :type coeff: float
+    :param layer_attr: Extra Layer Attribute.
+    :type layer_attr: ExtraLayerAttribute
     :return: LayerOutput object.
     :rtype: LayerOutput
     """
@@ -3636,13 +3683,15 @@ def rank_cost(left, right, label, weight=None, name=None, coeff=1.0):
         type=LayerType.RANK_COST,
         inputs=ipts,
         coeff=coeff,
+        **ExtraLayerAttribute.to_kwargs(layer_attr)
     )
     return LayerOutput(name, LayerType.RANK_COST, parents=parents)


 @wrap_name_default()
-def lambda_cost(input, score, name, NDCG_num=5, max_sort_size=-1):
+@layer_support()
+def lambda_cost(input, score, name, NDCG_num=5, max_sort_size=-1, layer_attr=None):
     """
     lambdaCost for lambdaRank LTR approach.
@@ -3673,6 +3722,8 @@ def lambda_cost(input, score, name, NDCG_num=5, max_sort_size=-1):
     :type max_sort_size: int
     :param name: The name of this layers. It is not necessary.
     :type name: None|basestring
+    :param layer_attr: Extra Layer Attribute.
+    :type layer_attr: ExtraLayerAttribute
     :return: LayerOutput object.
     :rtype: LayerOutput
     """
@@ -3683,14 +3734,16 @@ def lambda_cost(input, score, name, NDCG_num=5, max_sort_size=-1):
         type=LayerType.LAMBDA_COST,
         inputs=[input.name, score.name],
         NDCG_num=NDCG_num,
-        max_sort_size=max_sort_size
+        max_sort_size=max_sort_size,
+        **ExtraLayerAttribute.to_kwargs(layer_attr)
     )
     return LayerOutput(name, LayerType.LAMBDA_COST, parents=[input, score])


 @wrap_name_default()
-def cross_entropy(input, label, name=None, coeff=1.0):
+@layer_support()
+def cross_entropy(input, label, name=None, coeff=1.0, layer_attr=None):
     """
     A loss layer for multi class entropy.
@@ -3708,6 +3761,8 @@ def cross_entropy(input, label, name=None, coeff=1.0):
     :type name: None|basestring.
     :param coeff: The coefficient affects the gradient in the backward.
     :type coeff: float.
+    :param layer_attr: Extra Layer Attribute.
+    :type layer_attr: ExtraLayerAttribute
     :return: LayerOutput object.
     :rtype: LayerOutput.
     """
@@ -3716,13 +3771,16 @@ def cross_entropy(input, label, name=None, coeff=1.0):
         type=LayerType.CROSS_ENTROPY,
         inputs=[input.name, label.name],
         coeff=coeff,
+        **ExtraLayerAttribute.to_kwargs(layer_attr)
     )
     return LayerOutput(name, LayerType.CROSS_ENTROPY, parents=[input, label])


 @wrap_name_default()
+@layer_support()
 def cross_entropy_with_selfnorm(input, label, name=None, coeff=1.0,
-                                softmax_selfnorm_alpha=0.1):
+                                softmax_selfnorm_alpha=0.1,
+                                layer_attr=None):
     """
     A loss layer for multi class entropy with selfnorm.
@@ -3742,6 +3800,8 @@ def cross_entropy_with_selfnorm(input, label, name=None, coeff=1.0,
     :type coeff: float.
     :param softmax_selfnorm_alpha: The scale factor affects the cost.
     :type softmax_selfnorm_alpha: float.
+    :param layer_attr: Extra Layer Attribute.
+    :type layer_attr: ExtraLayerAttribute
     :return: LayerOutput object.
     :rtype: LayerOutput.
     """
@@ -3750,6 +3810,7 @@ def cross_entropy_with_selfnorm(input, label, name=None, coeff=1.0,
         inputs=[input.name, label.name],
         coeff=coeff,
         softmax_selfnorm_alpha=softmax_selfnorm_alpha,
+        **ExtraLayerAttribute.to_kwargs(layer_attr)
     )
     return LayerOutput(name,
@@ -3758,7 +3819,8 @@ def cross_entropy_with_selfnorm(input, label, name=None, coeff=1.0,
 @wrap_name_default()
-def huber_cost(input, label, name=None, coeff=1.0):
+@layer_support()
+def huber_cost(input, label, name=None, coeff=1.0, layer_attr=None):
     """
     A loss layer for huber loss.
@@ -3774,6 +3836,8 @@ def huber_cost(input, label, name=None, coeff=1.0):
     :type name: None|basestring.
     :param coeff: The coefficient affects the gradient in the backward.
     :type coeff: float.
+    :param layer_attr: Extra Layer Attribute.
+    :type layer_attr: ExtraLayerAttribute
     :return: LayerOutput object.
     :rtype: LayerOutput.
     """
@@ -3784,12 +3848,15 @@ def huber_cost(input, label, name=None, coeff=1.0):
         type=LayerType.HUBER,
         inputs=[input.name, label.name],
         coeff=coeff,
+        **ExtraLayerAttribute.to_kwargs(layer_attr)
     )
     return LayerOutput(name, LayerType.HUBER, parents=[input, label])


 @wrap_name_default()
-def multi_binary_label_cross_entropy(input, label, name=None, coeff=1.0):
+@layer_support()
+def multi_binary_label_cross_entropy(input, label, name=None, coeff=1.0,
+                                     layer_attr=None):
     """
     A loss layer for multi binary label cross entropy.
@@ -3807,6 +3874,8 @@ def multi_binary_label_cross_entropy(input, label, name=None, coeff=1.0):
     :type name: None|basestring
     :param coeff: The coefficient affects the gradient in the backward.
     :type coeff: float
+    :param layer_attr: Extra Layer Attribute.
+    :type layer_attr: ExtraLayerAttribute
     :return: LayerOutput object.
     :rtype: LayerOutput
     """
@@ -3821,6 +3890,7 @@ def multi_binary_label_cross_entropy(input, label, name=None, coeff=1.0):
         type=LayerType.MULTI_BIN_LABEL_CROSS_ENTROPY,
         inputs=[input.name, label.name],
         coeff=coeff,
+        **ExtraLayerAttribute.to_kwargs(layer_attr)
     )
     return LayerOutput(name, LayerType.MULTI_BIN_LABEL_CROSS_ENTROPY,
                        parents=[input, label])
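Taken together, every hunk above applies the same three-step pattern. Distilled into one hypothetical helper below; some_layer and LayerType.SOME_LAYER stand in for the sixteen real functions, so this is a summary of the diff, not code from it.

@wrap_name_default()
@layer_support()                                      # step 1: declare support
def some_layer(input, name=None, layer_attr=None):    # step 2: accept layer_attr
    Layer(
        name=name,
        type=LayerType.SOME_LAYER,                    # hypothetical layer type
        inputs=[input.name],
        **ExtraLayerAttribute.to_kwargs(layer_attr))  # step 3: forward extras
    return LayerOutput(name, LayerType.SOME_LAYER, parents=[input])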