PaddlePaddle / Paddle
Commit 4772b78c
Committed on Nov 17, 2017 by caoying03

Commit message: add config_helper.
Parent: dfc5d1f1

8 changed files with 142 additions and 31 deletions (+142, -31):
doc/api/v2/config/layer.rst  (+5, -0)
paddle/gserver/layers/L2DistanceLayer.cpp  (+11, -12)
paddle/gserver/layers/L2DistanceLayer.h  (+4, -5)
python/paddle/trainer/config_parser.py  (+26, -12)
python/paddle/trainer_config_helpers/layers.py  (+48, -1)
python/paddle/trainer_config_helpers/tests/configs/file_list.sh  (+2, -1)
python/paddle/trainer_config_helpers/tests/configs/protostr/test_l2_distance_layer.protostr  (+39, -0)
python/paddle/trainer_config_helpers/tests/configs/test_l2_distance_layer.py  (+7, -0)
doc/api/v2/config/layer.rst
@@ -372,6 +372,11 @@ cos_sim
 .. autoclass:: paddle.v2.layer.cos_sim
     :noindex:
 
+l2_distance
+-----------
+.. autoclass:: paddle.v2.layer.l2_distance
+    :noindex:
+
 trans
 -----
 .. autoclass:: paddle.v2.layer.trans
paddle/gserver/layers/L2DistanceLayer.cpp
@@ -25,9 +25,9 @@ bool L2DistanceLayer::init(const LayerMap& layerMap,
   /* Initialize the basic parent class */
   Layer::init(layerMap, parameterMap);
 
-  CHECK_EQ(inputLayers_.size(), 2UL) << "The L2 distance layer accepts two and "
+  CHECK_EQ(inputLayers_.size(), 2UL) << "The L2DistanceLayer accepts two and "
                                      << "only two inputs.";
-  CHECK_EQ(getSize(), 1UL) << "The output dimensionality of L2 distance "
+  CHECK_EQ(getSize(), 1UL) << "The output dimensionality of L2DistanceLayer "
                            << "is fixed to be 1.";
 
   return true;
@@ -41,9 +41,9 @@ void L2DistanceLayer::forward(PassType passType) {
   CHECK(inV1 && inV2);
   CHECK_EQ(inV1->getHeight(), inV2->getHeight())
-      << "The height of two inputs to this layer must be the same.";
+      << "The height of two inputs of this layer must be the same.";
   CHECK_EQ(inV1->getWidth(), inV2->getWidth())
-      << "The width of two inputs to this layer must be the same.";
+      << "The width of two inputs of this layer must be the same.";
 
   int batchSize = inV1->getHeight();
   int output_dim = getSize();
@@ -66,22 +66,21 @@ void L2DistanceLayer::forward(PassType passType) {
 void L2DistanceLayer::backward(const UpdateCallback& callback) {
   const auto outG = getOutputGrad();
   const auto outV = getOutputValue();
   const auto inV1 = getInputValue(0);
   const auto inV2 = getInputValue(1);
-  CHECK(outG && outV);
   auto inGrad1 = getInputGrad(0);
   auto inGrad2 = getInputGrad(1);
+  CHECK(outG && outV && inV1 && inV2 && inGrad1 && inGrad2);
 
   {
     REGISTER_TIMER_INFO("L2DistanceBpAtvTimer", getName().c_str());
 
-    outV->scalarDiv(*outV, 1.);
-    outV->dotMul(*outG, *outV);
-
-    if (inGrad1) {
-      inGrad1->addRowScale(0, *inputSub_, *outV);
-    }
+    if (inGrad1 || inGrad2) {
+      outV->scalarDiv(*outV, 1.);
+      outV->dotMul(*outG, *outV);
+    }
+
+    if (inGrad1) inGrad1->addRowScale(0, *inputSub_, *outV);
 
     if (inGrad2) {
       inputSub_->mulScalar(-1.);
       inGrad2->addRowScale(0, *inputSub_, *outV);
paddle/gserver/layers/L2DistanceLayer.h
@@ -16,12 +16,11 @@ limitations under the License. */
 #include "Layer.h"
 #include "paddle/math/Matrix.h"
 #include "paddle/utils/ThreadLocal.h"
 
 namespace paddle {
 
 /**
- * @brief A layer for calculating l2 distance between the two input vectors.
+ * @brief The layer calculates the l2 distance between two input vectors.
  * \f[
  * f(\bf{x}, \bf{y}) = \sqrt{\sum_{i=1}^D(x_i - y_i)}
  * \f]
@@ -30,13 +29,12 @@ namespace paddle {
  * - Input2: A vector (batchSize * dataDim)
 * - Output: A vector (batchSize * 1)
 *
- * The config file api is l2_distance.
+ * The configuration api is: l2_distance_layer.
 */
 class L2DistanceLayer : public Layer {
 public:
   explicit L2DistanceLayer(const LayerConfig& config) : Layer(config) {}
 
   ~L2DistanceLayer() {}
 
   bool init(const LayerMap& layerMap,
@@ -46,7 +44,8 @@ public:
   void backward(const UpdateCallback& callback = nullptr) override;
 
 private:
-  // Store result of subtracting Input2 from Input1.
+  // Store the result of subtracting Input2 from Input1 in forward computation,
+  // which will be reused in backward computation.
   MatrixPtr inputSub_;
 };
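For illustration only (not part of the commit): the header above documents the layer as taking two (batchSize x dataDim) inputs and producing a (batchSize x 1) output. Below is a minimal NumPy sketch of the forward and backward computations, assuming the standard Euclidean distance (i.e. with the squared difference inside the sum, which the backward pass in L2DistanceLayer.cpp is consistent with); the names l2_forward and l2_backward are hypothetical, not Paddle APIs.

import numpy as np

def l2_forward(x, y):
    # x, y: (batchSize, dataDim) -> (batchSize, 1), matching the
    # Input1 / Input2 / Output shapes described in L2DistanceLayer.h.
    diff = x - y                                  # the role of inputSub_
    return np.sqrt(np.sum(diff * diff, axis=1, keepdims=True))

def l2_backward(x, y, out, out_grad):
    # Gradients of sqrt(sum((x - y)^2)) w.r.t. x and y, scaled by the
    # incoming gradient; mirrors the scalarDiv / dotMul / addRowScale
    # sequence in the backward pass above.
    scale = out_grad / out                        # (batchSize, 1)
    grad_x = (x - y) * scale
    grad_y = -grad_x
    return grad_x, grad_y

# Toy shapes mirroring the test config later in this commit.
x = np.random.rand(4, 128)
y = np.random.rand(4, 128)
out = l2_forward(x, y)
grad_x, grad_y = l2_backward(x, y, out, np.ones_like(out))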
python/paddle/trainer/config_parser.py
@@ -3330,6 +3330,18 @@ class RowL2NormLayer(LayerBase):
         self.set_layer_size(input_layer.size)
 
 
+@config_layer('cos')
+class CosSimLayer(LayerBase):
+    def __init__(self, name, inputs, cos_scale=1, device=None):
+        super(CosSimLayer, self).__init__(
+            name, 'cos', 1, inputs=inputs, device=device)
+        config_assert(len(self.inputs) == 2, 'CosSimLayer must have 2 inputs')
+        config_assert(
+            self.get_input_layer(0).size == self.get_input_layer(1).size,
+            'inputs of CosSimLayer must have same dim')
+        self.config.cos_scale = cos_scale
+
+
 @config_layer('cos_vm')
 class CosSimVecMatLayer(LayerBase):
     def __init__(self, name, size, inputs, cos_scale=1.0, device=None):
@@ -3343,6 +3355,20 @@ class CosSimVecMatLayer(LayerBase):
             'Wrong input size for CosSimVecMatLayer')
 
 
+@config_layer('l2_distance')
+class L2DistanceLayer(LayerBase):
+    def __init__(self, name, inputs, device=None):
+        super(L2DistanceLayer, self).__init__(
+            name, 'l2_distance', 1, inputs=inputs, device=device)
+        config_assert(
+            len(self.inputs) == 2, ('The L2DistanceLayer must have '
+                                    'and only have 2 inputs.'))
+        config_assert(
+            self.get_input_layer(0).size == self.get_input_layer(1).size,
+            ('Two inputs of the L2DistanceLayer must have '
+             'the same dimensionality.'))
+
+
 @config_layer('sampling_id')
 class SamplingIdLayer(LayerBase):
     def __init__(self, name, inputs, device=None):
@@ -3384,18 +3410,6 @@ class AverageLayer(LayerBase):
         self.create_bias_parameter(bias, self.config.size)
 
 
-@config_layer('cos')
-class CosSimLayer(LayerBase):
-    def __init__(self, name, inputs, cos_scale=1, device=None):
-        super(CosSimLayer, self).__init__(
-            name, 'cos', 1, inputs=inputs, device=device)
-        config_assert(len(self.inputs) == 2, 'CosSimLayer must have 2 inputs')
-        config_assert(
-            self.get_input_layer(0).size == self.get_input_layer(1).size,
-            'inputs of CosSimLayer must have same dim')
-        self.config.cos_scale = cos_scale
-
-
 @config_layer('tensor')
 class TensorLayer(LayerBase):
     def __init__(self, name, size, inputs, bias=True, **xargs):
python/paddle/trainer_config_helpers/layers.py
@@ -51,6 +51,7 @@ __all__ = [
     'last_seq',
     'first_seq',
     'cos_sim',
+    'l2_distance_layer',
     'hsigmoid',
     'conv_projection',
     'square_error_cost',
@@ -167,6 +168,7 @@ class LayerType(object):
     COST = 'cost'
     COSINE_SIM_VEC = 'cos_vm'
     COSINE_SIM = 'cos'
+    L2_DISTANCE = 'l2_distance'
     HSIGMOID = 'hsigmoid'
     CONV_LAYER = 'conv'
     CONVTRANS_LAYER = 'convt'
@@ -2332,6 +2334,51 @@ def cos_sim(a, b, scale=1, size=1, name=None, layer_attr=None):
     return LayerOutput(name, LayerType.COSINE_SIM, parents=[a, b], size=size)
 
 
+@wrap_name_default()
+@layer_support()
+def l2_distance_layer(x, y, name=None, layer_attr=None):
+    """
+    This layer calculates and returns the Euclidean distance between two input
+    vectors x and y. The equation is as follows:
+
+    ..  math::
+        l2_distance(\\mathbf{x}, \\mathbf{y}) = \\sqrt{\\sum_{i=1}^D(x_i - y_i)}
+
+    The output size of this layer is fixed to be 1. Note that the above
+    computation is for one sample. Multiple samples are processed in one batch.
+
+    The example usage is:
+
+    ..  code-block:: python
+
+        l2_sim = l2_distance(x=layer1, y=layer2)
+
+    :param name: The name of this layer. It is optional.
+    :type name: basestring
+    :param x: The first input x for this layer, whose output is a matrix with
+              dimensionality N x D. N is the sample number in a mini-batch.
+              D is the dimensionality of x's output.
+    :type x: LayerOutput
+    :param y: The second input y for this layer, whose output is a matrix with
+              dimensionality N x D. N is the sample number in a mini-batch.
+              D is the dimensionality of y's output.
+    :type y: LayerOutput
+    :param layer_attr: The extra layer attributes, for example, drop rate.
+                       See ExtraLayerAttribute for more details.
+    :type layer_attr: ExtraLayerAttribute
+    :return: The returned LayerOutput object.
+    :rtype: LayerOutput
+    """
+
+    assert isinstance(x, LayerOutput) and isinstance(x, LayerOutput)
+    Layer(
+        name=name,
+        type=LayerType.L2_DISTANCE,
+        inputs=[x.name, x.name],
+        **ExtraLayerAttribute.to_kwargs(layer_attr))
+    return LayerOutput(name, LayerType.L2_DISTANCE, parents=[x, y], size=1)
+
+
 @wrap_name_default()
 @wrap_bias_attr_default(has_bias=True)
 @wrap_param_attr_default()
@@ -3867,7 +3914,7 @@ def recurrent_layer(input,
     :type input: LayerOutput
     :param act: Activation type. TanhActivation is the default activation.
     :type act: BaseActivation
-    :param bias_attr: The parameter attribute for bias. If this parameter is set to 
+    :param bias_attr: The parameter attribute for bias. If this parameter is set to
                       False or an object whose type is not ParameterAttribute,
                       no bias is defined. If the parameter is set to True,
                       the bias is initialized to zero.
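For illustration only (a sketch, not part of the commit): the helper is exported as l2_distance_layer, while the docstring's example above calls it l2_distance. Note also that, as committed here, the Layer(...) call passes inputs=[x.name, x.name], which is why the generated protostr later in this commit lists input_layer_name: "x" twice; the sketch below assumes the intended pairing of x and y and two data layers of equal size, as required by the config_parser assert. The names vec_a, vec_b and dist are made up for the example.

from paddle.trainer_config_helpers import *

# Two data layers of the same dimensionality, as required by the
# L2DistanceLayer config_assert in config_parser.py.
vec_a = data_layer(name='vec_a', size=128)
vec_b = data_layer(name='vec_b', size=128)

# Per-sample Euclidean distance; the output size is fixed to 1.
dist = l2_distance_layer(x=vec_a, y=vec_b, name='dist')

outputs(dist)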
python/paddle/trainer_config_helpers/tests/configs/file_list.sh
@@ -10,6 +10,7 @@ test_prelu_layer test_row_conv test_detection_output_layer test_multibox_loss_la
 test_recursive_topology test_gated_unit_layer test_clip_layer test_row_l2_norm_layer
 test_kmax_seq_socre_layer test_sub_nested_seq_select_layer test_scale_shift_layer
 test_seq_slice_layer test_cross_entropy_over_beam test_roi_pool_layer test_pooling3D_layer
-test_conv3d_layer test_deconv3d_layer test_BatchNorm3D test_resize_layer test_scale_sub_region_layer)
+test_conv3d_layer test_deconv3d_layer test_BatchNorm3D test_resize_layer
+test_scale_sub_region_layer test_l2_distance_layer)
 
 export whole_configs=(test_split_datasource)
python/paddle/trainer_config_helpers/tests/configs/protostr/test_l2_distance_layer.protostr (new file, mode 100644)
type: "nn"
layers {
  name: "x"
  type: "data"
  size: 128
  active_type: ""
}
layers {
  name: "y"
  type: "data"
  size: 128
  active_type: ""
}
layers {
  name: "__l2_distance_layer_0__"
  type: "l2_distance"
  size: 1
  active_type: ""
  inputs {
    input_layer_name: "x"
  }
  inputs {
    input_layer_name: "x"
  }
}
input_layer_names: "x"
input_layer_names: "y"
output_layer_names: "__l2_distance_layer_0__"
sub_models {
  name: "root"
  layer_names: "x"
  layer_names: "y"
  layer_names: "__l2_distance_layer_0__"
  input_layer_names: "x"
  input_layer_names: "y"
  output_layer_names: "__l2_distance_layer_0__"
  is_recurrent_layer_group: false
}
python/paddle/trainer_config_helpers/tests/configs/test_l2_distance_layer.py (new file, mode 100644)
from paddle.trainer_config_helpers import *

outputs(
    l2_distance_layer(
        x=data_layer(name='x', size=128), y=data_layer(name='y', size=128)))