Commit fc0ad904

Repeat layer for column vector

Authored on Jun 13, 2017 by xuwei06
Parent: 14c0e71d

Showing 4 changed files with 97 additions and 39 deletions:

  paddle/gserver/layers/FeatureMapExpandLayer.cpp   +54  -24
  paddle/gserver/tests/test_LayerGrad.cpp            +9   -6
  python/paddle/trainer/config_parser.py             +9   -1
  python/paddle/trainer_config_helpers/layers.py    +25   -8
paddle/gserver/layers/FeatureMapExpandLayer.cpp:

```diff
@@ -40,6 +40,7 @@ namespace paddle {
 class FeatureMapExpandLayer : public Layer {
 private:
   int numFilters_;
+  bool asRowVector_;
 
 public:
   explicit FeatureMapExpandLayer(const LayerConfig& config) : Layer(config) {}

@@ -62,6 +63,7 @@ bool FeatureMapExpandLayer::init(const LayerMap& layerMap,
   CHECK_EQ(inputLayers_.size(), 1UL);
   numFilters_ = config_.num_filters();
+  asRowVector_ = config_.user_arg() != "as_col_vec";
   return true;
 }

@@ -76,6 +78,7 @@ void FeatureMapExpandLayer::forward(PassType passType) {
   {
     AsyncGpuBlock asyncGpuBlock;
+    if (asRowVector_) {
       for (size_t i = 0; i < batchSize; i++) {
         MatrixPtr outVTmp =
             Matrix::create(outputV->getData() + i * imgSize * numFilters_,
@@ -87,6 +90,19 @@ void FeatureMapExpandLayer::forward(PassType passType) {
             inputV->getData() + i * imgSize, 1, imgSize, false, useGpu_);
         outVTmp->addRowVector(*inVTmp);
       }
+    } else {
+      for (size_t i = 0; i < batchSize; i++) {
+        MatrixPtr outVTmp =
+            Matrix::create(outputV->getData() + i * imgSize * numFilters_,
+                           imgSize, numFilters_, false, useGpu_);
+        MatrixPtr inVTmp = Matrix::create(
+            inputV->getData() + i * imgSize, imgSize, 1, false, useGpu_);
+        outVTmp->addColVector(*inVTmp);
+      }
+    }
   }
   /* activation */ {
     REGISTER_TIMER_INFO("FwAtvTimer", getName().c_str());
```
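The forward pass now branches on the new flag: in row-vector mode each sample is written as a 1 x imgSize row broadcast across numFilters_ rows (addRowVector), while in the new column-vector mode it is an imgSize x 1 column broadcast across numFilters_ columns (addColVector). Because the output buffer is row-major, the two layouts flatten to different repeat orders. A minimal NumPy sketch of that arithmetic (illustrative only, not part of the commit):

```python
import numpy as np

def featmap_expand_forward(x, num_filters, as_row_vector=True):
    """Mirror of FeatureMapExpandLayer::forward for one sample.

    x: 1-D array of length img_size. Returns a 1-D array of length
    img_size * num_filters (row-major flattening, as in Paddle's
    Matrix buffers).
    """
    img_size = x.shape[0]
    if as_row_vector:
        # addRowVector: out is (num_filters, img_size), x broadcast per row.
        out = np.zeros((num_filters, img_size)) + x[np.newaxis, :]
    else:
        # addColVector: out is (img_size, num_filters), x broadcast per column.
        out = np.zeros((img_size, num_filters)) + x[:, np.newaxis]
    return out.ravel()

x = np.array([1.0, 2.0, 3.0])
print(featmap_expand_forward(x, 2, True))   # [1. 2. 3. 1. 2. 3.]
print(featmap_expand_forward(x, 2, False))  # [1. 1. 2. 2. 3. 3.]
```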
```diff
@@ -102,8 +118,13 @@ void FeatureMapExpandLayer::backward(const UpdateCallback& callback) {
   MatrixPtr outGrad = getOutputGrad();
   size_t batchSize = getInput(0).getBatchSize();
   int imgSize = inGrad->getWidth();
-  /* Do activation */ {
-    REGISTER_TIMER_INFO("BpAvtTimer", getName().c_str());
-    backwardActivation();
-  }
   {
     AsyncGpuBlock asyncGpuBlock;
+    if (asRowVector_) {
       for (size_t i = 0; i < batchSize; i++) {
         MatrixPtr outGradTmp =
             Matrix::create(outGrad->getData() + i * imgSize * numFilters_,
@@ -115,10 +136,19 @@ void FeatureMapExpandLayer::backward(const UpdateCallback& callback) {
             inGrad->getData() + i * imgSize, 1, imgSize, false, useGpu_);
         inGradTmp->collectBias(*outGradTmp, 1);
       }
+    } else {
+      for (size_t i = 0; i < batchSize; i++) {
+        MatrixPtr outGradTmp =
+            Matrix::create(outGrad->getData() + i * imgSize * numFilters_,
+                           imgSize, numFilters_, false, useGpu_);
+        MatrixPtr inGradTmp = Matrix::create(
+            inGrad->getData() + i * imgSize, imgSize, 1, false, useGpu_);
+        inGradTmp->sumRows(*outGradTmp, 1, 1);
+      }
+    }
   }
+  /* Do derivation */ {
+    REGISTER_TIMER_INFO("BpAvtTimer", getName().c_str());
+    backwardActivation();
+  }
 }
```
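Backward is the transpose of the expansion: each of the numFilters_ repeated copies feeds its gradient back into the single input element it came from, so the input gradient is a sum over the copies. collectBias performs the row-wise sum for row-vector mode and sumRows the column-wise sum for column-vector mode. A matching NumPy sketch (illustrative only):

```python
import numpy as np

def featmap_expand_backward(out_grad, num_filters, as_row_vector=True):
    """Mirror of FeatureMapExpandLayer::backward for one sample.

    out_grad: 1-D array of length img_size * num_filters.
    Returns the gradient w.r.t. the original img_size input.
    """
    img_size = out_grad.shape[0] // num_filters
    if as_row_vector:
        # collectBias(*outGradTmp, 1): sum the (num_filters, img_size)
        # gradient over its rows.
        return out_grad.reshape(num_filters, img_size).sum(axis=0)
    else:
        # sumRows(*outGradTmp, 1, 1): sum the (img_size, num_filters)
        # gradient over its columns.
        return out_grad.reshape(img_size, num_filters).sum(axis=1)

g = np.arange(6.0)  # gradient flowing into a 3-element input repeated twice
print(featmap_expand_backward(g, 2, True))   # [3. 5. 7.]
print(featmap_expand_backward(g, 2, False))  # [1. 5. 9.]
```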
paddle/gserver/tests/test_LayerGrad.cpp:

```diff
@@ -1598,6 +1598,8 @@ TEST(Layer, FeatureMapExpandLayer) {
                           /* paraSize= */ 0});
   config.layerConfig.add_inputs();
   for (auto useGpu : {false, true}) {
+    for (auto asRowVec : {false, true}) {
+      config.layerConfig.set_user_arg(asRowVec ? "as_row_vec" : "as_col_vec");
       testLayerGrad(config,
                     "featmap_expand",
                     /*batch_size*/ 100,
@@ -1605,6 +1607,7 @@ TEST(Layer, FeatureMapExpandLayer) {
                     useGpu,
                     /* useWeight */ true);
+    }
   }
 }
 
 TEST(Layer, MultiplexLayer) {
```
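The test now sweeps both layouts: for every (useGpu, asRowVec) combination, testLayerGrad perturbs the input and compares numeric against analytic gradients. The same idea can be sketched against the NumPy helpers above; this stand-alone check_grad (its name and tolerances are made up, it is not Paddle's harness) exploits the fact that the layer is linear, so the gradient of <output, out_grad> w.r.t. x is exactly the backward reduction:

```python
import numpy as np

def check_grad(num_filters, as_row_vector, img_size=3, eps=1e-5):
    """Finite-difference check of featmap_expand_backward against
    featmap_expand_forward (the NumPy sketches defined earlier)."""
    x = np.random.randn(img_size)
    out_grad = np.random.randn(img_size * num_filters)
    analytic = featmap_expand_backward(out_grad, num_filters, as_row_vector)
    numeric = np.zeros_like(x)
    for j in range(img_size):
        xp, xm = x.copy(), x.copy()
        xp[j] += eps
        xm[j] -= eps
        # Central difference of <forward(x), out_grad> w.r.t. x[j].
        fp = featmap_expand_forward(xp, num_filters, as_row_vector) @ out_grad
        fm = featmap_expand_forward(xm, num_filters, as_row_vector) @ out_grad
        numeric[j] = (fp - fm) / (2 * eps)
    assert np.allclose(analytic, numeric, atol=1e-6)

for as_row in (False, True):
    check_grad(num_filters=4, as_row_vector=as_row)
```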
python/paddle/trainer/config_parser.py:

```diff
@@ -2428,7 +2428,13 @@ class ExpandLayer(LayerBase):
 @config_layer('featmap_expand')
 class FeatMapExpandLayer(LayerBase):
-    def __init__(self, name, inputs, device=None, num_filters=None, bias=False):
+    def __init__(self,
+                 name,
+                 inputs,
+                 device=None,
+                 num_filters=None,
+                 as_row_vector=True,
+                 bias=False):
         super(FeatMapExpandLayer, self).__init__(
             name, 'featmap_expand', 0, inputs=inputs, device=device)
         config_assert(
@@ -2437,6 +2443,8 @@ class FeatMapExpandLayer(LayerBase):
             self.config.num_filters = num_filters
         else:
             logger.fatal("FeatMapExpandLayer must specify num_filters.")
+        if not as_row_vector:
+            self.config.user_arg = "as_col_vec"
         self.set_layer_size(self.get_input_layer(0).size * num_filters)
```
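Note how the flag travels: the featmap_expand layer config has no dedicated proto field for it, so FeatMapExpandLayer stores the sentinel string "as_col_vec" in the generic user_arg slot, and FeatureMapExpandLayer::init on the C++ side decodes it with asRowVector_ = config_.user_arg() != "as_col_vec". A toy round-trip sketch of that protocol (helper names are hypothetical):

```python
def encode_flag(as_row_vector):
    """What FeatMapExpandLayer.__init__ records in the layer config."""
    return "" if as_row_vector else "as_col_vec"

def decode_flag(user_arg):
    """What FeatureMapExpandLayer::init recovers on the C++ side:
    anything other than "as_col_vec" (including the empty default)
    selects row-vector mode."""
    return user_arg != "as_col_vec"

assert decode_flag(encode_flag(True)) is True
assert decode_flag(encode_flag(False)) is False
```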
python/paddle/trainer_config_helpers/layers.py:

```diff
@@ -1566,13 +1566,21 @@ def expand_layer(input,
 @wrap_name_default()
 @layer_support()
-def repeat_layer(input, num_repeats, name=None, layer_attr=None):
+def repeat_layer(input,
+                 num_repeats,
+                 as_row_vector=True,
+                 name=None,
+                 layer_attr=None):
     """
-    A layer for repeating the input for num_repeats times. This is equivalent
-    to apply concat_layer() with num_repeats same input.
+    A layer for repeating the input for num_repeats times.
+
+    If as_row_vector:
 
     .. math::
-       y  = [x, x, \cdots, x]
+       y  = [x_1,\cdots, x_n, \cdots, x_1, \cdots, x_n]
+
+    If not as_row_vector:
+
+    .. math::
+       y  = [x_1,\cdots, x_1, \cdots, x_n, \cdots, x_n]
 
     The example usage is:
@@ -1585,6 +1593,12 @@ def repeat_layer(input, num_repeats, name=None, layer_attr=None):
     :param num_repeats: Repeat the input so many times
     :type num_repeats: int
     :param name: Layer name.
+    :param as_row_vector: True for treating input as row vector and repeating
+                          in the column direction.  This is equivalent to
+                          apply concat_layer() with num_repeats same input.
+                          False for treating input as column vector and
+                          repeating in the row direction.
+    :type as_row_vector: bool
     :type name: basestring
     :param layer_attr: extra layer attributes.
     :type layer_attr: ExtraLayerAttribute.
@@ -1596,6 +1610,7 @@ def repeat_layer(input, num_repeats, name=None, layer_attr=None):
         inputs=[input.name],
         name=name,
         num_filters=num_repeats,
+        as_row_vector=as_row_vector,
         type=LayerType.FEATURE_MAP_EXPAND_LAYER,
         **ExtraAttr.to_kwargs(layer_attr))
     return LayerOutput(
```
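With the new flag, both repeat orders are available from the config helper. A usage sketch as it would appear inside a trainer config file (layer names and sizes are made up for illustration):

```python
from paddle.trainer_config_helpers import *

# Suppose `emb` is an existing layer of size 128, e.g.:
# emb = fc_layer(input=data_layer(name="x", size=64), size=128)

# [x_1..x_128, x_1..x_128, x_1..x_128] -- the old behaviour, still the default
rep_row = repeat_layer(input=emb, num_repeats=3)

# [x_1, x_1, x_1, ..., x_128, x_128, x_128] -- the new column-vector order
rep_col = repeat_layer(input=emb, num_repeats=3, as_row_vector=False)

# Both outputs have size 128 * 3 = 384.
```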
```diff
@@ -2846,17 +2861,19 @@ def seq_concat_layer(a, b, act=None, name=None, layer_attr=None,
     Concat sequence a with sequence b.
 
     Inputs:
-        - a = [a1, a2, ..., an]
+        - a = [a1, a2, ..., am]
         - b = [b1, b2, ..., bn]
-        - Note that the length of a and b should be the same.
 
-    Output: [a1, b1, a2, b2, ..., an, bn]
+    Output: [a1, ..., am, b1, ..., bn]
+
+    Note that the above computation is for one sample. Multiple samples are
+    processed in one batch.
 
     The example usage is:
 
     .. code-block:: python
 
-       concat = seq_concat_layer(a=layer1, b=layer2)
+       concat = seq_concat_layer(al=layer1, b=layer2)
 
     :param name: Layer name.
     :type name: basestring
```
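The corrected seq_concat_layer docstring states the semantics more precisely: a and b may have different lengths (m and n steps), and the output is back-to-back concatenation per sample rather than interleaving. In NumPy terms (illustrative only):

```python
import numpy as np

# Per-sample semantics described by the corrected docstring:
# sequences of different lengths are concatenated, not interleaved.
a = np.array([[1.0], [2.0]])          # m = 2 steps
b = np.array([[3.0], [4.0], [5.0]])   # n = 3 steps, may differ from m
out = np.concatenate([a, b], axis=0)  # [a1, ..., am, b1, ..., bn]
print(out.ravel())                    # [1. 2. 3. 4. 5.]
```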