Commit 353244f4 (unverified)
Authored Apr 02, 2019 by Jiabin Yang; committed via GitHub on Apr 02, 2019

test=develop, add FC and test (#16604)

* test=develop, add FC and test
* test=develop, refine code

Parent: bd193781
3 changed files with 111 additions and 45 deletions:

  python/paddle/fluid/dygraph/layer_object_helper.py     +16  -8
  python/paddle/fluid/dygraph/nn.py                       +60  -37
  python/paddle/fluid/tests/unittests/test_layers.py     +35  -0
python/paddle/fluid/dygraph/layer_object_helper.py

@@ -65,7 +65,7 @@ class LayerObjectHelper(LayerHelperBase):
     def _input(self, inputs_in):
         inputs = self._multiple_input(inputs_in)
         if len(inputs) != 1:
-            raise "{0} layer only takes one input".format(self.layer_type)
+            raise "{0} layer only takes one input in".format(self.layer_type)
         return inputs[0]
 
     def _multiple_param_attr(self, length, param_attr_in=None):
@@ -74,7 +74,8 @@ class LayerObjectHelper(LayerHelperBase):
             param_attr = [param_attr]
 
         if len(param_attr) != 1 and len(param_attr) != length:
-            raise ValueError("parameter number mismatch")
+            raise ValueError("parameter number mismatch in {}".format(
+                self.name))
         elif len(param_attr) == 1 and length != 1:
             tmp = [None] * length
             for i in six.moves.range(length):
@@ -91,6 +92,10 @@ class LayerObjectHelper(LayerHelperBase):
         Returns input, param_attr
         """
+        param_attr_in = ParamAttr._to_attr(param_attr_in)
+        if isinstance(param_attr_in, bool):
+            raise ValueError('Param_attr should not be False in {}'.format(
+                self.name))
         inputs = inputs_in if (inputs_in is not None) else []
         inputs = self._multiple_input(inputs)
         param_attrs = self._multiple_param_attr(len(inputs), param_attr_in)
@@ -112,8 +117,8 @@ class LayerObjectHelper(LayerHelperBase):
             if dtype is None:
                 dtype = each.dtype
             elif dtype != each.dtype:
-                raise ValueError("Data Type mismatch: %d to %d" %
-                                 (dtype, each.dtype))
+                raise ValueError("Data Type mismatch: %d to %d in %s" %
+                                 (dtype, each.dtype, self.name))
 
         return dtype
 
     def get_parameter(self, name):
@@ -126,7 +131,8 @@ class LayerObjectHelper(LayerHelperBase):
         """
         param = self.main_program.global_block().var(name)
         if not isinstance(param, Parameter):
-            raise ValueError("no Parameter name %s found" % name)
+            raise ValueError("no Parameter name %s found in %s" %
+                             (name, self.name))
         return param
 
     def append_bias_op(self,
@@ -184,7 +190,8 @@ class LayerObjectHelper(LayerHelperBase):
         if isinstance(act, six.string_types):
             act = {'type': act}
         else:
-            raise TypeError(str(act) + " should be unicode or str")
+            raise TypeError(
+                str(act) + " should be unicode or str in %s ", self.name)
 
         if (use_cudnn is not None) and use_cudnn:
             act['use_cudnn'] = use_cudnn
@@ -211,5 +218,6 @@ class LayerObjectHelper(LayerHelperBase):
         """
         param = param
         if not isinstance(param, cls):
-            raise TypeError("The input {0} parameter of method {1} must be {2}",
-                            param, self.layer_type, cls.__name__)
+            raise TypeError(
+                "The input {0} parameter of method {1} must be {2}, in layer {3}",
+                param, self.layer_type, cls.__name__, self.name)
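
A single pattern runs through every hunk above: each error path in LayerObjectHelper now embeds self.name in its message so a failure can be traced back to the layer instance that raised it. A self-contained sketch of that pattern (plain Python, illustrative only; the _HelperSketch class, its check_param_count method, and the name "fc1" are not part of this commit):

class _HelperSketch(object):
    """Stand-in for LayerObjectHelper's enriched error formatting."""

    def __init__(self, name):
        self.name = name

    def check_param_count(self, param_attr, length):
        # Mirrors the updated check: the owning layer's name is appended
        # to the exception message.
        if len(param_attr) != 1 and len(param_attr) != length:
            raise ValueError("parameter number mismatch in {}".format(
                self.name))


helper = _HelperSketch("fc1")
try:
    helper.check_param_count([None, None], 3)
except ValueError as e:
    print(e)  # parameter number mismatch in fc1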
python/paddle/fluid/dygraph/nn.py

@@ -20,7 +20,7 @@ import numpy as np
 from .. import core
 from ..layers import utils
 from . import layers
-from ..framework import Variable, OpProtoHolder
+from ..framework import Variable, OpProtoHolder, Parameter
 from ..layers import layer_function_generator
 from ..param_attr import ParamAttr
 from ..initializer import Normal, Constant, NumpyArrayInitializer
@@ -213,46 +213,69 @@ class FC(layers.Layer):
         self._param_attr = param_attr
         self._bias_attr = bias_attr
         self._act = act
+        self.__w = list()
 
-    def _build_once(self, input):
-        input_shape = input.shape
-        param_shape = [
-            reduce(lambda a, b: a * b, input_shape[self._num_flatten_dims:], 1)
-        ] + [self._size]
-        self._w = self.create_parameter(
-            attr=self._param_attr,
-            shape=param_shape,
-            dtype=self._dtype,
-            is_bias=False)
+    @property
+    def _w(self, i=0):
+        return self.__w[i]
 
-        if self._bias_attr:
-            size = list([self._size])
-            self._b = self.create_parameter(
-                attr=self._bias_attr,
-                shape=size,
-                dtype=self._dtype,
-                is_bias=True)
-        else:
-            self._b = None
+    @_w.setter
+    def _w(self, value, i=0):
+        assert isinstance(value, Parameter)
+        self.__w[i] = value
 
-    def forward(self, input):
-        tmp = self._helper.create_variable_for_type_inference(self._dtype)
-        self._helper.append_op(
-            type="mul",
-            inputs={"X": input,
-                    "Y": self._w},
-            outputs={"Out": tmp},
-            attrs={
-                "x_num_col_dims": self._num_flatten_dims,
-                "y_num_col_dims": 1
-            })
+    def _build_once(self, input):
+        i = 0
+        for inp, param in self._helper.iter_inputs_and_params(
+                input, self._param_attr):
+            input_shape = inp.shape
+            param_shape = [
+                reduce(lambda a, b: a * b, input_shape[self._num_flatten_dims:],
+                       1)
+            ] + [self._size]
+            self.__w.append(
+                self.add_parameter(
+                    '_w%d' % i,
+                    self.create_parameter(
+                        attr=param,
+                        shape=param_shape,
+                        dtype=self._dtype,
+                        is_bias=False)))
+            i += 1
 
-        pre_bias = self._helper.create_variable_for_type_inference(self._dtype)
-        self._helper.append_op(
-            type="sum",
-            inputs={"X": [tmp]},
-            outputs={"Out": pre_bias},
-            attrs={"use_mkldnn": False})
+        size = list([self._size])
+        self._b = self.create_parameter(
+            attr=self._bias_attr, shape=size, dtype=self._dtype, is_bias=True)
+
+    def forward(self, input):
+        mul_results = list()
+        i = 0
+        for inp, param in self._helper.iter_inputs_and_params(
+                input, self._param_attr):
+            tmp = self._helper.create_variable_for_type_inference(self._dtype)
+            self._helper.append_op(
+                type="mul",
+                inputs={"X": inp,
+                        "Y": self.__w[i]},
+                outputs={"Out": tmp},
+                attrs={
+                    "x_num_col_dims": self._num_flatten_dims,
+                    "y_num_col_dims": 1
+                })
+            i += 1
+            mul_results.append(tmp)
+
+        if len(mul_results) == 1:
+            pre_bias = mul_results[0]
+        else:
+            pre_bias = self._helper.create_variable_for_type_inference(
+                self._dtype)
+            self._helper.append_op(
+                type="sum",
+                inputs={"X": mul_results},
+                outputs={"Out": pre_bias},
+                attrs={"use_mkldnn": False})
 
         if self._b:
             pre_activation = self._helper.create_variable_for_type_inference(
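
In short, the FC rewrite above replaces the single weight self._w with a list self.__w holding one entry per input: _build_once iterates over iter_inputs_and_params, registering weights as '_w0', '_w1', ..., and forward sums the per-input "mul" results before the bias add. A minimal dygraph usage sketch against the 1.4-era API (the layer name 'fc_demo' and the input values are illustrative; import paths assume the source layout shown above, and list-of-inputs support is an assumption since the new test only feeds a single input):

import numpy as np
import paddle.fluid as fluid
from paddle.fluid.dygraph import base, nn

with fluid.dygraph.guard():
    x = base.to_variable(np.ones([3, 32, 32], dtype='float32'))

    # _build_once creates weight '_w0' of shape [32 * 32, 4] on the first call.
    fc = nn.FC('fc_demo', size=4, num_flatten_dims=1)
    out = fc(x)                  # single input: forward skips the extra "sum" op
    print(out._numpy().shape)    # (3, 4)

    # Assumption (not exercised by the new test): passing several inputs,
    # e.g. fc([x, x]), would create one weight per entry and sum the
    # per-input mul results into a single pre-bias tensor.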
python/paddle/fluid/tests/unittests/test_layers.py

@@ -76,6 +76,41 @@ class LayerTest(unittest.TestCase):
 class TestLayer(LayerTest):
+    def test_fc(self):
+        # pdb.set_trace()
+        inp = np.ones([3, 32, 32], dtype='float32')
+        with self.static_graph():
+            t = layers.data(
+                name='data',
+                shape=[3, 32, 32],
+                dtype='float32',
+                append_batch_size=False)
+            ret = layers.fc(t, size=4, bias_attr=False, num_flatten_dims=1)
+            ret2 = layers.fc(ret, size=4)
+            static_ret = self.get_static_graph_result(
+                feed={'data': inp}, fetch_list=[ret2])[0]
+        with self.static_graph():
+            t = layers.data(
+                name='data',
+                shape=[3, 32, 32],
+                dtype='float32',
+                append_batch_size=False)
+            fc1 = nn.FC('fc1', size=4, bias_attr=False, num_flatten_dims=1)
+            fc2 = nn.FC('fc2', size=4)
+            ret = fc1(t)
+            ret2 = fc2(ret)
+            static_ret2 = self.get_static_graph_result(
+                feed={'data': inp}, fetch_list=[ret2])[0]
+        with self.dynamic_graph():
+            t = base.to_variable(inp)
+            fc1 = nn.FC('fc1', size=4, bias_attr=False, num_flatten_dims=1)
+            fc2 = nn.FC('fc2', size=4)
+            ret = fc1(t)
+            dy_ret = fc2(ret)
+
+        self.assertTrue(np.array_equal(static_ret, static_ret2))
+        self.assertTrue(np.array_equal(static_ret, dy_ret._numpy()))
+
     def test_layer_norm(self):
         inp = np.ones([3, 32, 32], dtype='float32')
         with self.static_graph():
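
To exercise only the new FC test in isolation, one option is a small unittest driver (a hypothetical sketch; the import path assumes the Paddle source tree's python/ directory is on PYTHONPATH and the package is built/installed so the test module resolves):

import unittest

# Assumption: run from a checkout where this test module is importable.
from paddle.fluid.tests.unittests.test_layers import TestLayer

suite = unittest.TestSuite()
suite.addTest(TestLayer('test_fc'))  # only the test added in this commit
unittest.TextTestRunner(verbosity=2).run(suite)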