Skip to content
体验新版
项目
组织
正在加载...
登录
切换导航
打开侧边栏
PaddlePaddle
PaddleHub
提交
9176bbad
P
PaddleHub
项目概览
PaddlePaddle
/
PaddleHub
1 年多 前同步成功
通知
283
Star
12117
Fork
2091
代码
文件
提交
分支
Tags
贡献者
分支图
Diff
Issue
200
列表
看板
标记
里程碑
合并请求
4
Wiki
0
Wiki
分析
仓库
DevOps
项目成员
Pages
P
PaddleHub
项目概览
项目概览
详情
发布
仓库
仓库
文件
提交
分支
标签
贡献者
分支图
比较
Issue
200
Issue
200
列表
看板
标记
里程碑
合并请求
4
合并请求
4
Pages
分析
分析
仓库分析
DevOps
Wiki
0
Wiki
成员
成员
收起侧边栏
关闭侧边栏
动态
分支图
创建新Issue
提交
Issue看板
提交
9176bbad
编写于
2月 15, 2019
作者:
W
wuzewu
浏览文件
操作
浏览文件
下载
电子邮件补丁
差异文件
add unit test case
上级
01c64ed8
变更
3
隐藏空白更改
内联
并排
Showing
3 changed files
with
276 additions
and
1 deletion
+276
-1
tests/test_param_serialize.py
tests/test_param_serialize.py
+227
-0
tests/test_pyobj_serialize.py
tests/test_pyobj_serialize.py
+1
-1
tests/test_signature.py
tests/test_signature.py
+48
-0
未找到文件。
tests/test_param_serialize.py
0 → 100644
浏览文件 @
9176bbad
# Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import
unittest
import
paddle_hub
as
hub
import
paddle.fluid
as
fluid
from
paddle_hub.paddle_helper
import
from_param_to_flexible_data
,
from_flexible_data_to_param
from
paddle_hub
import
module_desc_pb2
from
paddle_hub.logger
import
logger
class TestParamAttrSerializeAndDeSerialize(unittest.TestCase):
    """Round-trip tests for fluid ParamAttr serialization.

    Each test builds a program containing a single fc layer whose weight
    carries one specific ParamAttr setting, serializes that parameter into a
    ``FlexibleData`` protobuf message with ``from_param_to_flexible_data``,
    deserializes it back with ``from_flexible_data_to_param``, and checks
    that the attribute survived the round trip with the same type and value.
    """

    def _roundtrip_fc_param(self, **param_attr_kwargs):
        """Build an fc weight with the given ParamAttr settings and run it
        through the serialize/deserialize round trip.

        Args:
            **param_attr_kwargs: keyword arguments forwarded to
                ``fluid.ParamAttr`` (e.g. ``regularizer=...``,
                ``gradient_clip=...``, ``trainable=...``).

        Returns:
            tuple: ``(fc_w, param_dict)`` where ``fc_w`` is the original
            ``Parameter`` and ``param_dict`` is the attribute dict recovered
            from the serialized ``FlexibleData`` message.
        """
        program = fluid.Program()
        with fluid.program_guard(program):
            # `input` is a builtin, so name the local differently; the fc
            # call still uses the `input=` keyword the API expects.
            input_data = fluid.layers.data(
                name="test", shape=[1], dtype="float32")
            fluid.layers.fc(
                input=input_data,
                size=10,
                param_attr=fluid.ParamAttr(name="fc_w", **param_attr_kwargs))
            # Inside program_guard, default_main_program() is `program`, so
            # this picks up the parameter created by the fc layer above.
            fc_w = [
                param for param in
                fluid.default_main_program().global_block().iter_parameters()
            ][0]
            flexible_data = module_desc_pb2.FlexibleData()
            from_param_to_flexible_data(fc_w, flexible_data)
            param_dict = from_flexible_data_to_param(flexible_data)
        return fc_w, param_dict

    def test_convert_l1_regularizer(self):
        """L1Decay regularizer type and coefficient survive the round trip."""
        fc_w, param_dict = self._roundtrip_fc_param(
            regularizer=fluid.regularizer.L1Decay(regularization_coeff=1))
        assert fc_w.regularizer.__class__ == param_dict[
            'regularizer'].__class__, "regularizer type convert error!"
        assert fc_w.regularizer._regularization_coeff == param_dict[
            'regularizer']._regularization_coeff, "regularizer value convert error!"

    def test_convert_l2_regularizer(self):
        """L2Decay regularizer type and coefficient survive the round trip."""
        fc_w, param_dict = self._roundtrip_fc_param(
            regularizer=fluid.regularizer.L2Decay(regularization_coeff=1.5))
        assert fc_w.regularizer.__class__ == param_dict[
            'regularizer'].__class__, "regularizer type convert error!"
        assert fc_w.regularizer._regularization_coeff == param_dict[
            'regularizer']._regularization_coeff, "regularizer value convert error!"

    def test_convert_error_clip_by_value(self):
        """ErrorClipByValue type and its max/min bounds survive the round trip."""
        fc_w, param_dict = self._roundtrip_fc_param(
            gradient_clip=fluid.clip.ErrorClipByValue(max=1))
        assert fc_w.gradient_clip_attr.__class__ == param_dict[
            'gradient_clip_attr'].__class__, "clip type convert error!"
        assert fc_w.gradient_clip_attr.max == param_dict[
            'gradient_clip_attr'].max, "clip max value convert error!"
        assert fc_w.gradient_clip_attr.min == param_dict[
            'gradient_clip_attr'].min, "clip min value convert error!"

    def test_convert_gradient_clip_by_value(self):
        """GradientClipByValue type and its max/min bounds survive the round trip."""
        fc_w, param_dict = self._roundtrip_fc_param(
            gradient_clip=fluid.clip.GradientClipByValue(max=1))
        assert fc_w.gradient_clip_attr.__class__ == param_dict[
            'gradient_clip_attr'].__class__, "clip type convert error!"
        assert fc_w.gradient_clip_attr.max == param_dict[
            'gradient_clip_attr'].max, "clip max value convert error!"
        assert fc_w.gradient_clip_attr.min == param_dict[
            'gradient_clip_attr'].min, "clip min value convert error!"

    def test_convert_gradient_clip_by_normal(self):
        """GradientClipByNorm type and clip_norm survive the round trip."""
        fc_w, param_dict = self._roundtrip_fc_param(
            gradient_clip=fluid.clip.GradientClipByNorm(clip_norm=1))
        assert fc_w.gradient_clip_attr.__class__ == param_dict[
            'gradient_clip_attr'].__class__, "clip type convert error!"
        assert fc_w.gradient_clip_attr.clip_norm == param_dict[
            'gradient_clip_attr'].clip_norm, "clip value convert error!"

    def test_convert_gradient_clip_by_global_normal(self):
        """GradientClipByGlobalNorm type, clip_norm and group_name survive
        the round trip."""
        fc_w, param_dict = self._roundtrip_fc_param(
            gradient_clip=fluid.clip.GradientClipByGlobalNorm(clip_norm=1))
        assert fc_w.gradient_clip_attr.__class__ == param_dict[
            'gradient_clip_attr'].__class__, "clip type convert error!"
        assert fc_w.gradient_clip_attr.clip_norm == param_dict[
            'gradient_clip_attr'].clip_norm, "clip_norm value convert error!"
        assert fc_w.gradient_clip_attr.group_name == param_dict[
            'gradient_clip_attr'].group_name, "clip group_name convert error!"

    def test_convert_trainable(self):
        """The trainable flag survives the round trip with type and value."""
        fc_w, param_dict = self._roundtrip_fc_param(trainable=False)
        assert fc_w.trainable.__class__ == param_dict[
            'trainable'].__class__, "trainable type convert error!"
        assert fc_w.trainable == param_dict[
            'trainable'], "trainable value convert error!"

    def test_convert_do_model_average(self):
        """The do_model_average flag survives the round trip."""
        fc_w, param_dict = self._roundtrip_fc_param(do_model_average=True)
        assert fc_w.do_model_average.__class__ == param_dict[
            'do_model_average'].__class__, "do_model_average type convert error!"
        assert fc_w.do_model_average == param_dict[
            'do_model_average'], "do_model_average value convert error!"

    def test_convert_optimize_attr(self):
        """optimize_attr (learning_rate) survives the round trip."""
        fc_w, param_dict = self._roundtrip_fc_param(learning_rate=5)
        assert fc_w.optimize_attr.__class__ == param_dict[
            'optimize_attr'].__class__, "optimize_attr type convert error!"
        assert fc_w.optimize_attr == param_dict[
            'optimize_attr'], "optimize_attr value convert error!"
# Allow running this test module directly: `python tests/test_param_serialize.py`.
if __name__ == "__main__":
    unittest.main()
tests/test_pyobj_serialize.py
浏览文件 @
9176bbad
...
...
@@ -218,7 +218,7 @@ class TestSerializeAndDeSerialize(unittest.TestCase):
output
=
from_flexible_data_to_pyobj
(
flexible_data
)
assert
input
==
output
,
"dict convesion error"
def
test_compound_object
(
self
):
def
test_co
nvert_co
mpound_object
(
self
):
input
=
{
False
:
1
,
'2'
:
3
,
...
...
tests/test_signature.py
0 → 100644
浏览文件 @
9176bbad
# Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import
unittest
import
paddle_hub
as
hub
import
paddle.fluid
as
fluid
from
paddle_hub
import
create_signature
class TestSignature(unittest.TestCase):
    """Checks that ``create_signature`` stores every field it is given."""

    def test_check_signature_info(self):
        """A signature built from explicit arguments echoes each of them
        back unchanged through its getter methods."""
        main_prog = fluid.Program()
        with fluid.program_guard(main_prog):
            int_var = fluid.layers.data(
                name="var_1", dtype="int64", shape=[1])
            float_var = fluid.layers.data(
                name="var_2", dtype="float32", shape=[3, 100, 100])
            sig_name = "test"
            input_vars = [int_var]
            output_vars = [float_var]
            feed_list = ["label"]
            fetch_list = ["img"]
            signature = create_signature(
                name=sig_name,
                inputs=input_vars,
                outputs=output_vars,
                feed_names=feed_list,
                fetch_names=fetch_list)
            # Every accessor must return exactly what was passed in.
            assert signature.get_name() == sig_name, "sign name error"
            assert signature.get_inputs() == input_vars, "sign inputs error"
            assert signature.get_outputs() == output_vars, "sign outputs error"
            assert signature.get_feed_names() == feed_list, "sign feed_names error"
            assert signature.get_fetch_names() == fetch_list, "sign fetch_names error"
# Allow running this test module directly: `python tests/test_signature.py`.
if __name__ == "__main__":
    unittest.main()
编辑
预览
Markdown
is supported
0%
请重试
或
添加新附件
.
添加附件
取消
You are about to add
0
people
to the discussion. Proceed with caution.
先完成此消息的编辑!
取消
想要评论请
注册
或
登录