PaddlePaddle / Paddle
Unverified commit 36027490
Authored Aug 05, 2020 by Chen Weihang; committed via GitHub on Aug 05, 2020
Verify correctness of jit.save/jit.load - part 1 (#25915)
Parent: 82374dc1

Showing 6 changed files with 408 additions and 94 deletions (+408 -94)
python/paddle/fluid/tests/unittests/dygraph_to_static/test_bert.py        +87  -6
python/paddle/fluid/tests/unittests/dygraph_to_static/test_bmn.py         +19  -1
python/paddle/fluid/tests/unittests/dygraph_to_static/test_lac.py         +15  -0
python/paddle/fluid/tests/unittests/dygraph_to_static/test_mobile_net.py  +79  -6
python/paddle/fluid/tests/unittests/dygraph_to_static/test_resnet.py      +142 -81
python/paddle/fluid/tests/unittests/dygraph_to_static/test_se_resnet.py   +66  -0
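Every file in this change follows the same verification pattern: train the model once with the ProgramTranslator enabled and persist it with fluid.dygraph.jit.save, train it once in pure dygraph mode and persist the state dict with fluid.dygraph.save_dygraph, then compare three predictions against each other — the dygraph model restored from the state dict, the saved inference program executed via fluid.io.load_inference_model, and the TranslatedLayer returned by fluid.dygraph.jit.load. The following is a minimal, self-contained sketch of that round trip against the fluid API this commit targets; SimpleNet, SAVE_PATH, and the shapes are illustrative and not part of the commit.

    import numpy as np
    import paddle.fluid as fluid
    from paddle.fluid.dygraph import declarative
    from paddle.fluid.dygraph.nn import Linear
    from paddle.fluid.dygraph.io import VARIABLE_FILENAME

    SAVE_PATH = "./simple_net.inference.model"  # hypothetical path


    class SimpleNet(fluid.dygraph.Layer):
        """Hypothetical one-layer model standing in for BERT/ResNet/etc."""

        def __init__(self):
            super(SimpleNet, self).__init__()
            self.fc = Linear(10, 3)

        @declarative  # traced to a static program while the translator is enabled
        def forward(self, x):
            return self.fc(x)


    place = fluid.CPUPlace()
    data = np.random.random([4, 10]).astype('float32')

    with fluid.dygraph.guard(place):
        net = SimpleNet()
        dy_out = net(fluid.dygraph.to_variable(data)).numpy()
        # persist the translated program plus parameters
        fluid.dygraph.jit.save(net, SAVE_PATH)

    # reload as a TranslatedLayer and run it in dygraph mode
    with fluid.dygraph.guard(place):
        loaded = fluid.dygraph.jit.load(SAVE_PATH)
        loaded.eval()
        jit_out = loaded(fluid.dygraph.to_variable(data)).numpy()

    # reload the same artifacts as a static inference program
    exe = fluid.Executor(place)
    [program, feed_names, fetch_targets] = fluid.io.load_inference_model(
        SAVE_PATH, executor=exe, params_filename=VARIABLE_FILENAME)
    st_out = exe.run(program,
                     feed={feed_names[0]: data},
                     fetch_list=fetch_targets)[0]

    assert np.allclose(dy_out, jit_out) and np.allclose(dy_out, st_out)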
python/paddle/fluid/tests/unittests/dygraph_to_static/test_bert.py
@@ -18,6 +18,7 @@ import unittest
 import numpy as np
 import paddle.fluid as fluid
 from paddle.fluid.dygraph.dygraph_to_static import ProgramTranslator
+from paddle.fluid.dygraph.io import VARIABLE_FILENAME
 from bert_dygraph_model import PretrainModelLayer
 from bert_utils import get_bert_config, get_feed_data_reader
@@ -28,9 +29,11 @@ place = fluid.CUDAPlace(0) if fluid.is_compiled_with_cuda() else fluid.CPUPlace(
 SEED = 2020
 STEP_NUM = 10
 PRINT_STEP = 2
+MODEL_SAVE_PATH = "./bert.inference.model"
+DY_STATE_DICT_SAVE_PATH = "./bert.dygraph"


-def train(bert_config, data_reader):
+def train(bert_config, data_reader, to_static):
     with fluid.dygraph.guard(place):
         fluid.default_main_program().random_seed = SEED
         fluid.default_startup_program().random_seed = SEED
@@ -79,18 +82,74 @@ def train(bert_config, data_reader):
             step_idx += 1
             if step_idx == STEP_NUM:
+                if to_static:
+                    fluid.dygraph.jit.save(bert, MODEL_SAVE_PATH)
+                else:
+                    fluid.dygraph.save_dygraph(bert.state_dict(),
+                                               DY_STATE_DICT_SAVE_PATH)
                 break
     return loss, ppl


 def train_dygraph(bert_config, data_reader):
     program_translator.enable(False)
-    return train(bert_config, data_reader)
+    return train(bert_config, data_reader, False)


 def train_static(bert_config, data_reader):
     program_translator.enable(True)
-    return train(bert_config, data_reader)
+    return train(bert_config, data_reader, True)


+def predict_static(data):
+    exe = fluid.Executor(place)
+    # load inference model
+    [inference_program, feed_target_names,
+     fetch_targets] = fluid.io.load_inference_model(
+         MODEL_SAVE_PATH, executor=exe, params_filename=VARIABLE_FILENAME)
+    pred_res = exe.run(inference_program,
+                       feed=dict(zip(feed_target_names, data)),
+                       fetch_list=fetch_targets)
+
+    return pred_res
+
+
+def predict_dygraph(bert_config, data):
+    program_translator.enable(False)
+    with fluid.dygraph.guard(place):
+        bert = PretrainModelLayer(
+            config=bert_config, weight_sharing=False, use_fp16=False)
+        model_dict, _ = fluid.dygraph.load_dygraph(DY_STATE_DICT_SAVE_PATH)
+        bert.set_dict(model_dict)
+        bert.eval()
+
+        input_vars = [fluid.dygraph.to_variable(x) for x in data]
+        src_ids, pos_ids, sent_ids, input_mask, mask_label, mask_pos, labels = input_vars
+        pred_res = bert(
+            src_ids=src_ids,
+            position_ids=pos_ids,
+            sentence_ids=sent_ids,
+            input_mask=input_mask,
+            mask_label=mask_label,
+            mask_pos=mask_pos,
+            labels=labels)
+        pred_res = [var.numpy() for var in pred_res]
+
+        return pred_res
+
+
+def predict_dygraph_jit(data):
+    with fluid.dygraph.guard(place):
+        bert = fluid.dygraph.jit.load(MODEL_SAVE_PATH)
+        bert.eval()
+
+        src_ids, pos_ids, sent_ids, input_mask, mask_label, mask_pos, labels = data
+        pred_res = bert(src_ids, pos_ids, sent_ids, input_mask, mask_label,
+                        mask_pos, labels)
+        pred_res = [var.numpy() for var in pred_res]
+
+        return pred_res
+
+
 class TestBert(unittest.TestCase):
@@ -104,14 +163,36 @@ class TestBert(unittest.TestCase):
         dygraph_loss, dygraph_ppl = train_dygraph(self.bert_config,
                                                   self.data_reader)
         self.assertTrue(
-            np.allclose(static_loss, static_loss),
-            msg="static_loss: {} \n static_loss: {}".format(static_loss,
-                                                            dygraph_loss))
+            np.allclose(static_loss, dygraph_loss),
+            msg="static_loss: {} \n dygraph_loss: {}".format(static_loss,
+                                                             dygraph_loss))
         self.assertTrue(
             np.allclose(static_ppl, dygraph_ppl),
             msg="static_ppl: {} \n dygraph_ppl: {}".format(static_ppl,
                                                            dygraph_ppl))

+        self.verify_predict()
+
+    def verify_predict(self):
+        for data in self.data_reader.data_generator()():
+            dygraph_pred_res = predict_dygraph(self.bert_config, data)
+            static_pred_res = predict_static(data)
+            dygraph_jit_pred_res = predict_dygraph_jit(data)
+
+            for dy_res, st_res, dy_jit_res in zip(
+                    dygraph_pred_res, static_pred_res, dygraph_jit_pred_res):
+                self.assertTrue(
+                    np.allclose(st_res, dy_res),
+                    "dygraph_res: {},\n static_res: {}".format(
+                        dy_res[~np.isclose(st_res, dy_res)],
+                        st_res[~np.isclose(st_res, dy_res)]))
+                self.assertTrue(
+                    np.allclose(st_res, dy_jit_res),
+                    "dygraph_jit_res: {},\n static_res: {}".format(
+                        dy_jit_res[~np.isclose(st_res, dy_jit_res)],
+                        st_res[~np.isclose(st_res, dy_jit_res)]))
+            break
+

 if __name__ == '__main__':
     unittest.main()
python/paddle/fluid/tests/unittests/dygraph_to_static/test_bmn.py
@@ -692,13 +692,20 @@ class TestTrain(unittest.TestCase):
             video_data = np.array([item[0] for item in data]).astype(DATATYPE)
             static_pred_res = self.predict_static(video_data)
             dygraph_pred_res = self.predict_dygraph(video_data)
+            dygraph_jit_pred_res = self.predict_dygraph_jit(video_data)

-            for dy_res, st_res in zip(dygraph_pred_res, static_pred_res):
+            for dy_res, st_res, dy_jit_res in zip(
+                    dygraph_pred_res, static_pred_res, dygraph_jit_pred_res):
                 self.assertTrue(
                     np.allclose(st_res, dy_res),
                     "dygraph_res: {},\n static_res: {}".format(
                         dy_res[~np.isclose(st_res, dy_res)],
                         st_res[~np.isclose(st_res, dy_res)]))
+                self.assertTrue(
+                    np.allclose(st_res, dy_jit_res),
+                    "dygraph_jit_res: {},\n static_res: {}".format(
+                        dy_jit_res[~np.isclose(st_res, dy_jit_res)],
+                        st_res[~np.isclose(st_res, dy_jit_res)]))
             break

     def predict_dygraph(self, data):
@@ -731,6 +738,17 @@ class TestTrain(unittest.TestCase):
         return pred_res

+    def predict_dygraph_jit(self, data):
+        with fluid.dygraph.guard(self.place):
+            bmn = fluid.dygraph.jit.load(self.args.infer_dir)
+            bmn.eval()
+
+            x = to_variable(data)
+            pred_res = bmn(x)
+            pred_res = [var.numpy() for var in pred_res]
+
+            return pred_res
+

 if __name__ == "__main__":
     unittest.main()
python/paddle/fluid/tests/unittests/dygraph_to_static/test_lac.py
@@ -535,9 +535,14 @@ class TestLACModel(unittest.TestCase):
             batch = [np.vstack(var) for var in zip(*batch)]
             dy_pre = self.predict_dygraph(batch)
             st_pre = self.predict_static(batch)
+            dy_jit_pre = self.predict_dygraph_jit(batch)
             self.assertTrue(
                 np.allclose(dy_pre, st_pre),
                 msg="dy_pre:\n {}\n, st_pre: \n{}.".format(dy_pre, st_pre))
+            self.assertTrue(
+                np.allclose(dy_jit_pre, st_pre),
+                msg="dy_jit_pre:\n {}\n, st_pre: \n{}.".format(dy_jit_pre,
+                                                               st_pre))

     def predict_dygraph(self, batch):
         words, targets, length = batch
@@ -576,6 +581,16 @@ class TestLACModel(unittest.TestCase):
             fetch_list=fetch_targets)
         return pred_res[0]

+    def predict_dygraph_jit(self, batch):
+        words, targets, length = batch
+        with fluid.dygraph.guard(self.place):
+            model = fluid.dygraph.jit.load(self.args.model_save_dir)
+            model.eval()
+
+            pred_res = model(to_variable(words), to_variable(length))
+
+            return pred_res.numpy()
+

 if __name__ == "__main__":
     unittest.main()
python/paddle/fluid/tests/unittests/dygraph_to_static/test_mobile_net.py
@@ -19,6 +19,7 @@ from paddle.fluid.initializer import MSRA
 from paddle.fluid.param_attr import ParamAttr
 from paddle.fluid.dygraph.nn import Conv2D, Pool2D, BatchNorm, Linear
 from paddle.fluid.dygraph import declarative, ProgramTranslator
+from paddle.fluid.dygraph.io import VARIABLE_FILENAME

 import unittest
@@ -433,14 +434,15 @@ class Args(object):
     class_dim = 50
     print_step = 1
     train_step = 10
+    place = fluid.CUDAPlace(0) if fluid.is_compiled_with_cuda(
+    ) else fluid.CPUPlace()
+    model_save_path = model + ".inference.model"
+    dy_state_dict_save_path = model + ".dygraph"


 def train_mobilenet(args, to_static):
     program_translator.enable(to_static)
-    place = fluid.CUDAPlace(0) if fluid.is_compiled_with_cuda(
-    ) else fluid.CPUPlace()
-    with fluid.dygraph.guard(place):
+    with fluid.dygraph.guard(args.place):

         np.random.seed(SEED)
         fluid.default_startup_program().random_seed = SEED
@@ -461,7 +463,7 @@ def train_mobilenet(args, to_static):
         # 3. reader
         train_reader = fake_data_reader(args.batch_size, args.class_dim)
         train_data_loader = fluid.io.DataLoader.from_generator(capacity=16)
-        train_data_loader.set_sample_list_generator(train_reader, place)
+        train_data_loader.set_sample_list_generator(train_reader)

         # 4. train loop
         loss_data = []
@@ -498,17 +500,64 @@ def train_mobilenet(args, to_static):
             batch_id += 1
             t_last = time.time()
             if batch_id > args.train_step:
+                if to_static:
+                    fluid.dygraph.jit.save(net, args.model_save_path)
+                else:
+                    fluid.dygraph.save_dygraph(net.state_dict(),
+                                               args.dy_state_dict_save_path)
                 break

     return np.array(loss_data)


+def predict_static(args, data):
+    exe = fluid.Executor(args.place)
+    # load inference model
+    [inference_program, feed_target_names,
+     fetch_targets] = fluid.io.load_inference_model(
+         args.model_save_path, executor=exe, params_filename=VARIABLE_FILENAME)
+
+    pred_res = exe.run(inference_program,
+                       feed={feed_target_names[0]: data},
+                       fetch_list=fetch_targets)
+
+    return pred_res[0]
+
+
+def predict_dygraph(args, data):
+    program_translator.enable(False)
+    with fluid.dygraph.guard(args.place):
+        if args.model == "MobileNetV1":
+            model = MobileNetV1(class_dim=args.class_dim, scale=1.0)
+        elif args.model == "MobileNetV2":
+            model = MobileNetV2(class_dim=args.class_dim, scale=1.0)
+        # load dygraph trained parameters
+        model_dict, _ = fluid.load_dygraph(args.dy_state_dict_save_path)
+        model.set_dict(model_dict)
+        model.eval()
+
+        pred_res = model(fluid.dygraph.to_variable(data))
+
+        return pred_res.numpy()
+
+
+def predict_dygraph_jit(args, data):
+    with fluid.dygraph.guard(args.place):
+        model = fluid.dygraph.jit.load(args.model_save_path)
+        model.eval()
+
+        pred_res = model(data)
+
+        return pred_res.numpy()
+
+
 class TestMobileNet(unittest.TestCase):
     def setUp(self):
         self.args = Args()

     def train(self, model_name, to_static):
         self.args.model = model_name
+        self.args.model_save_path = model_name + ".inference.model"
+        self.args.dy_state_dict_save_path = model_name + ".dygraph"
         out = train_mobilenet(self.args, to_static)
         return out
@@ -519,12 +568,36 @@ class TestMobileNet(unittest.TestCase):
                 np.allclose(dy_out, st_out),
                 msg="dy_out: {}, st_out: {}".format(dy_out, st_out))

-    def test_mobileNet(self):
+    def assert_same_predict(self, model_name):
+        self.args.model = model_name
+        self.args.model_save_path = model_name + ".inference.model"
+        self.args.dy_state_dict_save_path = model_name + ".dygraph"
+        local_random = np.random.RandomState(SEED)
+        image = local_random.random_sample([1, 3, 224, 224]).astype('float32')
+        dy_pre = predict_dygraph(self.args, image)
+        st_pre = predict_static(self.args, image)
+        dy_jit_pre = predict_dygraph_jit(self.args, image)
+        self.assertTrue(
+            np.allclose(dy_pre, st_pre),
+            msg="dy_pre:\n {}\n, st_pre: \n{}.".format(dy_pre, st_pre))
+        self.assertTrue(
+            np.allclose(dy_jit_pre, st_pre),
+            msg="dy_jit_pre:\n {}\n, st_pre: \n{}.".format(dy_jit_pre, st_pre))
+
+    def test_mobile_net(self):
         # MobileNet-V1
         self.assert_same_loss("MobileNetV1")
         # MobileNet-V2
         self.assert_same_loss("MobileNetV2")

+        self.verify_predict()
+
+    def verify_predict(self):
+        # MobileNet-V1
+        self.assert_same_predict("MobileNetV1")
+        # MobileNet-V2
+        self.assert_same_predict("MobileNetV2")
+

 if __name__ == '__main__':
     unittest.main()
python/paddle/fluid/tests/unittests/dygraph_to_static/test_resnet.py
@@ -22,39 +22,33 @@ import numpy as np
 import paddle
 import paddle.fluid as fluid
-from paddle.fluid.dygraph.jit import dygraph_to_static_func
+from paddle.fluid.dygraph import declarative, ProgramTranslator
 from paddle.fluid.dygraph.nn import BatchNorm, Conv2D, Linear, Pool2D
+from paddle.fluid.dygraph.io import VARIABLE_FILENAME

 SEED = 2020
 IMAGENET1000 = 1281167
-base_lr = 0.1
+base_lr = 0.001
 momentum_rate = 0.9
 l2_decay = 1e-4
 batch_size = 8
 epoch_num = 1
 place = fluid.CUDAPlace(0) if fluid.is_compiled_with_cuda() \
     else fluid.CPUPlace()

+MODEL_SAVE_PATH = "./resnet.inference.model"
+DY_STATE_DICT_SAVE_PATH = "./resnet.dygraph"
+
+program_translator = ProgramTranslator()
+
 if fluid.is_compiled_with_cuda():
     fluid.set_flags({'FLAGS_cudnn_deterministic': True})


 def optimizer_setting(parameter_list=None):
-    total_images = IMAGENET1000
-    step = int(math.ceil(float(total_images) / batch_size))
-    epochs = [30, 60, 90]
-    bd = [step * e for e in epochs]
-
-    lr = [base_lr * (0.1**i) for i in range(len(bd) + 1)]
-    if fluid.in_dygraph_mode():
-        optimizer = fluid.optimizer.Momentum(
-            learning_rate=fluid.layers.piecewise_decay(
-                boundaries=bd, values=lr),
-            momentum=momentum_rate,
-            regularization=fluid.regularizer.L2Decay(l2_decay),
-            parameter_list=parameter_list)
-    else:
-        optimizer = fluid.optimizer.Momentum(
-            learning_rate=fluid.layers.piecewise_decay(
-                boundaries=bd, values=lr),
-            momentum=momentum_rate,
-            regularization=fluid.regularizer.L2Decay(l2_decay))
+    optimizer = fluid.optimizer.Momentum(
+        learning_rate=base_lr,
+        momentum=momentum_rate,
+        regularization=fluid.regularizer.L2Decay(l2_decay),
+        parameter_list=parameter_list)

     return optimizer
@@ -189,8 +183,8 @@ class ResNet(fluid.dygraph.Layer):
                 param_attr=fluid.param_attr.ParamAttr(
                     initializer=fluid.initializer.Uniform(-stdv, stdv)))

-    @dygraph_to_static_func
-    def forward(self, inputs, label):
+    @declarative
+    def forward(self, inputs):
         y = self.conv(inputs)
         y = self.pool2d_max(y)
         for bottleneck_block in self.bottleneck_block_list:
@@ -199,77 +193,144 @@ class ResNet(fluid.dygraph.Layer):
         y = fluid.layers.reshape(y, shape=[-1, self.pool2d_avg_output])
         pred = self.out(y)
-        loss = fluid.layers.cross_entropy(input=pred, label=label)
-        avg_loss_ = fluid.layers.mean(x=loss)
-        acc_top1_ = fluid.layers.accuracy(input=pred, label=label, k=1)
-        acc_top5_ = fluid.layers.accuracy(input=pred, label=label, k=5)

-        return pred, avg_loss_, acc_top1_, acc_top5_
+        return pred
+
+
+def reader_decorator(reader):
+    def __reader__():
+        for item in reader():
+            img = np.array(item[0]).astype('float32').reshape(3, 224, 224)
+            label = np.array(item[1]).astype('int64').reshape(1)
+            yield img, label
+
+    return __reader__


-def train_resnet_in_static_mode():
-    """
-    Tests model decorated by `dygraph_to_static_output` in static mode. For users, the model is defined in dygraph mode and trained in static mode.
-    """
+def train(to_static):
+    with fluid.dygraph.guard(place):
+        np.random.seed(SEED)
+        fluid.default_startup_program().random_seed = SEED
+        fluid.default_main_program().random_seed = SEED
+
+        train_reader = paddle.batch(
+            reader_decorator(paddle.dataset.flowers.train(use_xmap=False)),
+            batch_size=batch_size,
+            drop_last=True)
+        data_loader = fluid.io.DataLoader.from_generator(
+            capacity=5, iterable=True)
+        data_loader.set_sample_list_generator(train_reader)
+
+        resnet = ResNet()
+        optimizer = optimizer_setting(parameter_list=resnet.parameters())
+
+        for epoch in range(epoch_num):
+            total_loss = 0.0
+            total_acc1 = 0.0
+            total_acc5 = 0.0
+            total_sample = 0
+
+            for batch_id, data in enumerate(data_loader()):
+                start_time = time.time()
+                img, label = data
+
+                pred = resnet(img)
+                loss = fluid.layers.cross_entropy(input=pred, label=label)
+                avg_loss = fluid.layers.mean(x=loss)
+                acc_top1 = fluid.layers.accuracy(input=pred, label=label, k=1)
+                acc_top5 = fluid.layers.accuracy(input=pred, label=label, k=5)
+
+                avg_loss.backward()
+                optimizer.minimize(avg_loss)
+                resnet.clear_gradients()
+
+                total_loss += avg_loss
+                total_acc1 += acc_top1
+                total_acc5 += acc_top5
+                total_sample += 1
+
+                end_time = time.time()
+                if batch_id % 2 == 0:
+                    print( "epoch %d | batch step %d, loss %0.3f, acc1 %0.3f, acc5 %0.3f, time %f" % \
+                        ( epoch, batch_id, total_loss.numpy() / total_sample, \
+                          total_acc1.numpy() / total_sample, total_acc5.numpy() / total_sample, end_time-start_time))
+                if batch_id == 10:
+                    if to_static:
+                        fluid.dygraph.jit.save(resnet, MODEL_SAVE_PATH)
+                    else:
+                        fluid.dygraph.save_dygraph(resnet.state_dict(),
+                                                   DY_STATE_DICT_SAVE_PATH)
+                    # avoid dataloader throw abort signaal
+                    data_loader._reset()
+                    break
+
+        return total_loss.numpy()
+
+
+def predict_dygraph(data):
+    program_translator.enable(False)
+    with fluid.dygraph.guard(place):
+        resnet = ResNet()
+
+        model_dict, _ = fluid.dygraph.load_dygraph(DY_STATE_DICT_SAVE_PATH)
+        resnet.set_dict(model_dict)
+        resnet.eval()
+
+        pred_res = resnet(fluid.dygraph.to_variable(data))
+
+        return pred_res.numpy()
+
+
+def predict_static(data):
     exe = fluid.Executor(place)
-    startup_prog = fluid.Program()
-    main_prog = fluid.Program()
-
-    with fluid.program_guard(main_prog, startup_prog):
-        img = fluid.data(name="img", shape=[None, 3, 224, 224], dtype="float32")
-        label = fluid.data(name="label", shape=[None, 1], dtype="int64")
-        label.stop_gradient = True
-        resnet = ResNet()
-        pred, avg_loss_, acc_top1_, acc_top5_ = resnet(img, label)
-        optimizer = optimizer_setting(parameter_list=resnet.parameters())
-        optimizer.minimize(avg_loss_)
-
-    exe.run(startup_prog)
-
-    train_reader = paddle.batch(
-        paddle.dataset.flowers.train(use_xmap=False), batch_size=batch_size)
-
-    for epoch in range(epoch_num):
-        total_loss = 0.0
-        total_acc1 = 0.0
-        total_acc5 = 0.0
-        total_sample = 0
-
-        for batch_id, data in enumerate(train_reader()):
-            start_time = time.time()
-            dy_x_data = np.array(
-                [x[0].reshape(3, 224, 224) for x in data]).astype('float32')
-            if len(np.array([x[1] for x in data]).astype('int64')) != batch_size:
-                continue
-            y_data = np.array([x[1] for x in data]).astype('int64').reshape(-1, 1)
-
-            avg_loss, acc_top1, acc_top5 = exe.run(
-                main_prog,
-                feed={"img": dy_x_data, "label": y_data},
-                fetch_list=[avg_loss_, acc_top1_, acc_top5_])
-
-            total_loss += avg_loss
-            total_acc1 += acc_top1
-            total_acc5 += acc_top5
-            total_sample += 1
-
-            end_time = time.time()
-            if batch_id % 2 == 0:
-                print( "epoch %d | batch step %d, loss %0.3f, acc1 %0.3f, acc5 %0.3f, time %f" % \
-                    ( epoch, batch_id, total_loss / total_sample, \
-                      total_acc1 / total_sample, total_acc5 / total_sample, end_time-start_time))
-            if batch_id == 10:
-                break
+    [inference_program, feed_target_names,
+     fetch_targets] = fluid.io.load_inference_model(
+         MODEL_SAVE_PATH, executor=exe, params_filename=VARIABLE_FILENAME)
+
+    pred_res = exe.run(inference_program,
+                       feed={feed_target_names[0]: data},
+                       fetch_list=fetch_targets)
+
+    return pred_res[0]
+
+
+def predict_dygraph_jit(data):
+    with fluid.dygraph.guard(place):
+        resnet = fluid.dygraph.jit.load(MODEL_SAVE_PATH)
+        resnet.eval()
+
+        pred_res = resnet(data)
+
+        return pred_res.numpy()


 class TestResnet(unittest.TestCase):
-    def test_in_static_mode(self):
-        train_resnet_in_static_mode()
+    def train(self, to_static):
+        program_translator.enable(to_static)
+        return train(to_static)
+
+    def verify_predict(self):
+        image = np.random.random([1, 3, 224, 224]).astype('float32')
+        dy_pre = predict_dygraph(image)
+        st_pre = predict_static(image)
+        dy_jit_pre = predict_dygraph_jit(image)
+        self.assertTrue(
+            np.allclose(dy_pre, st_pre),
+            msg="dy_pre:\n {}\n, st_pre: \n{}.".format(dy_pre, st_pre))
+        self.assertTrue(
+            np.allclose(dy_jit_pre, st_pre),
+            msg="dy_jit_pre:\n {}\n, st_pre: \n{}.".format(dy_jit_pre, st_pre))
+
+    def test_resnet(self):
+        static_loss = self.train(to_static=True)
+        dygraph_loss = self.train(to_static=False)
+        self.assertTrue(
+            np.allclose(static_loss, dygraph_loss),
+            msg="static_loss: {} \n dygraph_loss: {}".format(static_loss,
+                                                             dygraph_loss))
+        self.verify_predict()
+

 if __name__ == '__main__':
python/paddle/fluid/tests/unittests/dygraph_to_static/test_se_resnet.py
@@ -24,6 +24,7 @@ from paddle.fluid.dygraph.base import to_variable
 from paddle.fluid.dygraph.nn import BatchNorm, Conv2D, Linear, Pool2D
 from paddle.fluid.dygraph import declarative
 from paddle.fluid.dygraph import ProgramTranslator
+from paddle.fluid.dygraph.io import VARIABLE_FILENAME

 SEED = 2020
 np.random.seed(SEED)
@@ -32,6 +33,8 @@ BATCH_SIZE = 8
 EPOCH_NUM = 1
 PRINT_STEP = 2
 STEP_NUM = 10
+MODEL_SAVE_PATH = "./se_resnet.inference.model"
+DY_STATE_DICT_SAVE_PATH = "./se_resnet.dygraph"

 place = fluid.CUDAPlace(0) if fluid.is_compiled_with_cuda() \
     else fluid.CPUPlace()
@@ -377,11 +380,60 @@ def train(train_reader, to_static):
             step_idx += 1
             if step_idx == STEP_NUM:
+                if to_static:
+                    configs = fluid.dygraph.jit.SaveLoadConfig()
+                    configs.output_spec = [pred]
+                    fluid.dygraph.jit.save(se_resnext, MODEL_SAVE_PATH, [img],
+                                           configs)
+                else:
+                    fluid.dygraph.save_dygraph(se_resnext.state_dict(),
+                                               DY_STATE_DICT_SAVE_PATH)
                 break
     return pred.numpy(), avg_loss.numpy(), acc_top1.numpy(), acc_top5.numpy()


+def predict_dygraph(data):
+    program_translator = ProgramTranslator()
+    program_translator.enable(False)
+    with fluid.dygraph.guard(place):
+        se_resnext = SeResNeXt()
+
+        model_dict, _ = fluid.dygraph.load_dygraph(DY_STATE_DICT_SAVE_PATH)
+        se_resnext.set_dict(model_dict)
+        se_resnext.eval()
+
+        label = np.random.random([1, 1]).astype("int64")
+        img = fluid.dygraph.to_variable(data)
+        label = fluid.dygraph.to_variable(label)
+
+        pred_res, _, _, _ = se_resnext(img, label)
+
+        return pred_res.numpy()
+
+
+def predict_static(data):
+    exe = fluid.Executor(place)
+    [inference_program, feed_target_names,
+     fetch_targets] = fluid.io.load_inference_model(
+         MODEL_SAVE_PATH, executor=exe, params_filename=VARIABLE_FILENAME)
+
+    pred_res = exe.run(inference_program,
+                       feed={feed_target_names[0]: data},
+                       fetch_list=fetch_targets)
+
+    return pred_res[0]
+
+
+def predict_dygraph_jit(data):
+    with fluid.dygraph.guard(place):
+        se_resnext = fluid.dygraph.jit.load(MODEL_SAVE_PATH)
+        se_resnext.eval()
+
+        pred_res = se_resnext(data)
+
+        return pred_res.numpy()
+
+
 class TestSeResnet(unittest.TestCase):
     def setUp(self):
         self.train_reader = paddle.batch(
@@ -390,6 +442,18 @@ class TestSeResnet(unittest.TestCase):
             batch_size=BATCH_SIZE,
             drop_last=True)

+    def verify_predict(self):
+        image = np.random.random([1, 3, 224, 224]).astype('float32')
+        dy_pre = predict_dygraph(image)
+        st_pre = predict_static(image)
+        dy_jit_pre = predict_dygraph_jit(image)
+        self.assertTrue(
+            np.allclose(dy_pre, st_pre),
+            msg="dy_pre:\n {}\n, st_pre: \n{}.".format(dy_pre, st_pre))
+        self.assertTrue(
+            np.allclose(dy_jit_pre, st_pre),
+            msg="dy_jit_pre:\n {}\n, st_pre: \n{}.".format(dy_jit_pre, st_pre))
+
     def test_check_result(self):
         pred_1, loss_1, acc1_1, acc5_1 = train(self.train_reader,
                                                to_static=False)
@@ -409,6 +473,8 @@ class TestSeResnet(unittest.TestCase):
             np.allclose(acc5_1, acc5_2),
             msg="static acc5: {} \n dygraph acc5: {}".format(acc5_1, acc5_2))

+        self.verify_predict()
+

 if __name__ == '__main__':
     unittest.main()
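One detail specific to the test_se_resnet.py change above: SeResNeXt.forward returns the prediction together with loss and accuracy tensors, so before saving the test builds a fluid.dygraph.jit.SaveLoadConfig and sets output_spec = [pred] so that only the prediction is kept as a fetch target of the saved inference program. Below is a minimal sketch of that pruning, assuming a hypothetical TwoOutputNet and save path that are not part of the commit.

    import numpy as np
    import paddle.fluid as fluid
    from paddle.fluid.dygraph import declarative
    from paddle.fluid.dygraph.nn import Linear

    SAVE_PATH = "./two_output_net.inference.model"  # hypothetical path


    class TwoOutputNet(fluid.dygraph.Layer):
        """Hypothetical layer whose forward returns both a prediction and a loss."""

        def __init__(self):
            super(TwoOutputNet, self).__init__()
            self.fc = Linear(10, 3)

        @declarative
        def forward(self, x):
            pred = self.fc(x)
            loss = fluid.layers.mean(pred)
            return pred, loss


    place = fluid.CPUPlace()
    with fluid.dygraph.guard(place):
        net = TwoOutputNet()
        x = fluid.dygraph.to_variable(
            np.random.random([4, 10]).astype('float32'))
        pred, loss = net(x)

        # keep only `pred` as an output of the saved inference program
        configs = fluid.dygraph.jit.SaveLoadConfig()
        configs.output_spec = [pred]
        fluid.dygraph.jit.save(net, SAVE_PATH, [x], configs)

        loaded = fluid.dygraph.jit.load(SAVE_PATH)
        loaded.eval()
        only_pred = loaded(x)  # the pruned model returns just the prediction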