Skip to content
体验新版
项目
组织
正在加载...
登录
切换导航
打开侧边栏
PaddlePaddle
hapi
提交
3e8128cf
H
hapi
项目概览
PaddlePaddle
/
hapi
通知
11
Star
2
Fork
0
代码
文件
提交
分支
Tags
贡献者
分支图
Diff
Issue
4
列表
看板
标记
里程碑
合并请求
7
Wiki
0
Wiki
分析
仓库
DevOps
项目成员
Pages
H
hapi
项目概览
项目概览
详情
发布
仓库
仓库
文件
提交
分支
标签
贡献者
分支图
比较
Issue
4
Issue
4
列表
看板
标记
里程碑
合并请求
7
合并请求
7
Pages
分析
分析
仓库分析
DevOps
Wiki
0
Wiki
成员
成员
收起侧边栏
关闭侧边栏
动态
分支图
创建新Issue
提交
Issue看板
体验新版 GitCode,发现更多精彩内容 >>
提交
3e8128cf
编写于
1月 06, 2020
作者:
Y
Yang Zhang
浏览文件
操作
浏览文件
下载
电子邮件补丁
差异文件
Refactor `resnet` demo
上级
1faf669a
变更
1
隐藏空白更改
内联
并排
Showing
1 changed file
with
102 additions
and
102 deletions
+102
-102
resnet.py
resnet.py
+102
-102
未找到文件。
resnet.py
浏览文件 @
3e8128cf
...
...
@@ -27,88 +27,11 @@ import paddle
import
paddle.fluid
as
fluid
from
paddle.fluid.layer_helper
import
LayerHelper
from
paddle.fluid.dygraph.nn
import
Conv2D
,
Pool2D
,
BatchNorm
,
Linear
from
paddle.fluid.dygraph.container
import
Sequential
from
model
import
Model
,
CrossEntropy
def center_crop_resize(img):
    """Center-crop `img` to a square and resize it to 224x224.

    The crop edge is 224/256 of the shorter image side, matching the
    usual ImageNet evaluation preprocessing scale.
    """
    rows, cols = img.shape[:2]
    crop = int(224 / 256 * min(rows, cols))
    top = (rows + 1 - crop) // 2
    left = (cols + 1 - crop) // 2
    img = img[top:top + crop, left:left + crop, :]
    return cv2.resize(img, (224, 224), 0, 0, cv2.INTER_LINEAR)
def random_crop_resize(img):
    """Randomly crop `img` (RandomResizedCrop-style) and resize to 224x224.

    Samples a crop covering 8%-100% of the image area with aspect ratio
    drawn log-uniformly from [3/4, 4/3]. Falls back to a deterministic
    center crop when no feasible crop is found after 10 attempts.
    """
    height, width = img.shape[:2]
    area = height * width
    # sampling bounds are loop-invariant; hoist them out of the loop
    log_ratio = (math.log(3 / 4), math.log(4 / 3))
    # `_`: the attempt counter itself is never used
    for _ in range(10):
        target_area = random.uniform(0.08, 1.) * area
        aspect_ratio = math.exp(random.uniform(*log_ratio))
        w = int(round(math.sqrt(target_area * aspect_ratio)))
        h = int(round(math.sqrt(target_area / aspect_ratio)))
        if w <= width and h <= height:
            i = random.randint(0, height - h)
            j = random.randint(0, width - w)
            img = img[i:i + h, j:j + w, :]
            return cv2.resize(img, (224, 224), 0, 0, cv2.INTER_LINEAR)
    # no feasible crop (e.g. extreme input aspect ratio): center-crop instead
    return center_crop_resize(img)
def random_flip(img):
    """Return `img` mirrored horizontally (HWC layout, width axis reversed)."""
    flipped = img[:, ::-1, :]
    return flipped
def normalize_permute(img):
    """Convert an HWC BGR image to a normalized CHW RGB float32 array.

    Transposes to channel-first, reverses the channel axis (BGR -> RGB),
    then normalizes each channel with ImageNet mean/std on the 0-255 scale.
    """
    # transpose and convert to RGB from BGR
    img = img.astype(np.float32).transpose((2, 0, 1))[::-1, ...]
    mean = np.array([123.675, 116.28, 103.53], dtype=np.float32)
    std = np.array([58.395, 57.120, 57.375], dtype=np.float32)
    # broadcast over (C, 1, 1) instead of a per-channel Python loop with
    # explicit __isub__/__imul__ calls; in-place ops keep the same semantics
    img -= mean[:, np.newaxis, np.newaxis]
    img *= (1. / std)[:, np.newaxis, np.newaxis]
    return img
def compose(functions):
    """Chain image transforms over (img, label) samples.

    Returns a callable that applies each function in `functions`, in
    order, to the image half of a sample, passing the label through.
    """
    def process(sample):
        img, label = sample
        for transform in functions:
            img = transform(img)
        return img, label

    return process
def image_folder(path, shuffle=False):
    """Index an ImageNet-style directory tree of images.

    Each immediate subdirectory of `path` is one class; class ids follow
    sorted directory-name order. Returns a zero-argument generator
    function yielding (file_path, class_id) tuples. When `shuffle` is
    true the sample list is shuffled once, at indexing time.
    """
    valid_ext = ('.jpg', '.jpeg', '.png', '.ppm', '.bmp', '.webp')
    classes = sorted(
        d for d in os.listdir(path) if os.path.isdir(os.path.join(path, d)))
    class_map = {cls: idx for idx, cls in enumerate(classes)}
    samples = []
    # iterate `classes` directly (already sorted); `cls` avoids shadowing
    # the `dir` builtin
    for cls in classes:
        class_dir = os.path.join(path, cls)
        for root, _, fnames in sorted(os.walk(class_dir)):
            for fname in sorted(fnames):
                p = os.path.join(root, fname)
                if os.path.splitext(p)[1].lower() in valid_ext:
                    samples.append((p, class_map[cls]))
    if shuffle:
        random.shuffle(samples)

    def iterator():
        for s in samples:
            yield s

    return iterator
class
ConvBNLayer
(
fluid
.
dygraph
.
Layer
):
def
__init__
(
self
,
num_channels
,
...
...
@@ -204,8 +127,8 @@ class ResNet(Model):
layer_config
.
keys
(),
depth
)
layers
=
layer_config
[
depth
]
num_
channels
=
[
64
,
256
,
512
,
1024
]
num_
filters
=
[
64
,
128
,
256
,
512
]
num_
in
=
[
64
,
256
,
512
,
1024
]
num_
out
=
[
64
,
128
,
256
,
512
]
self
.
conv
=
ConvBNLayer
(
num_channels
=
3
,
...
...
@@ -219,26 +142,28 @@ class ResNet(Model):
pool_padding
=
1
,
pool_type
=
'max'
)
self
.
blocks
=
[]
for
b
in
range
(
len
(
layers
)):
self
.
layers
=
[]
for
idx
,
num_blocks
in
enumerate
(
layers
):
blocks
=
[]
shortcut
=
False
for
i
in
range
(
layers
[
b
]):
block
=
self
.
add_sublayer
(
'layer_{}_{}'
.
format
(
b
,
i
),
BottleneckBlock
(
num_channels
=
num_channels
[
b
]
if
i
==
0
else
num_filters
[
b
]
*
4
,
num_filters
=
num_filters
[
b
],
stride
=
2
if
i
==
0
and
b
!=
0
else
1
,
shortcut
=
shortcut
))
self
.
blocks
.
append
(
block
)
for
b
in
range
(
num_blocks
):
block
=
BottleneckBlock
(
num_channels
=
num_in
[
idx
]
if
b
==
0
else
num_out
[
idx
]
*
4
,
num_filters
=
num_out
[
idx
],
stride
=
2
if
b
==
0
and
idx
!=
0
else
1
,
shortcut
=
shortcut
)
blocks
.
append
(
block
)
shortcut
=
True
layer
=
self
.
add_sublayer
(
"layer_{}"
.
format
(
idx
),
Sequential
(
*
blocks
))
self
.
layers
.
append
(
layer
)
self
.
global_pool
=
Pool2D
(
pool_size
=
7
,
pool_type
=
'avg'
,
global_pooling
=
True
)
stdv
=
1.0
/
math
.
sqrt
(
2048
*
1.0
)
self
.
fc_input_dim
=
num_
filters
[
len
(
num_filters
)
-
1
]
*
4
*
1
*
1
self
.
fc_input_dim
=
num_
out
[
-
1
]
*
4
*
1
*
1
self
.
fc
=
Linear
(
self
.
fc_input_dim
,
num_classes
,
act
=
'softmax'
,
...
...
@@ -249,8 +174,8 @@ class ResNet(Model):
def
forward
(
self
,
inputs
):
x
=
self
.
conv
(
inputs
)
x
=
self
.
pool
(
x
)
for
block
in
self
.
block
s
:
x
=
block
(
x
)
for
layer
in
self
.
layer
s
:
x
=
layer
(
x
)
x
=
self
.
global_pool
(
x
)
x
=
fluid
.
layers
.
reshape
(
x
,
shape
=
[
-
1
,
self
.
fc_input_dim
])
x
=
self
.
fc
(
x
)
...
...
@@ -289,11 +214,88 @@ def accuracy(pred, label, topk=(1, )):
return
res
def center_crop_resize(img):
    """Crop the centered square at 224/256 of the short side, resize to 224x224."""
    h, w = img.shape[:2]
    side = int(224 / 256 * min(h, w))
    y0 = (h + 1 - side) // 2
    x0 = (w + 1 - side) // 2
    square = img[y0:y0 + side, x0:x0 + side, :]
    return cv2.resize(square, (224, 224), 0, 0, cv2.INTER_LINEAR)
def random_crop_resize(img):
    """Take a random area/aspect-ratio crop of `img` and resize to 224x224.

    Crop area is uniform in [0.08, 1.0] of the image; aspect ratio is
    log-uniform in [3/4, 4/3]. After 10 failed attempts, degrades to the
    deterministic center crop.
    """
    height, width = img.shape[:2]
    area = height * width
    # hoisted: these bounds do not change between attempts
    log_ratio = (math.log(3 / 4), math.log(4 / 3))
    # the attempt index is unused, so name it `_`
    for _ in range(10):
        target_area = random.uniform(0.08, 1.) * area
        aspect_ratio = math.exp(random.uniform(*log_ratio))
        w = int(round(math.sqrt(target_area * aspect_ratio)))
        h = int(round(math.sqrt(target_area / aspect_ratio)))
        if w <= width and h <= height:
            i = random.randint(0, height - h)
            j = random.randint(0, width - w)
            img = img[i:i + h, j:j + w, :]
            return cv2.resize(img, (224, 224), 0, 0, cv2.INTER_LINEAR)
    # fallback when the sampled crops never fit inside the image
    return center_crop_resize(img)
def random_flip(img):
    """Mirror an HWC image along its width (second) axis."""
    return img[:, ::-1, :]
def normalize_permute(img):
    """Normalize an HWC BGR image into a CHW RGB float32 tensor.

    Channel-first transpose, BGR->RGB channel reversal, then per-channel
    ImageNet mean/std normalization (statistics on the 0-255 scale).
    """
    # transpose and convert to RGB from BGR
    img = img.astype(np.float32).transpose((2, 0, 1))[::-1, ...]
    mean = np.array([123.675, 116.28, 103.53], dtype=np.float32)
    std = np.array([58.395, 57.120, 57.375], dtype=np.float32)
    # vectorized, in-place normalization replaces the original per-channel
    # loop of raw __isub__/__imul__ dunder calls
    img -= mean[:, np.newaxis, np.newaxis]
    img *= (1. / std)[:, np.newaxis, np.newaxis]
    return img
def compose(functions):
    """Build a sample transform that pipes the image through `functions`.

    The returned closure takes an (img, label) pair, runs the image
    through every function in order, and keeps the label unchanged.
    """
    def process(sample):
        img, label = sample
        for f in functions:
            img = f(img)
        return img, label

    return process
def image_folder(path, shuffle=False):
    """Index an ImageNet-style directory tree of images.

    Each immediate subdirectory of `path` is one class; class ids follow
    sorted directory-name order. Returns a zero-argument generator
    function yielding (file_path, class_id) tuples. When `shuffle` is
    true the samples are reshuffled at the start of every pass, so each
    epoch sees a fresh order.
    """
    valid_ext = ('.jpg', '.jpeg', '.png', '.ppm', '.bmp', '.webp')
    classes = sorted(
        d for d in os.listdir(path) if os.path.isdir(os.path.join(path, d)))
    class_map = {cls: idx for idx, cls in enumerate(classes)}
    samples = []
    # iterate `classes` directly (already sorted); `cls` avoids shadowing
    # the `dir` builtin
    for cls in classes:
        class_dir = os.path.join(path, cls)
        for root, _, fnames in sorted(os.walk(class_dir)):
            for fname in sorted(fnames):
                p = os.path.join(root, fname)
                if os.path.splitext(p)[1].lower() in valid_ext:
                    samples.append((p, class_map[cls]))

    def iterator():
        # reshuffle per call -> a new order every epoch
        if shuffle:
            random.shuffle(samples)
        for s in samples:
            yield s

    return iterator
def
run
(
model
,
loader
,
mode
=
'train'
):
total_loss
=
0.0
total_acc1
=
0.0
total_acc5
=
0.0
num_steps
=
0
device_ids
=
list
(
range
(
FLAGS
.
num_devices
))
for
idx
,
batch
in
enumerate
(
loader
()):
outputs
,
losses
=
getattr
(
model
,
mode
)(
...
...
@@ -303,12 +305,10 @@ def run(model, loader, mode='train'):
total_loss
+=
np
.
sum
(
losses
)
total_acc1
+=
top1
total_acc5
+=
top5
num_steps
+=
1
if
idx
%
10
==
0
:
print
(
"{:04d}: loss {:0.3f} top1: {:0.3f}% top5: {:0.3f}%"
.
format
(
idx
,
total_loss
/
num_steps
,
total_acc1
/
num_steps
,
total_acc5
/
num_steps
))
num_steps
+=
1
idx
,
total_loss
/
(
idx
+
1
),
total_acc1
/
(
idx
+
1
),
total_acc5
/
(
idx
+
1
)))
def
main
():
...
...
@@ -357,8 +357,8 @@ def main():
with
guard
:
model
=
ResNet
()
sgd
=
make_optimizer
(
parameter_list
=
model
.
parameters
())
model
.
prepare
(
sgd
,
CrossEntropy
())
optim
=
make_optimizer
(
parameter_list
=
model
.
parameters
())
model
.
prepare
(
optim
,
CrossEntropy
())
for
e
in
range
(
epoch
):
print
(
"======== train epoch {} ========"
.
format
(
e
))
...
...
编辑
预览
Markdown
is supported
0%
请重试
或
添加新附件
.
添加附件
取消
You are about to add
0
people
to the discussion. Proceed with caution.
先完成此消息的编辑!
取消
想要评论请
注册
或
登录