PaddlePaddle / hapi
Commit 36850c6c
Authored on Apr 29, 2020 by LielinJiang

refine code

Parent: 0539966b
Showing 6 changed files with 148 additions and 153 deletions (+148 -153)
hapi/model.py                          +2    -2
hapi/tests/dist_mnist.py               +8    -6
hapi/tests/test_distributed.py         +8    -27
hapi/vision/models/resnet.py           +1    -1
hapi/vision/transforms/functional.py   +1    -1
hapi/vision/transforms/transforms.py   +128  -116
hapi/model.py

@@ -1097,10 +1097,10 @@ class Model(fluid.dygraph.Layer):
             batch_size (int): Integer number. The batch size of train_data and eval_data.
                 When train_data and eval_data are both the instance of Dataloader, this
                 argument will be ignored. Default: 1.
-            num_workers (int): the number of subprocess to load data, 0 for no subprocess
+            num_workers (int): The number of subprocess to load data, 0 for no subprocess
                 used and loading data in main process. When train_data and eval_data are
                 both the instance of Dataloader, this argument will be ignored. Default: 0.
-            stack_output (bool): whether stack output field like a batch, as for an output
+            stack_output (bool): Whether stack output field like a batch, as for an output
                 filed of a sample is in shape [X, Y], test_data contains N samples, predict
                 output field will be in shape [N, X, Y] if stack_output is True, and will
                 be a length N list in shape [[X, Y], [X, Y], ....[X, Y]] if stack_outputs
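The stacking behavior documented above can be illustrated with plain numpy; the sketch below is illustrative only and is not hapi API code.

    # Illustrative numpy sketch of the stack_outputs semantics documented above.
    import numpy as np

    samples = [np.zeros((3, 4)) for _ in range(5)]  # N=5 outputs, each with shape [X, Y] = [3, 4]

    stacked = np.stack(samples)                     # stack_outputs=True -> one array of shape [N, X, Y]
    print(stacked.shape)                            # (5, 3, 4)

    as_list = [s.shape for s in samples]            # stack_outputs=False -> a length-N list of [X, Y] arrays
    print(as_list)                                  # [(3, 4), (3, 4), (3, 4), (3, 4), (3, 4)]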
hapi/tests/dist_mnist.py

@@ -48,7 +48,7 @@ class MnistDataset(MNIST):
         return len(self.images)


-def get_predict_accuracy(pred, gt):
+def compute_accuracy(pred, gt):
     pred = np.argmax(pred, -1)
     gt = np.array(gt)

@@ -58,7 +58,7 @@ def get_predict_accuracy(pred, gt):
 class TestModel(unittest.TestCase):
-    def fit(self, dynamic):
+    def run(self, dynamic):
         device = set_device('gpu')
         fluid.enable_dygraph(device) if dynamic else None
@@ -74,7 +74,9 @@ class TestModel(unittest.TestCase):
         model = LeNet()
         optim = fluid.optimizer.Momentum(
-            learning_rate=0.01, momentum=.9, parameter_list=model.parameters())
+            learning_rate=0.001,
+            momentum=.9,
+            parameter_list=model.parameters())
         loss = CrossEntropy()
         model.prepare(optim, loss, Accuracy(), inputs, labels, device=device)
         cbk = ProgBarLogger(50)
@@ -92,15 +94,15 @@ class TestModel(unittest.TestCase):
         np.testing.assert_equal(output[0].shape[0], len(test_dataset))

-        acc = get_predict_accuracy(output[0], val_dataset.labels)
+        acc = compute_accuracy(output[0], val_dataset.labels)

         np.testing.assert_allclose(acc, eval_result['acc'])

     def test_multiple_gpus_static(self):
-        self.fit(False)
+        self.run(False)

     def test_multiple_gpus_dygraph(self):
-        self.fit(True)
+        self.run(True)


 if __name__ == '__main__':
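For reference, a self-contained sketch of an accuracy helper in the spirit of compute_accuracy above. The body past the argmax/array lines is collapsed in this diff, so the final mean-comparison step here is an assumption for illustration, not the repository's exact code.

    # Hedged sketch: only the first two lines of compute_accuracy appear in the diff;
    # the final comparison/mean step below is assumed for illustration.
    import numpy as np

    def compute_accuracy(pred, gt):
        pred = np.argmax(pred, -1)      # logits/probabilities -> predicted class ids
        gt = np.array(gt)
        return np.mean(pred == gt)      # assumed final step: fraction of correct predictions

    logits = np.array([[0.1, 0.9], [0.8, 0.2], [0.3, 0.7]])
    labels = [1, 0, 0]
    print(compute_accuracy(logits, labels))   # 0.666...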
hapi/tests/test_distributed.py

@@ -30,40 +30,21 @@ import paddle.distributed.cloud_utils as cloud_utils
 def get_cluster_from_args(selected_gpus):
     cluster_node_ips = '127.0.0.1'
     node_ip = '127.0.0.1'
-    use_paddlecloud = False
-    started_port = None

     node_ips = [x.strip() for x in cluster_node_ips.split(',')]

-    node_rank = node_ips.index(node_ip)
+    node_ips.index(node_ip)

     free_ports = None
-    if not use_paddlecloud and len(node_ips) <= 1 and started_port is None:
-        free_ports = find_free_ports(len(selected_gpus))
-        if free_ports is not None:
-            free_ports = list(free_ports)
-    else:
-        started_port = 6070
-        free_ports = [
-            x for x in range(started_port, started_port + len(selected_gpus))
-        ]
+    free_ports = find_free_ports(len(selected_gpus))
+    if free_ports is not None:
+        free_ports = list(free_ports)
     return get_cluster(node_ips, node_ip, free_ports, selected_gpus)


 def get_gpus(selected_gpus):
-    cuda_visible_devices = os.getenv("CUDA_VISIBLE_DEVICES")
-    if cuda_visible_devices is None or cuda_visible_devices == "":
-        selected_gpus = [x.strip() for x in selected_gpus.split(',')]
-    else:
-        cuda_visible_devices_list = cuda_visible_devices.split(',')
-        for x in selected_gpus.split(','):
-            assert x in cuda_visible_devices_list, "Can't find "\
-                "your selected_gpus %s in CUDA_VISIBLE_DEVICES[%s]." \
-                % (x, cuda_visible_devices)
-        selected_gpus = [
-            cuda_visible_devices_list.index(x.strip())
-            for x in selected_gpus.split(',')
-        ]
+    selected_gpus = [x.strip() for x in selected_gpus.split(',')]

     return selected_gpus
@@ -94,7 +75,7 @@ def start_local_trainers(cluster,
     print("trainer proc env:{}".format(current_env))

-    cmd = "python -m coverage run --branch -p " + training_script
+    cmd = "python -u " + training_script

     print("start trainer proc:{} env:{}".format(cmd, proc_env))
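find_free_ports is called above but its implementation is not part of this diff. A minimal sketch of what such a helper could look like, using only the standard library; this is an assumption, not the repository's code.

    # Assumed sketch of a find_free_ports(num) helper like the one called above.
    import socket

    def find_free_ports(num):
        ports = set()
        sockets = []
        while len(ports) < num:
            s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            s.bind(('', 0))                  # port 0 lets the OS pick a free port
            sockets.append(s)
            ports.add(s.getsockname()[1])
        for s in sockets:                    # release the sockets once collected
            s.close()
        return ports

    print(sorted(find_free_ports(2)))        # e.g. [34567, 45678]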
hapi/vision/models/resnet.py

@@ -36,7 +36,7 @@ model_urls = {
     'resnet34': ('https://paddle-hapi.bj.bcebos.com/models/resnet34.pdparams',
                  '46bc9f7c3dd2e55b7866285bee91eff3'),
     'resnet50': ('https://paddle-hapi.bj.bcebos.com/models/resnet50.pdparams',
-                 '0884c9087266496c41c60d14a96f8530'),
+                 '5ce890a9ad386df17cf7fe2313dca0a1'),
     'resnet101': ('https://paddle-hapi.bj.bcebos.com/models/resnet101.pdparams',
                   'fb07a451df331e4b0bb861ed97c3a9b9'),
hapi/vision/transforms/functional.py

@@ -35,7 +35,7 @@ def flip(image, code):
     Args:
         image: Input image, with (H, W, C) shape
-        code: code that indicates the type of flip.
+        code: Code that indicates the type of flip.
             -1 : Flip horizontally and vertically
             0 : Flip vertically
             1 : Flip horizontally
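A short usage sketch of the flip codes documented above; it assumes the functional module is imported directly and is not taken from the repository's tests.

    # Usage sketch for the documented flip codes; direct module import is an assumption.
    import numpy as np
    from hapi.vision.transforms import functional as F

    img = np.arange(12, dtype='float32').reshape(2, 2, 3)  # (H, W, C)

    both = F.flip(img, -1)   # -1: flip horizontally and vertically
    vert = F.flip(img, 0)    #  0: flip vertically
    horz = F.flip(img, 1)    #  1: flip horizontally
    print(both.shape, vert.shape, horz.shape)  # (2, 2, 3) each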
hapi/vision/transforms/transforms.py

@@ -61,7 +61,7 @@ class Compose(object):
     together for a dataset transform.

     Args:
-        transforms (list of ``Transform`` objects): list of transforms to compose.
+        transforms (list): List of transforms to compose.

     Returns:
         A compose object which is callable, __call__ for this Compose
@@ -115,8 +115,8 @@ class BatchCompose(object):
     """Composes several batch transforms together

     Args:
-        transforms (list of ``Transform`` objects): list of transforms to compose.
-            these transforms perform on batch data.
+        transforms (list): List of transforms to compose.
+            these transforms perform on batch data.

     Examples:
@@ -209,21 +209,22 @@ class Resize(object):
             smaller edge of the image will be matched to this number.
             i.e, if height > width, then image will be rescaled to
             (size * height / width, size)
-        interpolation (int): interpolation mode of resize. Default: cv2.INTER_LINEAR.
+        interpolation (int): Interpolation mode of resize. Default: cv2.INTER_LINEAR.

     Examples:
        .. code-block:: python

-            from hapi.datasets import Flowers
-            from hapi.vision.transforms import Compose, Resize
+            import numpy as np
+            from hapi.vision.transforms import Resize

-            transform = Compose([Resize(size=224)])
-            flowers = Flowers(mode='test', transform=transform)
-
-            for i in range(10):
-                sample = flowers[i]
-                print(sample[0].shape, sample[1])
+            transform = Resize(size=224)
+            fake_img = np.random.rand(500, 500, 3).astype('float32')
+            fake_img = transform(fake_img)
+            print(fake_img.shape)
     """

     def __init__(self, size, interpolation=cv2.INTER_LINEAR):
@@ -251,15 +252,16 @@ class RandomResizedCrop(object):
        .. code-block:: python

-            from hapi.datasets import Flowers
-            from hapi.vision.transforms import Compose, Resize, RandomResizedCrop
+            import numpy as np
+            from hapi.vision.transforms import RandomResizedCrop

-            transform = Compose([Resize(500), RandomResizedCrop(224)])
-            flowers = Flowers(mode='test', transform=transform)
-
-            for i in range(2):
-                sample = flowers[i]
-                print(sample[0].shape, sample[1])
+            transform = RandomResizedCrop(224)
+            fake_img = np.random.rand(500, 500, 3).astype('float32')
+            fake_img = transform(fake_img)
+            print(fake_img.shape)
     """

     def __init__(self,
@@ -320,22 +322,23 @@ class CenterCropResize(object):
     Args:
         size (int|list|tuple): Target size of output image, with (height, width) shape.
-        crop_padding (int): center crop with the padding. Default: 32.
-        interpolation (int): interpolation mode of resize. Default: cv2.INTER_LINEAR.
+        crop_padding (int): Center crop with the padding. Default: 32.
+        interpolation (int): Interpolation mode of resize. Default: cv2.INTER_LINEAR.

     Examples:
        .. code-block:: python

-            from hapi.datasets import Flowers
-            from hapi.vision.transforms import Compose, Resize, CenterCropResize
+            import numpy as np
+            from hapi.vision.transforms import CenterCropResize

-            transform = Compose([Resize(500), CenterCropResize(224)])
-            flowers = Flowers(mode='test', transform=transform)
-
-            for i in range(2):
-                sample = flowers[i]
-                print(sample[0].shape, sample[1])
+            transform = CenterCropResize(224)
+            fake_img = np.random.rand(500, 500, 3).astype('float32')
+            fake_img = transform(fake_img)
+            print(fake_img.shape)
     """

     def __init__(self, size, crop_padding=32, interpolation=cv2.INTER_LINEAR):
@@ -370,15 +373,16 @@ class CenterCrop(object):
        .. code-block:: python

-            from hapi.datasets import Flowers
-            from hapi.vision.transforms import Compose, Resize, CenterCrop
+            import numpy as np
+            from hapi.vision.transforms import CenterCrop

-            transform = Compose([Resize(500), CenterCrop(224)])
-            flowers = Flowers(mode='test', transform=transform)
-
-            for i in range(2):
-                sample = flowers[i]
-                print(sample[0].shape, sample[1])
+            transform = CenterCrop(224)
+            fake_img = np.random.rand(500, 500, 3).astype('float32')
+            fake_img = transform(fake_img)
+            print(fake_img.shape)
     """

     def __init__(self, output_size):
@@ -405,21 +409,22 @@ class RandomHorizontalFlip(object):
     """Horizontally flip the input data randomly with a given probability.

     Args:
-        prob (float): probability of the input data being flipped. Default: 0.5
+        prob (float): Probability of the input data being flipped. Default: 0.5

     Examples:
        .. code-block:: python

-            from hapi.datasets import Flowers
-            from hapi.vision.transforms import Compose, RandomHorizontalFlip
+            import numpy as np
+            from hapi.vision.transforms import RandomHorizontalFlip

-            transform = Compose([RandomHorizontalFlip()])
-            flowers = Flowers(mode='test', transform=transform)
-
-            for i in range(2):
-                sample = flowers[i]
-                print(sample[0].shape, sample[1])
+            transform = RandomHorizontalFlip(224)
+            fake_img = np.random.rand(500, 500, 3).astype('float32')
+            fake_img = transform(fake_img)
+            print(fake_img.shape)
     """

     def __init__(self, prob=0.5):
@@ -435,21 +440,22 @@ class RandomVerticalFlip(object):
     """Vertically flip the input data randomly with a given probability.

     Args:
-        prob (float): probability of the input data being flipped. Default: 0.5
+        prob (float): Probability of the input data being flipped. Default: 0.5

     Examples:
        .. code-block:: python

-            from hapi.datasets import Flowers
-            from hapi.vision.transforms import Compose, RandomVerticalFlip
+            import numpy as np
+            from hapi.vision.transforms import RandomVerticalFlip

-            transform = Compose([RandomVerticalFlip()])
-            flowers = Flowers(mode='test', transform=transform)
-
-            for i in range(2):
-                sample = flowers[i]
-                print(sample[0].shape, sample[1])
+            transform = RandomVerticalFlip(224)
+            fake_img = np.random.rand(500, 500, 3).astype('float32')
+            fake_img = transform(fake_img)
+            print(fake_img.shape)
     """

     def __init__(self, prob=0.5):
@@ -475,18 +481,17 @@ class Normalize(object):
        .. code-block:: python

-            from hapi.datasets import Flowers
-            from hapi.vision.transforms import Compose, Normalize, Permute
+            import numpy as np
+            from hapi.vision.transforms import Normalize

-            normalize = Normalize(mean=[123.675, 116.28, 103.53],
-                                  std=[58.395, 57.120, 57.375])
+            normalize = Normalize(mean=[0.5, 0.5, 0.5],
+                                  std=[0.5, 0.5, 0.5])

-            transform = Compose([Permute(), normalize])
-            flowers = Flowers(mode='test', transform=transform)
-
-            for i in range(2):
-                sample = flowers[i]
-                print(sample[0].shape, sample[1])
+            fake_img = np.random.rand(3, 500, 500).astype('float32')
+            fake_img = normalize(fake_img)
+            print(fake_img.shape)
     """
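The channel-wise arithmetic behind Normalize can be sketched with plain numpy. The sketch below assumes the standard (x - mean) / std formula on CHW input, matching the example above; it is not the class's source.

    # Illustrative per-channel normalization, assuming (x - mean) / std on CHW input.
    import numpy as np

    fake_img = np.random.rand(3, 500, 500).astype('float32')   # CHW
    mean = np.array([0.5, 0.5, 0.5]).reshape(3, 1, 1)            # broadcast over H, W
    std = np.array([0.5, 0.5, 0.5]).reshape(3, 1, 1)

    normalized = (fake_img - mean) / std
    print(normalized.shape, normalized.min() >= -1.0, normalized.max() <= 1.0)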
@@ -511,22 +516,23 @@ class Permute(object):
     Input image should be HWC mode and an instance of numpy.ndarray.

     Args:
-        mode: Output mode of input. Default: "CHW".
-        to_rgb: convert 'bgr' image to 'rgb'. Default: True.
+        mode (str): Output mode of input. Default: "CHW".
+        to_rgb (bool): Convert 'bgr' image to 'rgb'. Default: True.

     Examples:
        .. code-block:: python

-            from hapi.datasets import Flowers
-            from hapi.vision.transforms import Compose, Permute
+            import numpy as np
+            from hapi.vision.transforms import Permute

-            transform = Compose([Permute()])
-            flowers = Flowers(mode='test', transform=transform)
-
-            for i in range(2):
-                sample = flowers[i]
-                print(sample[0].shape, sample[1])
+            transform = Permute()
+            fake_img = np.random.rand(500, 500, 3).astype('float32')
+            fake_img = transform(fake_img)
+            print(fake_img.shape)
     """

     def __init__(self, mode="CHW", to_rgb=True):
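The HWC-to-CHW conversion that Permute documents maps to a simple transpose; a hedged numpy sketch follows. Representing the to_rgb option as a channel reversal is an assumption about the implementation, not taken from the source.

    # Illustrative HWC -> CHW permute with an assumed BGR -> RGB channel reversal.
    import numpy as np

    fake_img = np.random.rand(500, 500, 3).astype('float32')   # HWC
    chw = fake_img.transpose((2, 0, 1))                          # mode="CHW"
    chw_rgb = chw[::-1, :, :]                                    # to_rgb=True: reverse channel order (assumed)
    print(chw.shape, chw_rgb.shape)                              # (3, 500, 500) (3, 500, 500)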
@@ -549,22 +555,23 @@ class GaussianNoise(object):
     Gaussian noise is generated with given mean and std.

     Args:
-        mean: Gaussian mean used to generate noise.
-        std: Gaussian standard deviation used to generate noise.
+        mean (float): Gaussian mean used to generate noise.
+        std (float): Gaussian standard deviation used to generate noise.

     Examples:
        .. code-block:: python

-            from hapi.datasets import Flowers
-            from hapi.vision.transforms import Compose, GaussianNoise
+            import numpy as np
+            from hapi.vision.transforms import GaussianNoise

-            transform = Compose([GaussianNoise()])
-            flowers = Flowers(mode='test', transform=transform)
-
-            for i in range(2):
-                sample = flowers[i]
-                print(sample[0].shape, sample[1])
+            transform = GaussianNoise()
+            fake_img = np.random.rand(500, 500, 3).astype('float32')
+            fake_img = transform(fake_img)
+            print(fake_img.shape)
     """

     def __init__(self, mean=0.0, std=1.0):
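Generating Gaussian noise with a given mean and std, as the docstring above describes, comes down to np.random.normal. A minimal sketch follows; applying the noise additively is an assumption about the transform, not the source.

    # Minimal sketch of additive Gaussian noise with given mean/std (additive application assumed).
    import numpy as np

    fake_img = np.random.rand(500, 500, 3).astype('float32')
    noise = np.random.normal(loc=0.0, scale=1.0, size=fake_img.shape).astype('float32')
    noisy = fake_img + noise
    print(noisy.shape)   # (500, 500, 3)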
@@ -582,22 +589,23 @@ class BrightnessTransform(object):
     """Adjust brightness of the image.

     Args:
-        value: How much to adjust the brightness. Can be any
+        value (float): How much to adjust the brightness. Can be any
             non negative number. 0 gives the original image

     Examples:
        .. code-block:: python

-            from hapi.datasets import Flowers
-            from hapi.vision.transforms import Compose, BrightnessTransform
+            import numpy as np
+            from hapi.vision.transforms import BrightnessTransform

-            transform = Compose([BrightnessTransform(0.4)])
-            flowers = Flowers(mode='test', transform=transform)
-
-            for i in range(2):
-                sample = flowers[i]
-                print(sample[0].shape, sample[1])
+            transform = BrightnessTransform(0.4)
+            fake_img = np.random.rand(500, 500, 3).astype('float32')
+            fake_img = transform(fake_img)
+            print(fake_img.shape)
     """

     def __init__(self, value):
@@ -620,22 +628,23 @@ class ContrastTransform(object):
     """Adjust contrast of the image.

     Args:
-        value: How much to adjust the contrast. Can be any
+        value (float): How much to adjust the contrast. Can be any
             non negative number. 0 gives the original image

     Examples:
        .. code-block:: python

-            from hapi.datasets import Flowers
-            from hapi.vision.transforms import Compose, ContrastTransform
+            import numpy as np
+            from hapi.vision.transforms import ContrastTransform

-            transform = Compose([ContrastTransform(0.4)])
-            flowers = Flowers(mode='test', transform=transform)
-
-            for i in range(2):
-                sample = flowers[i]
-                print(sample[0].shape, sample[1])
+            transform = ContrastTransform(0.4)
+            fake_img = np.random.rand(500, 500, 3).astype('float32')
+            fake_img = transform(fake_img)
+            print(fake_img.shape)
     """

     def __init__(self, value):
@@ -659,22 +668,23 @@ class SaturationTransform(object):
     """Adjust saturation of the image.

     Args:
-        value: How much to adjust the saturation. Can be any
+        value (float): How much to adjust the saturation. Can be any
             non negative number. 0 gives the original image

     Examples:
        .. code-block:: python

-            from hapi.datasets import Flowers
-            from hapi.vision.transforms import Compose, SaturationTransform
+            import numpy as np
+            from hapi.vision.transforms import SaturationTransform

-            transform = Compose([SaturationTransform(0.4)])
-            flowers = Flowers(mode='test', transform=transform)
-
-            for i in range(2):
-                sample = flowers[i]
-                print(sample[0].shape, sample[1])
+            transform = SaturationTransform(0.4)
+            fake_img = np.random.rand(500, 500, 3).astype('float32')
+            fake_img = transform(fake_img)
+            print(fake_img.shape)
     """

     def __init__(self, value):
@@ -699,22 +709,23 @@ class HueTransform(object):
     """Adjust hue of the image.

     Args:
-        value: How much to adjust the hue. Can be any number
+        value (float): How much to adjust the hue. Can be any number
             between 0 and 0.5, 0 gives the original image

     Examples:
        .. code-block:: python

-            from hapi.datasets import Flowers
-            from hapi.vision.transforms import Compose, HueTransform
+            import numpy as np
+            from hapi.vision.transforms import HueTransform

-            transform = Compose([HueTransform(0.4)])
-            flowers = Flowers(mode='test', transform=transform)
-
-            for i in range(2):
-                sample = flowers[i]
-                print(sample[0].shape, sample[1])
+            transform = HueTransform(0.4)
+            fake_img = np.random.rand(500, 500, 3).astype('float32')
+            fake_img = transform(fake_img)
+            print(fake_img.shape)
     """

     def __init__(self, value):
@@ -761,15 +772,16 @@ class ColorJitter(object):
        .. code-block:: python

-            from hapi.datasets import Flowers
-            from hapi.vision.transforms import Compose, ColorJitter
+            import numpy as np
+            from hapi.vision.transforms import ColorJitter

-            transform = Compose([ColorJitter(0.4)])
-            flowers = Flowers(mode='test', transform=transform)
-
-            for i in range(2):
-                sample = flowers[i]
-                print(sample[0].shape, sample[1])
+            transform = ColorJitter(0.4)
+            fake_img = np.random.rand(500, 500, 3).astype('float32')
+            fake_img = transform(fake_img)
+            print(fake_img.shape)
     """

     def __init__(self, brightness=0, contrast=0, saturation=0, hue=0):