Commit 2094a584 (unverified)
implement autotune python API (#42299)

Repository: 机器未来 / Paddle (fork of PaddlePaddle / Paddle)
Authored by Zhang Ting on Apr 27, 2022; committed via GitHub on Apr 27, 2022.
Parent commit: cf780097

Showing 5 changed files with 265 additions and 16 deletions.
python/paddle/fluid/tests/unittests/test_dataloader_autotune.py   +32   -5
python/paddle/fluid/tests/unittests/test_layout_autotune.py       +29   -3
python/paddle/fluid/tests/unittests/test_switch_autotune.py       +46   -8
python/paddle/incubate/__init__.py                                 +1   -0
python/paddle/incubate/autotune.py                                +157   -0
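The user-facing entry point added by this commit is paddle.incubate.autotune.set_config. A minimal usage sketch, following the example in the docstring of python/paddle/incubate/autotune.py below (the config keys and values are taken from that docstring):

import json
import paddle

# Configure kernel, layout and dataloader tuning with a dict.
config = {
    "kernel": {"enable": True, "tuning_range": [1, 5]},
    "layout": {"enable": True},
    "dataloader": {"enable": True},
}
paddle.incubate.autotune.set_config(config)

# Alternatively, write the same dict to a JSON file and pass its path.
with open('config.json', 'w') as json_file:
    json.dump(config, json_file)
paddle.incubate.autotune.set_config('config.json')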
python/paddle/fluid/tests/unittests/test_dataloader_autotune.py

@@ -15,12 +15,14 @@
from __future__ import print_function
import unittest
import numpy as np
import tempfile
import warnings
import json
import paddle
import paddle.nn as nn
from paddle.io import Dataset, DataLoader, BatchSampler, SequenceSampler
from paddle.fluid.reader import set_autotune_config
import sys
import os


class RandomDataset(Dataset):

@@ -51,12 +53,21 @@ class TestAutoTune(unittest.TestCase):
        self.dataset = RandomDataset(10)

    def test_dataloader_use_autotune(self):
        set_autotune_config(True, 1)
        paddle.incubate.autotune.set_config(
            config={"dataloader": {"enable": True, "tuning_steps": 1, }})
        loader = DataLoader(
            self.dataset, batch_size=self.batch_size, num_workers=0)

    def test_dataloader_disable_autotune(self):
        set_autotune_config(False)
        config = {"dataloader": {"enable": False, "tuning_steps": 1}}
        tfile = tempfile.NamedTemporaryFile(mode="w+", delete=False)
        json.dump(config, tfile)
        tfile.close()
        paddle.incubate.autotune.set_config(tfile.name)
        os.remove(tfile.name)
        loader = DataLoader(
            self.dataset, batch_size=self.batch_size, num_workers=2)
        if (sys.platform == 'darwin' or sys.platform == 'win32'):

@@ -65,12 +76,28 @@ class TestAutoTune(unittest.TestCase):
            self.assertEqual(loader.num_workers, 2)

    def test_distributer_batch_sampler_autotune(self):
        set_autotune_config(True, 1)
        paddle.incubate.autotune.set_config(
            config={"dataloader": {"enable": True, "tuning_steps": 1, }})
        batch_sampler = paddle.io.DistributedBatchSampler(
            self.dataset, batch_size=self.batch_size)
        loader = DataLoader(
            self.dataset, batch_sampler=batch_sampler, num_workers=2)


class TestAutoTuneAPI(unittest.TestCase):
    def test_set_config_warnings(self):
        with warnings.catch_warnings(record=True) as w:
            config = {"kernel": {"enable": 1, "tuning_range": True}}
            tfile = tempfile.NamedTemporaryFile(mode="w+", delete=False)
            json.dump(config, tfile)
            tfile.close()
            paddle.incubate.autotune.set_config(tfile.name)
            os.remove(tfile.name)
            self.assertTrue(len(w) == 2)


if __name__ == '__main__':
    unittest.main()
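Outside the unit-test harness, the dataloader-tuning path exercised above amounts to roughly the following sketch. The RandomDataset stand-in, its method bodies, and the batch size are illustrative; only the set_config call and the DataLoader pattern mirror the test:

import numpy as np
import paddle
from paddle.io import Dataset, DataLoader

class RandomDataset(Dataset):
    # Minimal stand-in dataset, assumed for illustration only.
    def __init__(self, num_samples):
        self.num_samples = num_samples

    def __getitem__(self, idx):
        return np.random.random((10,)).astype('float32')

    def __len__(self):
        return self.num_samples

# Enable dataloader tuning for one tuning step, as in test_dataloader_use_autotune.
paddle.incubate.autotune.set_config(
    config={"dataloader": {"enable": True, "tuning_steps": 1}})

# DataLoader can then pick a better num_workers than the one given here.
loader = DataLoader(RandomDataset(10), batch_size=2, num_workers=0)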
python/paddle/fluid/tests/unittests/test_layout_autotune.py

@@ -16,6 +16,10 @@ import paddle
import unittest
import numpy
import paddle.nn.functional as F
import tempfile
import warnings
import json
import os


class SimpleNet(paddle.nn.Layer):

@@ -41,10 +45,18 @@ class SimpleNet(paddle.nn.Layer):
class LayoutAutoTune(unittest.TestCase):
    def use_autoune(self):
        if paddle.is_compiled_with_cuda():
            paddle.fluid.core.enable_layout_autotune()
            paddle.incubate.autotune.set_config(
                config={"layout": {"enable": True}})
            return paddle.fluid.core.use_layout_autotune()
        else:
            paddle.fluid.core.disable_layout_autotune()
            config = {"layout": {"enable": False}}
            tfile = tempfile.NamedTemporaryFile(mode="w+", delete=False)
            json.dump(config, tfile)
            tfile.close()
            paddle.incubate.autotune.set_config(tfile.name)
            os.remove(tfile.name)
            return paddle.fluid.core.use_layout_autotune()

    def train(self, data_format):

@@ -103,7 +115,6 @@ class LayoutAutoTune(unittest.TestCase):
    def test_flatten_op_transposer(self):
        if not self.use_autoune():
            return
        paddle.fluid.core.enable_layout_autotune()
        conv = paddle.nn.Conv2D(3, 8, (3, 3))
        flatten = paddle.nn.Flatten(start_axis=1, stop_axis=2)
        data = paddle.rand([1, 3, 16, 14])

@@ -119,5 +130,20 @@ class LayoutAutoTune(unittest.TestCase):
        self.assertEqual(out.shape, [1, 112, 12])


class TestAutoTuneAPI(unittest.TestCase):
    def test_set_config_warnings(self):
        with warnings.catch_warnings(record=True) as w:
            config = {"layout": {"enable": 1}}
            # On linux, we can open the file again to read the content
            # without closing the file, but on windows system, there is
            # no permission to open it again without closing it.
            tfile = tempfile.NamedTemporaryFile(mode="w+", delete=False)
            json.dump(config, tfile)
            tfile.close()
            paddle.incubate.autotune.set_config(tfile.name)
            os.remove(tfile.name)
            self.assertTrue(len(w) == 1)


if __name__ == '__main__':
    unittest.main()
python/paddle/fluid/tests/unittests/test_switch_autotune.py

@@ -15,6 +15,10 @@
import paddle
import unittest
import numpy as np
import tempfile
import warnings
import json
import os


class SimpleNet(paddle.nn.Layer):

@@ -73,10 +77,13 @@ class TestAutoTune(unittest.TestCase):
        return expected_res

    def test_autotune(self):
        paddle.fluid.core.disable_autotune()
        paddle.incubate.autotune.set_config(
            config={"kernel": {"enable": False}})
        self.assertEqual(self.get_flags("FLAGS_use_autotune"), False)

        paddle.fluid.core.enable_autotune()
        paddle.incubate.autotune.set_config(
            config={"kernel": {"enable": True}})
        self.assertEqual(self.get_flags("FLAGS_use_autotune"), True)

    def check_status(self, expected_res):

@@ -93,10 +100,16 @@ class TestDygraphAutoTuneStatus(TestAutoTune):
    def run_program(self, enable_autotune):
        self.set_flags(enable_autotune)
        if enable_autotune:
            paddle.fluid.core.enable_autotune()
            paddle.incubate.autotune.set_config(
                config={"kernel": {"enable": True, "tuning_range": [1, 2]}})
        else:
            paddle.fluid.core.disable_autotune()
            paddle.fluid.core.set_autotune_range(1, 2)
            paddle.incubate.autotune.set_config(
                config={"kernel": {"enable": False}})
        x_var = paddle.uniform((1, 1, 8, 8), dtype='float32', min=-1., max=1.)
        net = SimpleNet()
        for i in range(3):

@@ -141,10 +154,18 @@ class TestStaticAutoTuneStatus(TestAutoTune):
        self.set_flags(enable_autotune)
        if enable_autotune:
            paddle.fluid.core.enable_autotune()
            config = {"kernel": {"enable": True, "tuning_range": [1, 2]}}
            tfile = tempfile.NamedTemporaryFile(mode="w+", delete=False)
            json.dump(config, tfile)
            tfile.close()
            paddle.incubate.autotune.set_config(tfile.name)
            os.remove(tfile.name)
        else:
            paddle.fluid.core.disable_autotune()
            paddle.fluid.core.set_autotune_range(1, 2)
            paddle.incubate.autotune.set_config(
                config={"kernel": {"enable": False, "tuning_range": [1, 2]}})
        for i in range(3):
            exe.run(program=main_program, feed={'X': x}, fetch_list=[loss])

@@ -166,5 +187,22 @@ class TestStaticAutoTuneStatus(TestAutoTune):
        self.func_disable_autotune()


class TestAutoTuneAPI(unittest.TestCase):
    def test_set_config_warnings(self):
        with warnings.catch_warnings(record=True) as w:
            config = {"kernel": {"enable": 1, "tuning_range": 1}}
            tfile = tempfile.NamedTemporaryFile(mode="w+", delete=False)
            json.dump(config, tfile)
            tfile.close()
            paddle.incubate.autotune.set_config(tfile.name)
            os.remove(tfile.name)
            self.assertTrue(len(w) == 2)

    def test_set_config_attr(self):
        paddle.incubate.autotune.set_config(config=None)
        self.assertEqual(
            paddle.get_flags("FLAGS_use_autotune")["FLAGS_use_autotune"], True)


if __name__ == '__main__':
    unittest.main()
python/paddle/incubate/__init__.py

@@ -29,6 +29,7 @@ from .tensor import segment_max
from .tensor import segment_min
from .passes import fuse_resnet_unit_pass
import paddle.incubate.autograd
import paddle.incubate.autotune

from . import nn  #noqa: F401
python/paddle/incubate/autotune.py (new file, mode 100644)

# Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import paddle
import json
import warnings
from paddle.fluid import core

__all__ = ['set_config']


def set_config(config=None):
    r"""
    Set the configuration for kernel, layout and dataloader auto-tuning.

    1. kernel: When it is enabled, an exhaustive search method will be used to
       select and cache the best algorithm for the operator in the tuning
       iterations. Tuning parameters are as follows:

       - enable(bool): Whether to enable kernel tuning.
       - tuning_range(list): Start and end iteration for auto-tuning. Default: [1, 10].

    2. layout: When it is enabled, the best data layout such as NCHW or NHWC will
       be determined based on the device and data type. When the original layout
       setting is not the best, a layout transformation will be performed
       automatically to improve model performance. Layout auto-tuning currently
       only supports dygraph mode. Tuning parameters are as follows:

       - enable(bool): Whether to enable layout tuning.

    3. dataloader: When it is enabled, the best num_workers will be selected to
       replace the original dataloader setting. Tuning parameters are as follows:

       - enable(bool): Whether to enable dataloader tuning.

    Args:
        config (dict|str|None, optional): Configuration for auto-tuning. If it is a
            dictionary, the key is the tuning type and the value is a dictionary of
            the corresponding tuning parameters. If it is a string, it specifies the
            path of a json file from which the tuning configuration will be set.
            Default: None, in which case auto-tuning for kernel, layout and
            dataloader will all be enabled.

    Examples:
        .. code-block:: python
            :name: auto-tuning

            import paddle
            import json

            # config is a dict.
            config = {
                "kernel": {
                    "enable": True,
                    "tuning_range": [1, 5],
                },
                "layout": {
                    "enable": True,
                },
                "dataloader": {
                    "enable": True,
                }
            }
            paddle.incubate.autotune.set_config(config)

            # config is the path of json file.
            config_json = json.dumps(config)
            with open('config.json', 'w') as json_file:
                json_file.write(config_json)
            paddle.incubate.autotune.set_config('config.json')
    """
    if config is None:
        core.enable_autotune()
        core.enable_layout_autotune()
        paddle.fluid.reader.set_autotune_config(use_autotune=True)
        return

    config_dict = {}
    if isinstance(config, dict):
        config_dict = config
    elif isinstance(config, str):
        try:
            with open(config, 'r') as filehandle:
                config_dict = json.load(filehandle)
        except Exception as e:
            print('Load config error: {}'.format(e))
            warnings.warn("Use default configuration for auto-tuning.")

    if "kernel" in config_dict:
        kernel_config = config_dict["kernel"]
        if "enable" in kernel_config:
            if isinstance(kernel_config['enable'], bool):
                if kernel_config['enable']:
                    core.enable_autotune()
                else:
                    core.disable_autotune()
            else:
                warnings.warn(
                    "The auto-tuning configuration of the kernel is incorrect."
                    "The `enable` should be bool. Use default parameter instead."
                )
        if "tuning_range" in kernel_config:
            if isinstance(kernel_config['tuning_range'], list):
                tuning_range = kernel_config['tuning_range']
                assert len(tuning_range) == 2
                core.set_autotune_range(tuning_range[0], tuning_range[1])
            else:
                warnings.warn(
                    "The auto-tuning configuration of the kernel is incorrect."
                    "The `tuning_range` should be list. Use default parameter instead."
                )
    if "layout" in config_dict:
        layout_config = config_dict["layout"]
        if "enable" in layout_config:
            if isinstance(layout_config['enable'], bool):
                if layout_config['enable']:
                    core.enable_layout_autotune()
                else:
                    core.disable_layout_autotune()
            else:
                warnings.warn(
                    "The auto-tuning configuration of the layout is incorrect."
                    "The `enable` should be bool. Use default parameter instead."
                )
    if "dataloader" in config_dict:
        dataloader_config = config_dict["dataloader"]
        use_autoune = False
        if "enable" in dataloader_config:
            if isinstance(dataloader_config['enable'], bool):
                use_autoune = dataloader_config['enable']
            else:
                warnings.warn(
                    "The auto-tuning configuration of the dataloader is incorrect."
                    "The `enable` should be bool. Use default parameter instead."
                )
        if "tuning_steps" in dataloader_config:
            if isinstance(dataloader_config['tuning_steps'], int):
                paddle.fluid.reader.set_autotune_config(
                    use_autoune, dataloader_config['tuning_steps'])
            else:
                warnings.warn(
                    "The auto-tuning configuration of the dataloader is incorrect."
                    "The `tuning_steps` should be int. Use default parameter instead."
                )
                paddle.fluid.reader.set_autotune_config(use_autoune)
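As test_set_config_attr above shows, calling set_config with config=None turns all three tuning types on. A short sketch of how that effect can be observed through the FLAGS_use_autotune flag the tests assert on:

import paddle

# Enable kernel, layout and dataloader auto-tuning with the defaults.
paddle.incubate.autotune.set_config(config=None)

# Kernel tuning is reflected in the FLAGS_use_autotune flag.
print(paddle.get_flags("FLAGS_use_autotune")["FLAGS_use_autotune"])  # expected: True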