diff --git a/python/paddle/fluid/tests/unittests/test_dataloader_autotune.py b/python/paddle/fluid/tests/unittests/test_dataloader_autotune.py
index a140bb5c79c93b2de2b65a7c65643b3abe376c0f..7348783bd6748f8e37766ab5d685bbaea4aacd77 100755
--- a/python/paddle/fluid/tests/unittests/test_dataloader_autotune.py
+++ b/python/paddle/fluid/tests/unittests/test_dataloader_autotune.py
@@ -15,12 +15,14 @@ from __future__ import print_function

 import unittest
 import numpy as np
-
+import tempfile
+import warnings
+import json
 import paddle
 import paddle.nn as nn
 from paddle.io import Dataset, DataLoader, BatchSampler, SequenceSampler
-from paddle.fluid.reader import set_autotune_config
 import sys
+import os


 class RandomDataset(Dataset):
@@ -51,12 +53,21 @@ class TestAutoTune(unittest.TestCase):
         self.dataset = RandomDataset(10)

     def test_dataloader_use_autotune(self):
-        set_autotune_config(True, 1)
+        paddle.incubate.autotune.set_config(
+            config={"dataloader": {
+                "enable": True,
+                "tuning_steps": 1,
+            }})
         loader = DataLoader(
             self.dataset, batch_size=self.batch_size, num_workers=0)

     def test_dataloader_disable_autotune(self):
-        set_autotune_config(False)
+        config = {"dataloader": {"enable": False, "tuning_steps": 1}}
+        tfile = tempfile.NamedTemporaryFile(mode="w+", delete=False)
+        json.dump(config, tfile)
+        tfile.close()
+        paddle.incubate.autotune.set_config(tfile.name)
+        os.remove(tfile.name)
         loader = DataLoader(
             self.dataset, batch_size=self.batch_size, num_workers=2)
         if (sys.platform == 'darwin' or sys.platform == 'win32'):
@@ -65,12 +76,28 @@ class TestAutoTune(unittest.TestCase):
             self.assertEqual(loader.num_workers, 2)

     def test_distributer_batch_sampler_autotune(self):
-        set_autotune_config(True, 1)
+        paddle.incubate.autotune.set_config(
+            config={"dataloader": {
+                "enable": True,
+                "tuning_steps": 1,
+            }})
         batch_sampler = paddle.io.DistributedBatchSampler(
             self.dataset, batch_size=self.batch_size)
         loader = DataLoader(
             self.dataset, batch_sampler=batch_sampler, num_workers=2)


+class TestAutoTuneAPI(unittest.TestCase):
+    def test_set_config_warnings(self):
+        with warnings.catch_warnings(record=True) as w:
+            config = {"kernel": {"enable": 1, "tuning_range": True}}
+            tfile = tempfile.NamedTemporaryFile(mode="w+", delete=False)
+            json.dump(config, tfile)
+            tfile.close()
+            paddle.incubate.autotune.set_config(tfile.name)
+            os.remove(tfile.name)
+            self.assertTrue(len(w) == 2)
+
+
 if __name__ == '__main__':
     unittest.main()
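The tests above exercise the two supported ways of passing a configuration to the new API: inline as a dict, or indirectly as the path of a json file. A minimal standalone sketch of both call patterns (the temp-file handling mirrors the test and is not required by the API):

    import json
    import tempfile

    import paddle

    # Pass the configuration directly as a dict.
    paddle.incubate.autotune.set_config(
        config={"dataloader": {"enable": True, "tuning_steps": 1}})

    # Or dump the same structure to a json file and pass its path.
    tfile = tempfile.NamedTemporaryFile(mode="w+", suffix=".json", delete=False)
    json.dump({"dataloader": {"enable": False, "tuning_steps": 1}}, tfile)
    tfile.close()
    paddle.incubate.autotune.set_config(tfile.name)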
diff --git a/python/paddle/fluid/tests/unittests/test_layout_autotune.py b/python/paddle/fluid/tests/unittests/test_layout_autotune.py
index c71ff4381028df3ebb5c9a7c4509598510ce7040..a1440f8587ab6bcf78aa0ce74004f2eae906d203 100644
--- a/python/paddle/fluid/tests/unittests/test_layout_autotune.py
+++ b/python/paddle/fluid/tests/unittests/test_layout_autotune.py
@@ -16,6 +16,10 @@ import paddle
 import unittest
 import numpy
 import paddle.nn.functional as F
+import tempfile
+import warnings
+import json
+import os


 class SimpleNet(paddle.nn.Layer):
@@ -41,10 +45,18 @@ class SimpleNet(paddle.nn.Layer):
 class LayoutAutoTune(unittest.TestCase):
     def use_autoune(self):
         if paddle.is_compiled_with_cuda():
-            paddle.fluid.core.enable_layout_autotune()
+            paddle.incubate.autotune.set_config(
+                config={"layout": {
+                    "enable": True
+                }})
             return paddle.fluid.core.use_layout_autotune()
         else:
-            paddle.fluid.core.disable_layout_autotune()
+            config = {"layout": {"enable": False}}
+            tfile = tempfile.NamedTemporaryFile(mode="w+", delete=False)
+            json.dump(config, tfile)
+            tfile.close()
+            paddle.incubate.autotune.set_config(tfile.name)
+            os.remove(tfile.name)
             return paddle.fluid.core.use_layout_autotune()

     def train(self, data_format):
@@ -103,7 +115,6 @@ class LayoutAutoTune(unittest.TestCase):
     def test_flatten_op_transposer(self):
         if not self.use_autoune():
             return
-        paddle.fluid.core.enable_layout_autotune()
         conv = paddle.nn.Conv2D(3, 8, (3, 3))
         flatten = paddle.nn.Flatten(start_axis=1, stop_axis=2)
         data = paddle.rand([1, 3, 16, 14])
@@ -119,5 +130,20 @@
         self.assertEqual(out.shape, [1, 112, 12])


+class TestAutoTuneAPI(unittest.TestCase):
+    def test_set_config_warnings(self):
+        with warnings.catch_warnings(record=True) as w:
+            config = {"layout": {"enable": 1}}
+            # On Linux the file can be reopened for reading while it is
+            # still open, but on Windows it cannot be opened again
+            # before it has been closed.
+            tfile = tempfile.NamedTemporaryFile(mode="w+", delete=False)
+            json.dump(config, tfile)
+            tfile.close()
+            paddle.incubate.autotune.set_config(tfile.name)
+            os.remove(tfile.name)
+            self.assertTrue(len(w) == 1)
+
+
 if __name__ == '__main__':
     unittest.main()
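The `use_autoune` helper above reports whether layout tuning actually took effect, which requires a CUDA build. A minimal sketch of the same guard outside the test harness (assuming a CUDA build of Paddle, running in dygraph mode):

    import paddle

    if paddle.is_compiled_with_cuda():
        paddle.incubate.autotune.set_config(config={"layout": {"enable": True}})
        assert paddle.fluid.core.use_layout_autotune()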
diff --git a/python/paddle/fluid/tests/unittests/test_switch_autotune.py b/python/paddle/fluid/tests/unittests/test_switch_autotune.py
index 1775272aac69d0acc4cdc1128f3a94da796dbc2b..0049a922b916642c673f945766a4879d7714e444 100644
--- a/python/paddle/fluid/tests/unittests/test_switch_autotune.py
+++ b/python/paddle/fluid/tests/unittests/test_switch_autotune.py
@@ -15,6 +15,10 @@
 import paddle
 import unittest
 import numpy as np
+import tempfile
+import warnings
+import json
+import os


 class SimpleNet(paddle.nn.Layer):
@@ -73,10 +77,13 @@ class TestAutoTune(unittest.TestCase):
         return expected_res

     def test_autotune(self):
-        paddle.fluid.core.disable_autotune()
+        paddle.incubate.autotune.set_config(
+            config={"kernel": {
+                "enable": False
+            }})
         self.assertEqual(self.get_flags("FLAGS_use_autotune"), False)

-        paddle.fluid.core.enable_autotune()
+        paddle.incubate.autotune.set_config(config={"kernel": {"enable": True}})
         self.assertEqual(self.get_flags("FLAGS_use_autotune"), True)

     def check_status(self, expected_res):
@@ -93,10 +100,16 @@ class TestDygraphAutoTuneStatus(TestAutoTune):
     def run_program(self, enable_autotune):
         self.set_flags(enable_autotune)
         if enable_autotune:
-            paddle.fluid.core.enable_autotune()
+            paddle.incubate.autotune.set_config(
+                config={"kernel": {
+                    "enable": True,
+                    "tuning_range": [1, 2]
+                }})
         else:
-            paddle.fluid.core.disable_autotune()
-        paddle.fluid.core.set_autotune_range(1, 2)
+            paddle.incubate.autotune.set_config(
+                config={"kernel": {
+                    "enable": False
+                }})
         x_var = paddle.uniform((1, 1, 8, 8), dtype='float32', min=-1., max=1.)
         net = SimpleNet()
         for i in range(3):
@@ -141,10 +154,18 @@ class TestStaticAutoTuneStatus(TestAutoTune):
         self.set_flags(enable_autotune)
         if enable_autotune:
-            paddle.fluid.core.enable_autotune()
+            config = {"kernel": {"enable": True, "tuning_range": [1, 2]}}
+            tfile = tempfile.NamedTemporaryFile(mode="w+", delete=False)
+            json.dump(config, tfile)
+            tfile.close()
+            paddle.incubate.autotune.set_config(tfile.name)
+            os.remove(tfile.name)
         else:
-            paddle.fluid.core.disable_autotune()
-            paddle.fluid.core.set_autotune_range(1, 2)
+            paddle.incubate.autotune.set_config(
+                config={"kernel": {
+                    "enable": False,
+                    "tuning_range": [1, 2]
+                }})

         for i in range(3):
             exe.run(program=main_program, feed={'X': x}, fetch_list=[loss])
@@ -166,5 +187,22 @@ class TestStaticAutoTuneStatus(TestAutoTune):
         self.func_disable_autotune()


+class TestAutoTuneAPI(unittest.TestCase):
+    def test_set_config_warnings(self):
+        with warnings.catch_warnings(record=True) as w:
+            config = {"kernel": {"enable": 1, "tuning_range": 1}}
+            tfile = tempfile.NamedTemporaryFile(mode="w+", delete=False)
+            json.dump(config, tfile)
+            tfile.close()
+            paddle.incubate.autotune.set_config(tfile.name)
+            os.remove(tfile.name)
+            self.assertTrue(len(w) == 2)
+
+    def test_set_config_attr(self):
+        paddle.incubate.autotune.set_config(config=None)
+        self.assertEqual(
+            paddle.get_flags("FLAGS_use_autotune")["FLAGS_use_autotune"], True)
+
+
 if __name__ == '__main__':
     unittest.main()
diff --git a/python/paddle/incubate/__init__.py b/python/paddle/incubate/__init__.py
index d8cc322a66e27a6ba3da425a2f0e39661d54ef4b..ff7a167f1a670a1c7e64ef0ac0653092dfa20966 100644
--- a/python/paddle/incubate/__init__.py
+++ b/python/paddle/incubate/__init__.py
@@ -29,6 +29,7 @@ from .tensor import segment_max
 from .tensor import segment_min
 from .passes import fuse_resnet_unit_pass
 import paddle.incubate.autograd
+import paddle.incubate.autotune

 from . import nn #noqa: F401
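The kernel switch set by these tests is observable through the FLAGS_use_autotune flag, which is what test_set_config_attr asserts. A minimal sketch of toggling it and reading it back (paddle.get_flags returns a dict keyed by flag name):

    import paddle

    paddle.incubate.autotune.set_config(
        config={"kernel": {"enable": True, "tuning_range": [1, 2]}})
    assert paddle.get_flags("FLAGS_use_autotune")["FLAGS_use_autotune"]

    paddle.incubate.autotune.set_config(config={"kernel": {"enable": False}})
    assert not paddle.get_flags("FLAGS_use_autotune")["FLAGS_use_autotune"]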
diff --git a/python/paddle/incubate/autotune.py b/python/paddle/incubate/autotune.py
new file mode 100644
index 0000000000000000000000000000000000000000..82e3a9376c2da15b91a103b24d494a4de709165e
--- /dev/null
+++ b/python/paddle/incubate/autotune.py
@@ -0,0 +1,157 @@
+# Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import paddle
+import json
+import warnings
+from paddle.fluid import core
+
+__all__ = ['set_config']
+
+
+def set_config(config=None):
+    r"""
+    Set the configuration for kernel, layout and dataloader auto-tuning.
+
+    1. kernel: When it is enabled, an exhaustive search method will be used to
+    select and cache the best algorithm for the operator in the tuning
+    iterations. Tuning parameters are as follows:
+
+    - enable(bool): Whether to enable kernel tuning.
+    - tuning_range(list): Start and end iteration for auto-tuning. Default: [1, 10].
+
+    2. layout: When it is enabled, the best data layout such as NCHW or NHWC will
+    be determined based on the device and data type. When the origin layout
+    setting is not the best, layout transformation will be automatically
+    performed to improve model performance. Layout auto-tuning currently
+    supports dygraph mode only. Tuning parameters are as follows:
+
+    - enable(bool): Whether to enable layout tuning.
+
+    3. dataloader: When it is enabled, the best num_workers will be selected to
+    replace the origin dataloader setting. Tuning parameters are as follows:
+
+    - enable(bool): Whether to enable dataloader tuning.
+
+    Args:
+        config (dict|str|None, optional): Configuration for auto-tuning. If it is
+            a dictionary, the key is the tuning type, and the value is a
+            dictionary of the corresponding tuning parameters. If it is a string,
+            it specifies the path of a json file, and the tuning configuration
+            will be read from that file. Default: None, meaning that auto-tuning
+            for kernel, layout and dataloader will all be enabled.
+
+    Examples:
+        .. code-block:: python
+            :name: auto-tuning
+
+            import paddle
+            import json
+
+            # config is a dict.
+            config = {
+                "kernel": {
+                    "enable": True,
+                    "tuning_range": [1, 5],
+                },
+                "layout": {
+                    "enable": True,
+                },
+                "dataloader": {
+                    "enable": True,
+                }
+            }
+            paddle.incubate.autotune.set_config(config)
+
+            # config is the path of a json file.
+            config_json = json.dumps(config)
+            with open('config.json', 'w') as json_file:
+                json_file.write(config_json)
+            paddle.incubate.autotune.set_config('config.json')
+
+    """
+    if config is None:
+        core.enable_autotune()
+        core.enable_layout_autotune()
+        paddle.fluid.reader.set_autotune_config(use_autotune=True)
+        return
+
+    config_dict = {}
+    if isinstance(config, dict):
+        config_dict = config
+    elif isinstance(config, str):
+        try:
+            with open(config, 'r') as filehandle:
+                config_dict = json.load(filehandle)
+        except Exception as e:
+            print('Load config error: {}'.format(e))
+            warnings.warn("Use default configuration for auto-tuning.")
+
+    if "kernel" in config_dict:
+        kernel_config = config_dict["kernel"]
+        if "enable" in kernel_config:
+            if isinstance(kernel_config['enable'], bool):
+                if kernel_config['enable']:
+                    core.enable_autotune()
+                else:
+                    core.disable_autotune()
+            else:
+                warnings.warn(
+                    "The auto-tuning configuration of the kernel is incorrect."
+                    " The `enable` should be bool. Use default parameter instead."
+                )
+        if "tuning_range" in kernel_config:
+            if isinstance(kernel_config['tuning_range'], list):
+                tuning_range = kernel_config['tuning_range']
+                assert len(tuning_range) == 2
+                core.set_autotune_range(tuning_range[0], tuning_range[1])
+            else:
+                warnings.warn(
+                    "The auto-tuning configuration of the kernel is incorrect."
+                    " The `tuning_range` should be list. Use default parameter instead."
+                )
+    if "layout" in config_dict:
+        layout_config = config_dict["layout"]
+        if "enable" in layout_config:
+            if isinstance(layout_config['enable'], bool):
+                if layout_config['enable']:
+                    core.enable_layout_autotune()
+                else:
+                    core.disable_layout_autotune()
+            else:
+                warnings.warn(
+                    "The auto-tuning configuration of the layout is incorrect."
+                    " The `enable` should be bool. Use default parameter instead."
+                )
+    if "dataloader" in config_dict:
+        dataloader_config = config_dict["dataloader"]
+        use_autotune = False
+        if "enable" in dataloader_config:
+            if isinstance(dataloader_config['enable'], bool):
+                use_autotune = dataloader_config['enable']
+            else:
+                warnings.warn(
+                    "The auto-tuning configuration of the dataloader is incorrect."
+                    " The `enable` should be bool. Use default parameter instead."
+                )
+        if "tuning_steps" in dataloader_config:
+            if isinstance(dataloader_config['tuning_steps'], int):
+                paddle.fluid.reader.set_autotune_config(
+                    use_autotune, dataloader_config['tuning_steps'])
+            else:
+                warnings.warn(
+                    "The auto-tuning configuration of the dataloader is incorrect."
+                    " The `tuning_steps` should be int. Use default parameter instead."
+                )
+                paddle.fluid.reader.set_autotune_config(use_autotune)
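Two behaviors of set_config follow directly from the code above: config=None enables all three tuners, and an ill-typed field does not raise but emits one warning and keeps the default. A minimal sketch of both (the warning count matches the TestAutoTuneAPI cases):

    import warnings

    import paddle

    # Default: kernel, layout and dataloader tuning are all enabled.
    paddle.incubate.autotune.set_config(config=None)

    # Each ill-typed field warns once instead of raising.
    with warnings.catch_warnings(record=True) as w:
        paddle.incubate.autotune.set_config(
            config={"kernel": {"enable": 1, "tuning_range": 1}})
    assert len(w) == 2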