diff --git a/python/paddle/distributed/fleet/meta_optimizers/asp_optimizer.py b/python/paddle/distributed/fleet/meta_optimizers/asp_optimizer.py index 53ababc4824514ca60408881f81fe367f6edde14..96b38e39395611839506c246d2adf057ecba228b 100644 --- a/python/paddle/distributed/fleet/meta_optimizers/asp_optimizer.py +++ b/python/paddle/distributed/fleet/meta_optimizers/asp_optimizer.py @@ -12,7 +12,8 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and -from paddle.fluid.contrib.sparsity.asp import ASPHelper + +from paddle.incubate.asp import ASPHelper from .meta_optimizer_base import MetaOptimizerBase diff --git a/python/paddle/fluid/contrib/__init__.py b/python/paddle/fluid/contrib/__init__.py index 2860d414d0a5bd4bb212c0ee56b82033eb34f498..1a509f725b5b69c880d011ae1c3104d7bc9dd58c 100644 --- a/python/paddle/fluid/contrib/__init__.py +++ b/python/paddle/fluid/contrib/__init__.py @@ -31,8 +31,6 @@ from . import layers from .layers import * from . import optimizer from .optimizer import * -from . import sparsity -from .sparsity import * __all__ = [] @@ -43,4 +41,3 @@ __all__ += extend_optimizer.__all__ __all__ += ['mixed_precision'] __all__ += layers.__all__ __all__ += optimizer.__all__ -__all__ += sparsity.__all__ diff --git a/python/paddle/fluid/contrib/sparsity/__init__.py b/python/paddle/fluid/contrib/sparsity/__init__.py deleted file mode 100644 index fcb7acda377a0b940af51e58d2021c0fc70bacec..0000000000000000000000000000000000000000 --- a/python/paddle/fluid/contrib/sparsity/__init__.py +++ /dev/null @@ -1,48 +0,0 @@ -# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved. -# Copyright (c) 2021 NVIDIA Corporation. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -from .utils import calculate_density -from .utils import check_mask_1d -from .utils import get_mask_1d -from .utils import check_mask_2d -from .utils import get_mask_2d_greedy -from .utils import get_mask_2d_best -from .utils import create_mask -from .utils import check_sparsity -from .utils import MaskAlgo -from .utils import CheckMethod -from .asp import decorate -from .asp import prune_model -from .asp import set_excluded_layers -from .asp import reset_excluded_layers -from .supported_layer_list import add_supported_layer - -__all__ = [ - 'calculate_density', - 'check_mask_1d', - 'get_mask_1d', - 'check_mask_2d', - 'get_mask_2d_greedy', - 'get_mask_2d_best', - 'create_mask', - 'check_sparsity', - 'MaskAlgo', - 'CheckMethod', - 'decorate', - 'prune_model', - 'set_excluded_layers', - 'reset_excluded_layers', - 'add_supported_layer', -] diff --git a/python/paddle/fluid/tests/unittests/asp/asp_pruning_base.py b/python/paddle/fluid/tests/unittests/asp/asp_pruning_base.py index 08c530241ccc3893640c07e381b442f71515a3ab..b51f963e9a7b0e3a943454b2ee4440eb954696a4 100644 --- a/python/paddle/fluid/tests/unittests/asp/asp_pruning_base.py +++ b/python/paddle/fluid/tests/unittests/asp/asp_pruning_base.py @@ -20,7 +20,7 @@ import numpy as np import paddle import paddle.fluid as fluid import paddle.fluid.core as core -from paddle.fluid.contrib.sparsity.asp import ASPHelper +from paddle.incubate.asp import ASPHelper paddle.enable_static() @@ -94,7 +94,7 @@ class TestASPHelperPruningBase(unittest.TestCase): fluid.global_scope().find_var(param.name).get_tensor() ) self.assertTrue( - paddle.fluid.contrib.sparsity.check_sparsity( + paddle.incubate.asp.check_sparsity( mat.T, func_name=check_func_name, n=2, m=4 ) ) diff --git a/python/paddle/fluid/tests/unittests/asp/test_asp_customized_pruning.py b/python/paddle/fluid/tests/unittests/asp/test_asp_customized_pruning.py index dc8527377b3c670226e588d1fd9a06dfe44cae4f..dc89178a53c67fa8700af32f388cd403964f81e5 100644 --- a/python/paddle/fluid/tests/unittests/asp/test_asp_customized_pruning.py +++ b/python/paddle/fluid/tests/unittests/asp/test_asp_customized_pruning.py @@ -20,11 +20,11 @@ import numpy as np import paddle import paddle.fluid as fluid import paddle.fluid.core as core -from paddle.fluid.contrib import sparsity -from paddle.fluid.contrib.sparsity.supported_layer_list import ( +from paddle.fluid.dygraph.layers import Layer, _convert_camel_to_snake +from paddle.incubate import asp as sparsity +from paddle.incubate.asp.supported_layer_list import ( supported_layers_and_prune_func_map, ) -from paddle.fluid.dygraph.layers import Layer, _convert_camel_to_snake class MyOwnLayer(Layer): @@ -251,9 +251,7 @@ class TestASPStaticCustomerizedPruneFunc(unittest.TestCase): len(param.shape) == 2 and param.shape[0] < 4 ): self.assertFalse( - paddle.fluid.contrib.sparsity.check_sparsity( - mat.T, n=2, m=4 - ) + paddle.incubate.asp.check_sparsity(mat.T, n=2, m=4) ) else: self.assertTrue( diff --git a/python/paddle/fluid/tests/unittests/asp/test_asp_optimize_dynamic.py b/python/paddle/fluid/tests/unittests/asp/test_asp_optimize_dynamic.py index 32a8ac5a14c58f273254c58da6254370e1629e00..d222897b4519a9d8238e2e11ccbdcaab8f025751 100644 --- a/python/paddle/fluid/tests/unittests/asp/test_asp_optimize_dynamic.py +++ b/python/paddle/fluid/tests/unittests/asp/test_asp_optimize_dynamic.py @@ -19,7 +19,7 @@ import numpy as np import paddle import paddle.fluid.core as core -from paddle.fluid.contrib.sparsity.asp import ASPHelper +from paddle.incubate.asp import ASPHelper class 
MyLayer(paddle.nn.Layer): @@ -180,15 +180,11 @@ class TestASPDynamicOptimize(unittest.TestCase): len(param.shape) == 2 and param.shape[0] < 4 ): self.assertFalse( - paddle.fluid.contrib.sparsity.check_sparsity( - mat.T, n=2, m=4 - ) + paddle.incubate.asp.check_sparsity(mat.T, n=2, m=4) ) else: self.assertTrue( - paddle.fluid.contrib.sparsity.check_sparsity( - mat.T, n=2, m=4 - ) + paddle.incubate.asp.check_sparsity(mat.T, n=2, m=4) ) def test_asp_training_with_amp(self): @@ -229,16 +225,12 @@ class TestASPDynamicOptimize(unittest.TestCase): len(param.shape) == 2 and param.shape[0] < 4 ): self.assertFalse( - paddle.fluid.contrib.sparsity.check_sparsity( - mat.T, n=2, m=4 - ) + paddle.incubate.asp.check_sparsity(mat.T, n=2, m=4) ) else: self.assertTrue( - paddle.fluid.contrib.sparsity.check_sparsity( - mat.T, n=2, m=4 - ) + paddle.incubate.asp.check_sparsity(mat.T, n=2, m=4) ) diff --git a/python/paddle/fluid/tests/unittests/asp/test_asp_optimize_static.py b/python/paddle/fluid/tests/unittests/asp/test_asp_optimize_static.py index d34129eced1b06b44f078619b937aa681bb7e802..9a743f74190b29b955db5f29addbf412a872c956 100644 --- a/python/paddle/fluid/tests/unittests/asp/test_asp_optimize_static.py +++ b/python/paddle/fluid/tests/unittests/asp/test_asp_optimize_static.py @@ -20,7 +20,7 @@ import numpy as np import paddle import paddle.fluid as fluid import paddle.fluid.core as core -from paddle.fluid.contrib.sparsity.asp import ASPHelper +from paddle.incubate.asp import ASPHelper paddle.enable_static() @@ -202,15 +202,11 @@ class TestASPStaticOptimize(unittest.TestCase): len(param.shape) == 2 and param.shape[0] < 4 ): self.assertFalse( - paddle.fluid.contrib.sparsity.check_sparsity( - mat.T, n=2, m=4 - ) + paddle.incubate.asp.check_sparsity(mat.T, n=2, m=4) ) else: self.assertTrue( - paddle.fluid.contrib.sparsity.check_sparsity( - mat.T, n=2, m=4 - ) + paddle.incubate.asp.check_sparsity(mat.T, n=2, m=4) ) def test_asp_training_with_amp(self): @@ -248,15 +244,11 @@ class TestASPStaticOptimize(unittest.TestCase): len(param.shape) == 2 and param.shape[0] < 4 ): self.assertFalse( - paddle.fluid.contrib.sparsity.check_sparsity( - mat.T, n=2, m=4 - ) + paddle.incubate.asp.check_sparsity(mat.T, n=2, m=4) ) else: self.assertTrue( - paddle.fluid.contrib.sparsity.check_sparsity( - mat.T, n=2, m=4 - ) + paddle.incubate.asp.check_sparsity(mat.T, n=2, m=4) ) def __get_param_names(self, params): diff --git a/python/paddle/fluid/tests/unittests/asp/test_asp_pruning_dynamic.py b/python/paddle/fluid/tests/unittests/asp/test_asp_pruning_dynamic.py index 13282e0222c87fb1be89485908a6820b11d38db1..c749d739f979216d13e997b2d5cbc3fea45fa6fa 100644 --- a/python/paddle/fluid/tests/unittests/asp/test_asp_pruning_dynamic.py +++ b/python/paddle/fluid/tests/unittests/asp/test_asp_pruning_dynamic.py @@ -19,7 +19,7 @@ import numpy as np import paddle from paddle.fluid import core -from paddle.fluid.contrib.sparsity.asp import ASPHelper +from paddle.incubate.asp import ASPHelper class MyLayer(paddle.nn.Layer): @@ -58,9 +58,7 @@ class TestASPDynamicPruningBase(unittest.TestCase): def set_config(self): self.mask_gen_func = 'mask_1d' - self.mask_check_func = ( - paddle.fluid.contrib.sparsity.CheckMethod.CHECK_1D - ) + self.mask_check_func = paddle.incubate.asp.CheckMethod.CHECK_1D def test_inference_pruning(self): self.__pruning_and_checking(False) @@ -89,13 +87,11 @@ class TestASPDynamicPruningBase(unittest.TestCase): len(param.shape) == 2 and param.shape[0] < 4 ): self.assertFalse( - paddle.fluid.contrib.sparsity.check_sparsity( - 
mat.T, n=2, m=4 - ) + paddle.incubate.asp.check_sparsity(mat.T, n=2, m=4) ) else: self.assertTrue( - paddle.fluid.contrib.sparsity.check_sparsity( + paddle.incubate.asp.check_sparsity( mat.T, func_name=self.mask_check_func, n=2, m=4 ) ) @@ -104,25 +100,19 @@ class TestASPDynamicPruningBase(unittest.TestCase): class TestASPDynamicPruning1D(TestASPDynamicPruningBase): def set_config(self): self.mask_gen_func = 'mask_1d' - self.mask_check_func = ( - paddle.fluid.contrib.sparsity.CheckMethod.CHECK_1D - ) + self.mask_check_func = paddle.incubate.asp.CheckMethod.CHECK_1D class TestASPDynamicPruning2DBest(TestASPDynamicPruningBase): def set_config(self): self.mask_gen_func = 'mask_2d_best' - self.mask_check_func = ( - paddle.fluid.contrib.sparsity.CheckMethod.CHECK_2D - ) + self.mask_check_func = paddle.incubate.asp.CheckMethod.CHECK_2D class TestASPDynamicPruning2DGreedy(TestASPDynamicPruningBase): def set_config(self): self.mask_gen_func = 'mask_2d_greedy' - self.mask_check_func = ( - paddle.fluid.contrib.sparsity.CheckMethod.CHECK_2D - ) + self.mask_check_func = paddle.incubate.asp.CheckMethod.CHECK_2D if __name__ == '__main__': diff --git a/python/paddle/fluid/tests/unittests/asp/test_asp_pruning_static.py b/python/paddle/fluid/tests/unittests/asp/test_asp_pruning_static.py index 82c2afc299d72304b3ac3c2fa0ad370a3dc9fb4d..364f5d915892cd279a5ad5fc86dcddc4d936be74 100644 --- a/python/paddle/fluid/tests/unittests/asp/test_asp_pruning_static.py +++ b/python/paddle/fluid/tests/unittests/asp/test_asp_pruning_static.py @@ -20,7 +20,7 @@ import numpy as np import paddle import paddle.fluid as fluid import paddle.fluid.core as core -from paddle.fluid.contrib.sparsity.asp import ASPHelper +from paddle.incubate.asp import ASPHelper paddle.enable_static() @@ -50,9 +50,7 @@ class TestASPStaticPruningBase(unittest.TestCase): def set_config(self): self.mask_gen_func = 'mask_1d' - self.mask_check_func = ( - paddle.fluid.contrib.sparsity.CheckMethod.CHECK_1D - ) + self.mask_check_func = paddle.incubate.asp.CheckMethod.CHECK_1D def test_inference_pruning(self): place = paddle.CPUPlace() @@ -98,13 +96,11 @@ class TestASPStaticPruningBase(unittest.TestCase): len(param.shape) == 2 and param.shape[0] < 4 ): self.assertFalse( - paddle.fluid.contrib.sparsity.check_sparsity( - mat.T, n=2, m=4 - ) + paddle.incubate.asp.check_sparsity(mat.T, n=2, m=4) ) else: self.assertTrue( - paddle.fluid.contrib.sparsity.check_sparsity( + paddle.incubate.asp.check_sparsity( mat.T, func_name=self.mask_check_func, n=2, m=4 ) ) @@ -113,25 +109,19 @@ class TestASPStaticPruningBase(unittest.TestCase): class TestASPStaticPruning1D(TestASPStaticPruningBase): def set_config(self): self.mask_gen_func = 'mask_1d' - self.mask_check_func = ( - paddle.fluid.contrib.sparsity.CheckMethod.CHECK_1D - ) + self.mask_check_func = paddle.incubate.asp.CheckMethod.CHECK_1D class TestASPStaticPruning2DBest(TestASPStaticPruningBase): def set_config(self): self.mask_gen_func = 'mask_2d_best' - self.mask_check_func = ( - paddle.fluid.contrib.sparsity.CheckMethod.CHECK_2D - ) + self.mask_check_func = paddle.incubate.asp.CheckMethod.CHECK_2D class TestASPStaticPruning2DGreedy(TestASPStaticPruningBase): def set_config(self): self.mask_gen_func = 'mask_2d_greedy' - self.mask_check_func = ( - paddle.fluid.contrib.sparsity.CheckMethod.CHECK_2D - ) + self.mask_check_func = paddle.incubate.asp.CheckMethod.CHECK_2D if __name__ == '__main__': diff --git a/python/paddle/fluid/tests/unittests/asp/test_asp_save_load.py 
b/python/paddle/fluid/tests/unittests/asp/test_asp_save_load.py index 30d3155788c4abc6ac0abeb238e1294dc510c42e..89ded3d73a453262f7ef4cc69f0871d9dbf01ce8 100644 --- a/python/paddle/fluid/tests/unittests/asp/test_asp_save_load.py +++ b/python/paddle/fluid/tests/unittests/asp/test_asp_save_load.py @@ -20,7 +20,7 @@ import numpy as np import paddle import paddle.fluid as fluid import paddle.fluid.core as core -from paddle.fluid.contrib.sparsity.asp import ASPHelper +from paddle.incubate.asp import ASPHelper class MyLayer(paddle.nn.Layer): @@ -112,15 +112,11 @@ class TestASPDynamicOptimize(unittest.TestCase): len(param.shape) == 2 and param.shape[0] < 4 ): self.assertFalse( - paddle.fluid.contrib.sparsity.check_sparsity( - mat.T, n=2, m=4 - ) + paddle.incubate.asp.check_sparsity(mat.T, n=2, m=4) ) else: self.assertTrue( - paddle.fluid.contrib.sparsity.check_sparsity( - mat.T, n=2, m=4 - ) + paddle.incubate.asp.check_sparsity(mat.T, n=2, m=4) ) @@ -197,15 +193,11 @@ class TestASPStaticOptimize(unittest.TestCase): len(param.shape) == 2 and param.shape[0] < 4 ): self.assertFalse( - paddle.fluid.contrib.sparsity.check_sparsity( - mat.T, n=2, m=4 - ) + paddle.incubate.asp.check_sparsity(mat.T, n=2, m=4) ) else: self.assertTrue( - paddle.fluid.contrib.sparsity.check_sparsity( - mat.T, n=2, m=4 - ) + paddle.incubate.asp.check_sparsity(mat.T, n=2, m=4) ) diff --git a/python/paddle/fluid/tests/unittests/asp/test_asp_utils.py b/python/paddle/fluid/tests/unittests/asp/test_asp_utils.py index 7151c55c0fc69e62c25c2b85f666a2a33dcc7255..8d1d7a37cb7cef177ddcae65969277cdc4823b6c 100644 --- a/python/paddle/fluid/tests/unittests/asp/test_asp_utils.py +++ b/python/paddle/fluid/tests/unittests/asp/test_asp_utils.py @@ -25,22 +25,22 @@ import paddle class TestASPUtils(unittest.TestCase): def test_get_check_method(self): self.assertEqual( - paddle.fluid.contrib.sparsity.CheckMethod.get_checking_method( - paddle.fluid.contrib.sparsity.MaskAlgo.MASK_1D + paddle.incubate.asp.CheckMethod.get_checking_method( + paddle.incubate.asp.MaskAlgo.MASK_1D ), - paddle.fluid.contrib.sparsity.CheckMethod.CHECK_1D, + paddle.incubate.asp.CheckMethod.CHECK_1D, ) self.assertEqual( - paddle.fluid.contrib.sparsity.CheckMethod.get_checking_method( - paddle.fluid.contrib.sparsity.MaskAlgo.MASK_2D_GREEDY + paddle.incubate.asp.CheckMethod.get_checking_method( + paddle.incubate.asp.MaskAlgo.MASK_2D_GREEDY ), - paddle.fluid.contrib.sparsity.CheckMethod.CHECK_2D, + paddle.incubate.asp.CheckMethod.CHECK_2D, ) self.assertEqual( - paddle.fluid.contrib.sparsity.CheckMethod.get_checking_method( - paddle.fluid.contrib.sparsity.MaskAlgo.MASK_2D_BEST + paddle.incubate.asp.CheckMethod.get_checking_method( + paddle.incubate.asp.MaskAlgo.MASK_2D_BEST ), - paddle.fluid.contrib.sparsity.CheckMethod.CHECK_2D, + paddle.incubate.asp.CheckMethod.CHECK_2D, ) def test_density(self): @@ -67,26 +67,22 @@ class TestASPUtils(unittest.TestCase): [0.0, 1.0, 0.0, 0.0, 1.0], ] ) - self.assertTrue(paddle.fluid.contrib.sparsity.check_mask_1d(x, 2, 4)) - self.assertFalse(paddle.fluid.contrib.sparsity.check_mask_1d(x, 3, 4)) - self.assertTrue(paddle.fluid.contrib.sparsity.check_mask_1d(x, 2, 5)) - self.assertFalse(paddle.fluid.contrib.sparsity.check_mask_1d(x, 3, 5)) - self.assertTrue(paddle.fluid.contrib.sparsity.check_mask_1d(x, 3, 6)) - self.assertFalse(paddle.fluid.contrib.sparsity.check_mask_1d(x, 4, 6)) + self.assertTrue(paddle.incubate.asp.check_mask_1d(x, 2, 4)) + self.assertFalse(paddle.incubate.asp.check_mask_1d(x, 3, 4)) + 
self.assertTrue(paddle.incubate.asp.check_mask_1d(x, 2, 5)) + self.assertFalse(paddle.incubate.asp.check_mask_1d(x, 3, 5)) + self.assertTrue(paddle.incubate.asp.check_mask_1d(x, 3, 6)) + self.assertFalse(paddle.incubate.asp.check_mask_1d(x, 4, 6)) def test_get_mask_1d(self): for _ in range(10): x = np.random.randint(10, size=(5, 5)) - x = paddle.fluid.contrib.sparsity.get_mask_1d(x, 2, 4) - self.assertTrue( - paddle.fluid.contrib.sparsity.check_mask_1d(x, 2, 4) - ) + x = paddle.incubate.asp.get_mask_1d(x, 2, 4) + self.assertTrue(paddle.incubate.asp.check_mask_1d(x, 2, 4)) x = np.random.randn(5, 4) - x = paddle.fluid.contrib.sparsity.get_mask_1d(x, 2, 4) - self.assertTrue( - paddle.fluid.contrib.sparsity.check_mask_1d(x, 2, 4) - ) + x = paddle.incubate.asp.get_mask_1d(x, 2, 4) + self.assertTrue(paddle.incubate.asp.check_mask_1d(x, 2, 4)) def test_check_mask_2d(self): x = np.array( @@ -98,40 +94,32 @@ class TestASPUtils(unittest.TestCase): [0.0, 1.0, 0.0, 0.0, 1.0], ] ) - self.assertTrue(paddle.fluid.contrib.sparsity.check_mask_2d(x, 2, 4)) - self.assertFalse(paddle.fluid.contrib.sparsity.check_mask_2d(x, 3, 4)) - self.assertTrue(paddle.fluid.contrib.sparsity.check_mask_2d(x, 2, 5)) - self.assertFalse(paddle.fluid.contrib.sparsity.check_mask_2d(x, 3, 5)) - self.assertTrue(paddle.fluid.contrib.sparsity.check_mask_2d(x, 3, 6)) - self.assertFalse(paddle.fluid.contrib.sparsity.check_mask_2d(x, 4, 6)) + self.assertTrue(paddle.incubate.asp.check_mask_2d(x, 2, 4)) + self.assertFalse(paddle.incubate.asp.check_mask_2d(x, 3, 4)) + self.assertTrue(paddle.incubate.asp.check_mask_2d(x, 2, 5)) + self.assertFalse(paddle.incubate.asp.check_mask_2d(x, 3, 5)) + self.assertTrue(paddle.incubate.asp.check_mask_2d(x, 3, 6)) + self.assertFalse(paddle.incubate.asp.check_mask_2d(x, 4, 6)) def test_get_mask_2d_greedy(self): for _ in range(10): x = np.random.randint(10, size=(5, 5)) - x = paddle.fluid.contrib.sparsity.get_mask_2d_greedy(x, 2, 4) - self.assertTrue( - paddle.fluid.contrib.sparsity.check_mask_2d(x, 2, 4) - ) + x = paddle.incubate.asp.get_mask_2d_greedy(x, 2, 4) + self.assertTrue(paddle.incubate.asp.check_mask_2d(x, 2, 4)) x = np.random.randn(5, 4) - x = paddle.fluid.contrib.sparsity.get_mask_2d_greedy(x, 2, 4) - self.assertTrue( - paddle.fluid.contrib.sparsity.check_mask_2d(x, 2, 4) - ) + x = paddle.incubate.asp.get_mask_2d_greedy(x, 2, 4) + self.assertTrue(paddle.incubate.asp.check_mask_2d(x, 2, 4)) def test_get_mask_2d_best(self): for _ in range(10): x = np.random.randint(10, size=(5, 5)) - x = paddle.fluid.contrib.sparsity.get_mask_2d_best(x, 2, 4) - self.assertTrue( - paddle.fluid.contrib.sparsity.check_mask_2d(x, 2, 4) - ) + x = paddle.incubate.asp.get_mask_2d_best(x, 2, 4) + self.assertTrue(paddle.incubate.asp.check_mask_2d(x, 2, 4)) x = np.random.randn(5, 4) - x = paddle.fluid.contrib.sparsity.get_mask_2d_best(x, 2, 4) - self.assertTrue( - paddle.fluid.contrib.sparsity.check_mask_2d(x, 2, 4) - ) + x = paddle.incubate.asp.get_mask_2d_best(x, 2, 4) + self.assertTrue(paddle.incubate.asp.check_mask_2d(x, 2, 4)) def test_threadsafe_valid_2d_patterns(self): def get_reference(m=4, n=2): @@ -154,12 +142,12 @@ class TestASPUtils(unittest.TestCase): for _ in range(4): computing_thread = threading.Thread( - target=paddle.fluid.contrib.sparsity.utils._compute_valid_2d_patterns, + target=paddle.incubate.asp.utils._compute_valid_2d_patterns, args=(2, 4), ) computing_thread.start() time.sleep(3) - patterns_map = paddle.fluid.contrib.sparsity.utils._valid_2d_patterns + patterns_map = 
paddle.incubate.asp.utils._valid_2d_patterns reference_patterns = get_reference() reference_key = '4_2' @@ -202,66 +190,66 @@ class TestASPUtils(unittest.TestCase): self.__test_1D_2D_sparse_mask_generation_methods(x) def __test_1D_2D_sparsity_checking_methods(self, x_2d): - mask = paddle.fluid.contrib.sparsity.get_mask_1d(x_2d, 2, 4) + mask = paddle.incubate.asp.get_mask_1d(x_2d, 2, 4) self.assertEqual( - paddle.fluid.contrib.sparsity.check_sparsity( + paddle.incubate.asp.check_sparsity( mask, - func_name=paddle.fluid.contrib.sparsity.CheckMethod.CHECK_1D, + func_name=paddle.incubate.asp.CheckMethod.CHECK_1D, n=2, m=4, ), - paddle.fluid.contrib.sparsity.check_mask_1d(mask, 2, 4), + paddle.incubate.asp.check_mask_1d(mask, 2, 4), ) - mask = paddle.fluid.contrib.sparsity.get_mask_2d_best(x_2d, 2, 4) + mask = paddle.incubate.asp.get_mask_2d_best(x_2d, 2, 4) self.assertEqual( - paddle.fluid.contrib.sparsity.check_sparsity( + paddle.incubate.asp.check_sparsity( mask, - func_name=paddle.fluid.contrib.sparsity.CheckMethod.CHECK_2D, + func_name=paddle.incubate.asp.CheckMethod.CHECK_2D, n=2, m=4, ), - paddle.fluid.contrib.sparsity.check_mask_2d(mask, 2, 4), + paddle.incubate.asp.check_mask_2d(mask, 2, 4), ) def __test_1D_2D_sparse_mask_generation_methods(self, x): - mask = paddle.fluid.contrib.sparsity.create_mask( + mask = paddle.incubate.asp.create_mask( x, - func_name=paddle.fluid.contrib.sparsity.MaskAlgo.MASK_1D, + func_name=paddle.incubate.asp.MaskAlgo.MASK_1D, n=2, m=4, ) self.assertTrue( - paddle.fluid.contrib.sparsity.check_sparsity( + paddle.incubate.asp.check_sparsity( mask, - func_name=paddle.fluid.contrib.sparsity.CheckMethod.CHECK_1D, + func_name=paddle.incubate.asp.CheckMethod.CHECK_1D, n=2, m=4, ) ) - mask = paddle.fluid.contrib.sparsity.create_mask( + mask = paddle.incubate.asp.create_mask( x, - func_name=paddle.fluid.contrib.sparsity.MaskAlgo.MASK_2D_GREEDY, + func_name=paddle.incubate.asp.MaskAlgo.MASK_2D_GREEDY, n=2, m=4, ) self.assertTrue( - paddle.fluid.contrib.sparsity.check_sparsity( + paddle.incubate.asp.check_sparsity( mask, - func_name=paddle.fluid.contrib.sparsity.CheckMethod.CHECK_2D, + func_name=paddle.incubate.asp.CheckMethod.CHECK_2D, n=2, m=4, ) ) - mask = paddle.fluid.contrib.sparsity.create_mask( + mask = paddle.incubate.asp.create_mask( x, - func_name=paddle.fluid.contrib.sparsity.MaskAlgo.MASK_2D_BEST, + func_name=paddle.incubate.asp.MaskAlgo.MASK_2D_BEST, n=2, m=4, ) self.assertTrue( - paddle.fluid.contrib.sparsity.check_sparsity( + paddle.incubate.asp.check_sparsity( mask, - func_name=paddle.fluid.contrib.sparsity.CheckMethod.CHECK_2D, + func_name=paddle.incubate.asp.CheckMethod.CHECK_2D, n=2, m=4, ) diff --git a/python/paddle/fluid/tests/unittests/asp/test_fleet_with_asp_dynamic.py b/python/paddle/fluid/tests/unittests/asp/test_fleet_with_asp_dynamic.py index a6dc2368bd9cdc34e953f8ce444064f7f1e9db2d..a2c0545edf3ae35d6f778250fb8316ecbca560ed 100644 --- a/python/paddle/fluid/tests/unittests/asp/test_fleet_with_asp_dynamic.py +++ b/python/paddle/fluid/tests/unittests/asp/test_fleet_with_asp_dynamic.py @@ -21,7 +21,7 @@ import numpy as np import paddle import paddle.distributed.fleet as fleet import paddle.fluid.core as core -from paddle.fluid.contrib.sparsity.asp import ASPHelper +from paddle.incubate.asp import ASPHelper cuda_visible_devices = os.getenv('CUDA_VISIBLE_DEVICES') if cuda_visible_devices is None or cuda_visible_devices == "": @@ -98,15 +98,11 @@ class TestFleetWithASPDynamic(unittest.TestCase): len(param.shape) == 2 and param.shape[0] < 4 ): 
self.assertFalse( - paddle.fluid.contrib.sparsity.check_sparsity( - mat.T, n=2, m=4 - ) + paddle.incubate.asp.check_sparsity(mat.T, n=2, m=4) ) else: self.assertTrue( - paddle.fluid.contrib.sparsity.check_sparsity( - mat.T, n=2, m=4 - ) + paddle.incubate.asp.check_sparsity(mat.T, n=2, m=4) ) @@ -169,15 +165,11 @@ class TestFleetWithASPAMPDynamic(unittest.TestCase): len(param.shape) == 2 and param.shape[0] < 4 ): self.assertFalse( - paddle.fluid.contrib.sparsity.check_sparsity( - mat.T, n=2, m=4 - ) + paddle.incubate.asp.check_sparsity(mat.T, n=2, m=4) ) else: self.assertTrue( - paddle.fluid.contrib.sparsity.check_sparsity( - mat.T, n=2, m=4 - ) + paddle.incubate.asp.check_sparsity(mat.T, n=2, m=4) ) diff --git a/python/paddle/fluid/tests/unittests/asp/test_fleet_with_asp_sharding.py b/python/paddle/fluid/tests/unittests/asp/test_fleet_with_asp_sharding.py index 7a5b86bd8a21a2707ca54be69c4bb2c5c16b57db..9c8fc3be0e9c5c9f44e56f75db0ef141487f3275 100644 --- a/python/paddle/fluid/tests/unittests/asp/test_fleet_with_asp_sharding.py +++ b/python/paddle/fluid/tests/unittests/asp/test_fleet_with_asp_sharding.py @@ -21,8 +21,8 @@ import numpy as np import paddle import paddle.distributed.fleet as fleet import paddle.fluid as fluid -from paddle.fluid.contrib.sparsity.asp import ASPHelper -from paddle.static import sparsity +from paddle.incubate import asp as sparsity +from paddle.incubate.asp import ASPHelper cuda_visible_devices = os.getenv('CUDA_VISIBLE_DEVICES') if cuda_visible_devices is None or cuda_visible_devices == "": @@ -122,15 +122,11 @@ class TestFleetWithASPSharding(unittest.TestCase): len(param.shape) == 2 and param.shape[0] < 4 ): self.assertFalse( - paddle.fluid.contrib.sparsity.check_sparsity( - mat.T, n=2, m=4 - ) + paddle.incubate.asp.check_sparsity(mat.T, n=2, m=4) ) else: self.assertTrue( - paddle.fluid.contrib.sparsity.check_sparsity( - mat.T, n=2, m=4 - ) + paddle.incubate.asp.check_sparsity(mat.T, n=2, m=4) ) diff --git a/python/paddle/fluid/tests/unittests/asp/test_fleet_with_asp_static.py b/python/paddle/fluid/tests/unittests/asp/test_fleet_with_asp_static.py index b15acd8416c59d667d2f9279564864a3fd0e8dd1..e1121cfcfcea72d1a1f1963eec36036ae7211c80 100644 --- a/python/paddle/fluid/tests/unittests/asp/test_fleet_with_asp_static.py +++ b/python/paddle/fluid/tests/unittests/asp/test_fleet_with_asp_static.py @@ -21,8 +21,8 @@ import numpy as np import paddle import paddle.distributed.fleet as fleet import paddle.fluid as fluid -from paddle.fluid.contrib.sparsity.asp import ASPHelper -from paddle.static import sparsity +from paddle.incubate import asp as sparsity +from paddle.incubate.asp import ASPHelper cuda_visible_devices = os.getenv('CUDA_VISIBLE_DEVICES') if cuda_visible_devices is None or cuda_visible_devices == "": @@ -99,15 +99,11 @@ class TestFleetWithASPStatic(unittest.TestCase): len(param.shape) == 2 and param.shape[0] < 4 ): self.assertFalse( - paddle.fluid.contrib.sparsity.check_sparsity( - mat.T, n=2, m=4 - ) + paddle.incubate.asp.check_sparsity(mat.T, n=2, m=4) ) else: self.assertTrue( - paddle.fluid.contrib.sparsity.check_sparsity( - mat.T, n=2, m=4 - ) + paddle.incubate.asp.check_sparsity(mat.T, n=2, m=4) ) @@ -180,15 +176,11 @@ class TestFleetWithASPAMPStatic(unittest.TestCase): len(param.shape) == 2 and param.shape[0] < 4 ): self.assertFalse( - paddle.fluid.contrib.sparsity.check_sparsity( - mat.T, n=2, m=4 - ) + paddle.incubate.asp.check_sparsity(mat.T, n=2, m=4) ) else: self.assertTrue( - paddle.fluid.contrib.sparsity.check_sparsity( - mat.T, n=2, m=4 - ) + 
paddle.incubate.asp.check_sparsity(mat.T, n=2, m=4) ) def test_with_asp_and_pure_fp16(self): @@ -237,15 +229,11 @@ class TestFleetWithASPAMPStatic(unittest.TestCase): len(param.shape) == 2 and param.shape[0] < 4 ): self.assertFalse( - paddle.fluid.contrib.sparsity.check_sparsity( - mat.T, n=2, m=4 - ) + paddle.incubate.asp.check_sparsity(mat.T, n=2, m=4) ) else: self.assertTrue( - paddle.fluid.contrib.sparsity.check_sparsity( - mat.T, n=2, m=4 - ) + paddle.incubate.asp.check_sparsity(mat.T, n=2, m=4) ) diff --git a/python/paddle/incubate/asp/__init__.py b/python/paddle/incubate/asp/__init__.py index 662e24900fce821ab67d2a206aa8a326975273db..9e6af7e94c139a2699deab44a241054c5741f325 100644 --- a/python/paddle/incubate/asp/__init__.py +++ b/python/paddle/incubate/asp/__init__.py @@ -13,11 +13,26 @@ # See the License for the specific language governing permissions and # limitations under the License. -from ...fluid.contrib.sparsity import calculate_density # noqa: F401 -from ...fluid.contrib.sparsity import decorate # noqa: F401 -from ...fluid.contrib.sparsity import prune_model # noqa: F401 -from ...fluid.contrib.sparsity import set_excluded_layers # noqa: F401 -from ...fluid.contrib.sparsity import reset_excluded_layers # noqa: F401 + +from .utils import check_mask_1d # noqa: F401 +from .utils import get_mask_1d # noqa: F401 +from .utils import check_mask_2d # noqa: F401 +from .utils import get_mask_2d_greedy # noqa: F401 +from .utils import get_mask_2d_best # noqa: F401 +from .utils import create_mask # noqa: F401 +from .utils import check_sparsity # noqa: F401 +from .utils import MaskAlgo # noqa: F401 +from .utils import CheckMethod # noqa: F401 +from .utils import calculate_density # noqa: F401 + +from .asp import decorate # noqa: F401 +from .asp import prune_model # noqa: F401 +from .asp import set_excluded_layers # noqa: F401 +from .asp import reset_excluded_layers # noqa: F401 +from .asp import ASPHelper # noqa: F401 + +from .supported_layer_list import add_supported_layer # noqa: F401 + __all__ = [ # noqa 'calculate_density', @@ -25,4 +40,5 @@ __all__ = [ # noqa 'prune_model', 'set_excluded_layers', 'reset_excluded_layers', + 'add_supported_layer', ] diff --git a/python/paddle/fluid/contrib/sparsity/asp.py b/python/paddle/incubate/asp/asp.py similarity index 96% rename from python/paddle/fluid/contrib/sparsity/asp.py rename to python/paddle/incubate/asp/asp.py index fda41705373c80adcd6e591080050742b8497235..df1c81bffe83507e3442fae89fd952538f397b07 100644 --- a/python/paddle/fluid/contrib/sparsity/asp.py +++ b/python/paddle/incubate/asp/asp.py @@ -16,29 +16,26 @@ Functions for Auto SParsity (ASP) training and inference. 
""" -import os import copy +import os + import numpy as np + import paddle +from paddle.fluid import core, global_scope, program_guard from paddle.fluid.framework import dygraph_only -from paddle.fluid import global_scope, program_guard, layers from paddle.fluid.initializer import ConstantInitializer -from paddle.fluid.contrib import sparsity -from paddle.fluid import core -from paddle.fluid.contrib.sparsity.supported_layer_list import ( +from paddle.incubate import asp + +from .supported_layer_list import ( + _default_pruning, supported_layers_and_prune_func_map, ) -from paddle.fluid.contrib.sparsity.supported_layer_list import _default_pruning OpRole = core.op_proto_and_checker_maker.OpRole OP_ROLE_KEY = core.op_proto_and_checker_maker.kOpRoleAttrName() -__all__ = [ - 'decorate', - 'prune_model', - 'set_excluded_layers', - 'reset_excluded_layers', -] +__all__ = [] def set_excluded_layers(param_names, main_program=None): @@ -164,7 +161,7 @@ def reset_excluded_layers(main_program=None): # Need to set excluded layers before calling decorate paddle.incubate.asp.set_excluded_layers([my_layer.linear1.full_name()]) # Reset excluded_layers, all supported layers would be included into Automatic SParsity's workflow. - # Please note, reset_excluded_layers also must be called before calling sparsity.decorate(). + # Please note, reset_excluded_layers also must be called before calling asp.decorate(). paddle.incubate.asp.reset_excluded_layers() optimizer = paddle.incubate.asp.decorate(optimizer) @@ -441,9 +438,9 @@ def prune_model(model, n=2, m=4, mask_algo='mask_1d', with_mask=True): place = paddle.set_device(device) MaskAlgo_mapping = { - 'mask_1d': sparsity.MaskAlgo.MASK_1D, - 'mask_2d_greedy': sparsity.MaskAlgo.MASK_2D_GREEDY, - 'mask_2d_best': sparsity.MaskAlgo.MASK_2D_BEST, + 'mask_1d': asp.MaskAlgo.MASK_1D, + 'mask_2d_greedy': asp.MaskAlgo.MASK_2D_GREEDY, + 'mask_2d_best': asp.MaskAlgo.MASK_2D_BEST, } assert ( mask_algo in MaskAlgo_mapping @@ -532,7 +529,7 @@ class ASPHelper: @classmethod def set_excluded_layers(cls, param_names, main_program): r""" - This is the implementation of `sparsity.set_excluded_layers`, for details please see explanation in `sparsity.set_excluded_layers`. + This is the implementation of `asp.set_excluded_layers`, for details please see explanation in `asp.set_excluded_layers`. """ asp_info = cls._get_program_asp_info(main_program) asp_info.update_excluded_layers(param_names) @@ -540,7 +537,7 @@ class ASPHelper: @classmethod def reset_excluded_layers(cls, main_program=None): r""" - This is the implementation of `sparsity.reset_excluded_layers`, for details please see explanation in `sparsity.reset_excluded_layers`. + This is the implementation of `asp.reset_excluded_layers`, for details please see explanation in `asp.reset_excluded_layers`. """ if main_program is None: for prog in cls.__asp_info: @@ -551,7 +548,7 @@ class ASPHelper: @staticmethod def decorate(optimizer): r""" - This is the implementation of `sparsity.decorate`, for details please see explanation in `sparsity.decorate`. + This is the implementation of `asp.decorate`, for details please see explanation in `asp.decorate`. """ if paddle.in_dynamic_mode(): # main_prog and startup_prog would be used with paddle.static.program_guard @@ -572,11 +569,11 @@ class ASPHelper: main_program=None, n=2, m=4, - mask_algo=sparsity.MaskAlgo.MASK_1D, + mask_algo=asp.MaskAlgo.MASK_1D, with_mask=True, ): r""" - This is the implementation of `sparsity.prune_model`, for details please see explanation in `sparsity.prune_model`. 
+ This is the implementation of `asp.prune_model`, for details please see explanation in `asp.prune_model`. """ if main_program is None: @@ -604,7 +601,7 @@ class ASPHelper: ) assert weight_mask_param is not None, ( 'Cannot find {} variable, please call optimizer.minimize (' - 'paddle.sparsity.decorate(optimizer).minimize(loss)' + 'paddle.incubate.asp.decorate(optimizer).minimize(loss)' ' and initialization (exe.run(startup_program)) first!'.format( ASPHelper._get_mask_name(param.name) ) @@ -624,11 +621,11 @@ class ASPHelper: layer, n=2, m=4, - mask_algo=sparsity.MaskAlgo.MASK_1D, + mask_algo=asp.MaskAlgo.MASK_1D, with_mask=True, ): r""" - This is the implementation of `sparsity.prune_model`, for details please see explanation in `sparsity.prune_model`. + This is the implementation of `asp.prune_model`, for details please see explanation in `asp.prune_model`. """ if paddle.in_dynamic_mode(): main_program = paddle.static.default_main_program() @@ -654,7 +651,7 @@ class ASPHelper: param.name, None ) assert weight_mask_param is not None, ( - 'Cannot find {} variable, please call sparsity.decorate() to' + 'Cannot find {} variable, please call asp.decorate() to' ' decorate your optimizer first!'.format( ASPHelper._get_mask_name(param.name) ) @@ -730,7 +727,7 @@ class ASPHelper: Examples: .. code-block:: python - from paddle.static.sparsity.asp import ASPHelper + from paddle.incubate.asp import ASPHelper main_program = paddle.static.Program() startup_program = paddle.static.Program() diff --git a/python/paddle/fluid/contrib/sparsity/supported_layer_list.py b/python/paddle/incubate/asp/supported_layer_list.py similarity index 93% rename from python/paddle/fluid/contrib/sparsity/supported_layer_list.py rename to python/paddle/incubate/asp/supported_layer_list.py index b0b64f27eccc1ec5849f43ea1a57d584354e1527..a987f73446e1de1af6f4962607287872db5c28f1 100644 --- a/python/paddle/fluid/contrib/sparsity/supported_layer_list.py +++ b/python/paddle/incubate/asp/supported_layer_list.py @@ -13,15 +13,17 @@ # See the License for the specific language governing permissions and # limitations under the License. -import numpy as np -import paddle import copy -from paddle.fluid.contrib import sparsity -import threading import logging -from ...log_helper import get_logger +import threading + +import numpy as np + +import paddle +from paddle.fluid.log_helper import get_logger +from paddle.incubate import asp -__all__ = ['add_supported_layer'] +__all__ = [] _logger = get_logger( __name__, logging.INFO, fmt='%(asctime)s-%(levelname)s: %(message)s' @@ -51,7 +53,7 @@ def _default_pruning(weight_nparray, m, n, func_name, param_name): ) return weight_pruned_nparray, weight_sparse_mask - checked_func_name = sparsity.CheckMethod.get_checking_method(func_name) + checked_func_name = asp.CheckMethod.get_checking_method(func_name) # The double transpose ops here make sure pruning direction consistent with cuSparseLt. # SPMMA in cuSparseLt: D = (AxB) + C, where matrix A (mxk) is sparse matrix. @@ -61,14 +63,14 @@ def _default_pruning(weight_nparray, m, n, func_name, param_name): # is 'Act(XW + b)'. For enabling SPMMA, weights and inputs should be transposed # for computing, Act( (W^T X^T)^T + b). Therefore, we have to prune alog k dimension # of W^T, which is m dimension of W. Moreove, all mask generating functions in - # sparsity/utils is row-major pruning. That is the reason we have to transpose weight + # asp/utils is row-major pruning. That is the reason we have to transpose weight # matrices beforce invoking create_mask. 
Then we transpose the result mask to make # sure its shape to be the same as the input weight. - weight_sparse_mask = sparsity.create_mask( + weight_sparse_mask = asp.create_mask( weight_nparray.T, func_name=func_name, n=n, m=m ).T weight_pruned_nparray = np.multiply(weight_nparray, weight_sparse_mask) - assert sparsity.check_sparsity( + assert asp.check_sparsity( weight_pruned_nparray.T, n=n, m=m, func_name=checked_func_name ), 'Pruning {} weight matrix failure!!!'.format(param_name) return weight_pruned_nparray, weight_sparse_mask diff --git a/python/paddle/fluid/contrib/sparsity/utils.py b/python/paddle/incubate/asp/utils.py similarity index 96% rename from python/paddle/fluid/contrib/sparsity/utils.py rename to python/paddle/incubate/asp/utils.py index b9a5c0a7b31dafddd657234890602d4aeb3dd186..684b4a933c63683064b4571c5e56beec63add73b 100644 --- a/python/paddle/fluid/contrib/sparsity/utils.py +++ b/python/paddle/incubate/asp/utils.py @@ -16,26 +16,15 @@ Utilities of Auto SParsity (ASP). """ -import sys -import math import collections -import numpy as np +import sys +import threading from enum import Enum from itertools import permutations -import threading -__all__ = [ - 'calculate_density', - 'check_mask_1d', - 'get_mask_1d', - 'check_mask_2d', - 'get_mask_2d_greedy', - 'get_mask_2d_best', - 'create_mask', - 'check_sparsity', - 'MaskAlgo', - 'CheckMethod', -] +import numpy as np + +__all__ = [] class MaskAlgo(Enum): @@ -69,8 +58,7 @@ class CheckMethod(Enum): .. code-block:: python import numpy as np - from paddle.static.sparsity import MaskAlgo - from paddle.fluid.contrib.sparsity import CheckMethod + from paddle.incubate.asp import CheckMethod, MaskAlgo CheckMethod.get_checking_method(MaskAlgo.MASK_1D) # CheckMethod.CHECK_1D @@ -162,7 +150,7 @@ def check_mask_1d(mat, n, m): .. code-block:: python import numpy as np - import paddle.fluid.contrib.sparsity as sparsity + import paddle.incubate.asp as sparsity x = np.array([[0, 1, 3, 0], [1, 0, 0, 1]]) @@ -206,7 +194,7 @@ def get_mask_1d(mat, n, m): .. code-block:: python import numpy as np - import paddle.fluid.contrib.sparsity as sparsity + import paddle.incubate.asp as sparsity mat = np.array([[0, 1, 5, 4], [2, 7, 3, 6]]) @@ -290,7 +278,7 @@ def check_mask_2d(mat, n, m): .. code-block:: python import numpy as np - import paddle.fluid.contrib.sparsity as sparsity + import paddle.incubate.asp as sparsity x = np.array([[0, 8, 9, 0], [9, 0, 0, 10], @@ -341,7 +329,7 @@ def get_mask_2d_greedy(mat, n, m): .. code-block:: python import numpy as np - import paddle.fluid.contrib.sparsity as sparsity + import paddle.incubate.asp as sparsity mat = np.array([[9, 8, 3, 7], [9, 2, 1, 10], @@ -456,7 +444,7 @@ def get_mask_2d_best(mat, n, m): .. code-block:: python import numpy as np - import paddle.fluid.contrib.sparsity as sparsity + import paddle.incubate.asp as sparsity mat = np.array([[2, 8, 9, 9], [9, 1, 3, 9], @@ -505,7 +493,7 @@ def create_mask(tensor, func_name=MaskAlgo.MASK_1D, n=2, m=4): .. code-block:: python import numpy as np - import paddle.fluid.contrib.sparsity as sparsity + import paddle.incubate.asp as sparsity tensor = np.array([[2, 8, 9, 9], [9, 1, 3, 9], @@ -574,7 +562,7 @@ def check_sparsity(tensor, func_name=CheckMethod.CHECK_1D, n=2, m=4): .. 
code-block:: python import numpy as np - import paddle.fluid.contrib.sparsity as sparsity + import paddle.incubate.asp as sparsity tensor = np.array([[2, 8, 9, 9], [9, 1, 3, 9], diff --git a/python/paddle/static/__init__.py b/python/paddle/static/__init__.py index 0692e2bc69e8596e6b7be20db369b7de43c35422..631d17eb548b067f6cfbbe14ee6622d1655ad1d5 100644 --- a/python/paddle/static/__init__.py +++ b/python/paddle/static/__init__.py @@ -14,7 +14,6 @@ # limitations under the License. from . import amp # noqa: F401 -from . import sparsity # noqa: F401 from . import nn # noqa: F401 from .nn.common import py_func # noqa: F401 diff --git a/python/paddle/static/sparsity/__init__.py b/python/paddle/static/sparsity/__init__.py deleted file mode 100644 index 37b48132bda900bea7385c0d9ce12597344f9775..0000000000000000000000000000000000000000 --- a/python/paddle/static/sparsity/__init__.py +++ /dev/null @@ -1,37 +0,0 @@ -# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved. -# Copyright (c) 2021 NVIDIA Corporation. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from ...fluid.contrib.sparsity import calculate_density # noqa: F401 -from ...fluid.contrib.sparsity import decorate # noqa: F401 -from ...fluid.contrib.sparsity import prune_model # noqa: F401 -from ...fluid.contrib.sparsity import reset_excluded_layers # noqa: F401 -from ...fluid.contrib.sparsity import add_supported_layer # noqa: F401 -from ...fluid.contrib import sparsity # noqa: F401 - - -def set_excluded_layers(main_program, param_names): - sparsity.set_excluded_layers( - param_names=param_names, main_program=main_program - ) - - -__all__ = [ # noqa - 'calculate_density', - 'decorate', - 'prune_model', - 'set_excluded_layers', - 'reset_excluded_layers', - 'add_supported_layer', -] diff --git a/python/setup.py.in b/python/setup.py.in index faa994d744747a91b105947301e15c69c625094a..e1884e51bf6b801734eb49d422b37ca58a12f0f7 100755 --- a/python/setup.py.in +++ b/python/setup.py.in @@ -346,7 +346,6 @@ packages=['paddle', 'paddle.fluid.contrib.mixed_precision', 'paddle.fluid.contrib.mixed_precision.bf16', 'paddle.fluid.contrib.layers', - 'paddle.fluid.contrib.sparsity', 'paddle.fluid.transpiler', 'paddle.fluid.transpiler.details', 'paddle.fluid.incubate', @@ -404,7 +403,6 @@ packages=['paddle', 'paddle.static', 'paddle.static.nn', 'paddle.static.amp', - 'paddle.static.sparsity', 'paddle.tensor', 'paddle.onnx', 'paddle.autograd', diff --git a/setup.py b/setup.py index a1ad5e6ecd9334e8c5ec51878a30bec732fd01ed..6abd61c002220caeae0bdd7b8e5fcc3d6cec61de 100644 --- a/setup.py +++ b/setup.py @@ -1207,7 +1207,6 @@ def get_setup_parameters(): 'paddle.fluid.contrib.mixed_precision', 'paddle.fluid.contrib.mixed_precision.bf16', 'paddle.fluid.contrib.layers', - 'paddle.fluid.contrib.sparsity', 'paddle.fluid.transpiler', 'paddle.fluid.transpiler.details', 'paddle.fluid.incubate', @@ -1265,7 +1264,6 @@ def get_setup_parameters(): 'paddle.static', 'paddle.static.nn', 'paddle.static.amp', - 'paddle.static.sparsity', 'paddle.tensor', 
'paddle.onnx', 'paddle.autograd',
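
Note (not part of the patch): the snippet below is a minimal migration sketch for code that imported from the removed paddle.fluid.contrib.sparsity or paddle.static.sparsity modules. It only uses names that the new python/paddle/incubate/asp/__init__.py above re-exports (get_mask_1d, check_mask_1d, check_sparsity, calculate_density), and the sample matrix is taken from the docstring examples moved into python/paddle/incubate/asp/utils.py; exact behavior should be confirmed against a Paddle build that contains this change.

import numpy as np

# Old path (removed by this patch): import paddle.fluid.contrib.sparsity as sparsity
import paddle.incubate.asp as sparsity

# 2:4 structured-sparsity mask utilities, called the same way the relocated
# docstring examples and the updated unit tests call them.
mat = np.array([[0, 1, 5, 4],
                [2, 7, 3, 6]])
mask = sparsity.get_mask_1d(mat, 2, 4)       # keep the 2 largest entries in every group of 4
assert sparsity.check_mask_1d(mask, 2, 4)    # mask satisfies the 2:4 pattern
assert sparsity.check_sparsity(mask, n=2, m=4)
print(sparsity.calculate_density(mask))      # 0.5 for a 2:4 mask

The training-time entry points follow the same rename: paddle.incubate.asp.decorate(optimizer) and paddle.incubate.asp.prune_model(...) replace their paddle.fluid.contrib.sparsity counterparts, as the updated unit tests in this diff show.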