未验证 提交 b51a752f 编写于 作者: G GGBond8488 提交者: GitHub

remove paddle.static.sparsity and move out sparsity from fluid (#48450)

* move out sparsity from fluid

* fix typo

* fix circular import error

* fix circular import

* fix circular import

* fix conflicts

* remove paddle.static.sparsity

* remove paddle.asp

* Undo history modifications

* remove sparsity in static package

* modify setup.py

* add missing modifications on unittest files

* remove redundant sparsity hierarchy

* fix unittest file
上级 65c17315
......@@ -12,7 +12,8 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
from paddle.fluid.contrib.sparsity.asp import ASPHelper
from paddle.incubate.asp import ASPHelper
from .meta_optimizer_base import MetaOptimizerBase
......
......@@ -31,8 +31,6 @@ from . import layers
from .layers import *
from . import optimizer
from .optimizer import *
from . import sparsity
from .sparsity import *
__all__ = []
......@@ -43,4 +41,3 @@ __all__ += extend_optimizer.__all__
__all__ += ['mixed_precision']
__all__ += layers.__all__
__all__ += optimizer.__all__
__all__ += sparsity.__all__
# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
# Copyright (c) 2021 NVIDIA Corporation. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from .utils import calculate_density
from .utils import check_mask_1d
from .utils import get_mask_1d
from .utils import check_mask_2d
from .utils import get_mask_2d_greedy
from .utils import get_mask_2d_best
from .utils import create_mask
from .utils import check_sparsity
from .utils import MaskAlgo
from .utils import CheckMethod
from .asp import decorate
from .asp import prune_model
from .asp import set_excluded_layers
from .asp import reset_excluded_layers
from .supported_layer_list import add_supported_layer
__all__ = [
'calculate_density',
'check_mask_1d',
'get_mask_1d',
'check_mask_2d',
'get_mask_2d_greedy',
'get_mask_2d_best',
'create_mask',
'check_sparsity',
'MaskAlgo',
'CheckMethod',
'decorate',
'prune_model',
'set_excluded_layers',
'reset_excluded_layers',
'add_supported_layer',
]
......@@ -20,7 +20,7 @@ import numpy as np
import paddle
import paddle.fluid as fluid
import paddle.fluid.core as core
from paddle.fluid.contrib.sparsity.asp import ASPHelper
from paddle.incubate.asp import ASPHelper
paddle.enable_static()
......@@ -94,7 +94,7 @@ class TestASPHelperPruningBase(unittest.TestCase):
fluid.global_scope().find_var(param.name).get_tensor()
)
self.assertTrue(
paddle.fluid.contrib.sparsity.check_sparsity(
paddle.incubate.asp.check_sparsity(
mat.T, func_name=check_func_name, n=2, m=4
)
)
......@@ -20,11 +20,11 @@ import numpy as np
import paddle
import paddle.fluid as fluid
import paddle.fluid.core as core
from paddle.fluid.contrib import sparsity
from paddle.fluid.contrib.sparsity.supported_layer_list import (
from paddle.fluid.dygraph.layers import Layer, _convert_camel_to_snake
from paddle.incubate import asp as sparsity
from paddle.incubate.asp.supported_layer_list import (
supported_layers_and_prune_func_map,
)
from paddle.fluid.dygraph.layers import Layer, _convert_camel_to_snake
class MyOwnLayer(Layer):
......@@ -251,9 +251,7 @@ class TestASPStaticCustomerizedPruneFunc(unittest.TestCase):
len(param.shape) == 2 and param.shape[0] < 4
):
self.assertFalse(
paddle.fluid.contrib.sparsity.check_sparsity(
mat.T, n=2, m=4
)
paddle.incubate.asp.check_sparsity(mat.T, n=2, m=4)
)
else:
self.assertTrue(
......
......@@ -19,7 +19,7 @@ import numpy as np
import paddle
import paddle.fluid.core as core
from paddle.fluid.contrib.sparsity.asp import ASPHelper
from paddle.incubate.asp import ASPHelper
class MyLayer(paddle.nn.Layer):
......@@ -180,15 +180,11 @@ class TestASPDynamicOptimize(unittest.TestCase):
len(param.shape) == 2 and param.shape[0] < 4
):
self.assertFalse(
paddle.fluid.contrib.sparsity.check_sparsity(
mat.T, n=2, m=4
)
paddle.incubate.asp.check_sparsity(mat.T, n=2, m=4)
)
else:
self.assertTrue(
paddle.fluid.contrib.sparsity.check_sparsity(
mat.T, n=2, m=4
)
paddle.incubate.asp.check_sparsity(mat.T, n=2, m=4)
)
def test_asp_training_with_amp(self):
......@@ -229,16 +225,12 @@ class TestASPDynamicOptimize(unittest.TestCase):
len(param.shape) == 2 and param.shape[0] < 4
):
self.assertFalse(
paddle.fluid.contrib.sparsity.check_sparsity(
mat.T, n=2, m=4
)
paddle.incubate.asp.check_sparsity(mat.T, n=2, m=4)
)
else:
self.assertTrue(
paddle.fluid.contrib.sparsity.check_sparsity(
mat.T, n=2, m=4
)
paddle.incubate.asp.check_sparsity(mat.T, n=2, m=4)
)
......
......@@ -20,7 +20,7 @@ import numpy as np
import paddle
import paddle.fluid as fluid
import paddle.fluid.core as core
from paddle.fluid.contrib.sparsity.asp import ASPHelper
from paddle.incubate.asp import ASPHelper
paddle.enable_static()
......@@ -202,15 +202,11 @@ class TestASPStaticOptimize(unittest.TestCase):
len(param.shape) == 2 and param.shape[0] < 4
):
self.assertFalse(
paddle.fluid.contrib.sparsity.check_sparsity(
mat.T, n=2, m=4
)
paddle.incubate.asp.check_sparsity(mat.T, n=2, m=4)
)
else:
self.assertTrue(
paddle.fluid.contrib.sparsity.check_sparsity(
mat.T, n=2, m=4
)
paddle.incubate.asp.check_sparsity(mat.T, n=2, m=4)
)
def test_asp_training_with_amp(self):
......@@ -248,15 +244,11 @@ class TestASPStaticOptimize(unittest.TestCase):
len(param.shape) == 2 and param.shape[0] < 4
):
self.assertFalse(
paddle.fluid.contrib.sparsity.check_sparsity(
mat.T, n=2, m=4
)
paddle.incubate.asp.check_sparsity(mat.T, n=2, m=4)
)
else:
self.assertTrue(
paddle.fluid.contrib.sparsity.check_sparsity(
mat.T, n=2, m=4
)
paddle.incubate.asp.check_sparsity(mat.T, n=2, m=4)
)
def __get_param_names(self, params):
......
......@@ -19,7 +19,7 @@ import numpy as np
import paddle
from paddle.fluid import core
from paddle.fluid.contrib.sparsity.asp import ASPHelper
from paddle.incubate.asp import ASPHelper
class MyLayer(paddle.nn.Layer):
......@@ -58,9 +58,7 @@ class TestASPDynamicPruningBase(unittest.TestCase):
def set_config(self):
self.mask_gen_func = 'mask_1d'
self.mask_check_func = (
paddle.fluid.contrib.sparsity.CheckMethod.CHECK_1D
)
self.mask_check_func = paddle.incubate.asp.CheckMethod.CHECK_1D
def test_inference_pruning(self):
self.__pruning_and_checking(False)
......@@ -89,13 +87,11 @@ class TestASPDynamicPruningBase(unittest.TestCase):
len(param.shape) == 2 and param.shape[0] < 4
):
self.assertFalse(
paddle.fluid.contrib.sparsity.check_sparsity(
mat.T, n=2, m=4
)
paddle.incubate.asp.check_sparsity(mat.T, n=2, m=4)
)
else:
self.assertTrue(
paddle.fluid.contrib.sparsity.check_sparsity(
paddle.incubate.asp.check_sparsity(
mat.T, func_name=self.mask_check_func, n=2, m=4
)
)
......@@ -104,25 +100,19 @@ class TestASPDynamicPruningBase(unittest.TestCase):
class TestASPDynamicPruning1D(TestASPDynamicPruningBase):
def set_config(self):
self.mask_gen_func = 'mask_1d'
self.mask_check_func = (
paddle.fluid.contrib.sparsity.CheckMethod.CHECK_1D
)
self.mask_check_func = paddle.incubate.asp.CheckMethod.CHECK_1D
class TestASPDynamicPruning2DBest(TestASPDynamicPruningBase):
def set_config(self):
self.mask_gen_func = 'mask_2d_best'
self.mask_check_func = (
paddle.fluid.contrib.sparsity.CheckMethod.CHECK_2D
)
self.mask_check_func = paddle.incubate.asp.CheckMethod.CHECK_2D
class TestASPDynamicPruning2DGreedy(TestASPDynamicPruningBase):
def set_config(self):
self.mask_gen_func = 'mask_2d_greedy'
self.mask_check_func = (
paddle.fluid.contrib.sparsity.CheckMethod.CHECK_2D
)
self.mask_check_func = paddle.incubate.asp.CheckMethod.CHECK_2D
if __name__ == '__main__':
......
......@@ -20,7 +20,7 @@ import numpy as np
import paddle
import paddle.fluid as fluid
import paddle.fluid.core as core
from paddle.fluid.contrib.sparsity.asp import ASPHelper
from paddle.incubate.asp import ASPHelper
paddle.enable_static()
......@@ -50,9 +50,7 @@ class TestASPStaticPruningBase(unittest.TestCase):
def set_config(self):
self.mask_gen_func = 'mask_1d'
self.mask_check_func = (
paddle.fluid.contrib.sparsity.CheckMethod.CHECK_1D
)
self.mask_check_func = paddle.incubate.asp.CheckMethod.CHECK_1D
def test_inference_pruning(self):
place = paddle.CPUPlace()
......@@ -98,13 +96,11 @@ class TestASPStaticPruningBase(unittest.TestCase):
len(param.shape) == 2 and param.shape[0] < 4
):
self.assertFalse(
paddle.fluid.contrib.sparsity.check_sparsity(
mat.T, n=2, m=4
)
paddle.incubate.asp.check_sparsity(mat.T, n=2, m=4)
)
else:
self.assertTrue(
paddle.fluid.contrib.sparsity.check_sparsity(
paddle.incubate.asp.check_sparsity(
mat.T, func_name=self.mask_check_func, n=2, m=4
)
)
......@@ -113,25 +109,19 @@ class TestASPStaticPruningBase(unittest.TestCase):
class TestASPStaticPruning1D(TestASPStaticPruningBase):
def set_config(self):
self.mask_gen_func = 'mask_1d'
self.mask_check_func = (
paddle.fluid.contrib.sparsity.CheckMethod.CHECK_1D
)
self.mask_check_func = paddle.incubate.asp.CheckMethod.CHECK_1D
class TestASPStaticPruning2DBest(TestASPStaticPruningBase):
def set_config(self):
self.mask_gen_func = 'mask_2d_best'
self.mask_check_func = (
paddle.fluid.contrib.sparsity.CheckMethod.CHECK_2D
)
self.mask_check_func = paddle.incubate.asp.CheckMethod.CHECK_2D
class TestASPStaticPruning2DGreedy(TestASPStaticPruningBase):
def set_config(self):
self.mask_gen_func = 'mask_2d_greedy'
self.mask_check_func = (
paddle.fluid.contrib.sparsity.CheckMethod.CHECK_2D
)
self.mask_check_func = paddle.incubate.asp.CheckMethod.CHECK_2D
if __name__ == '__main__':
......
......@@ -20,7 +20,7 @@ import numpy as np
import paddle
import paddle.fluid as fluid
import paddle.fluid.core as core
from paddle.fluid.contrib.sparsity.asp import ASPHelper
from paddle.incubate.asp import ASPHelper
class MyLayer(paddle.nn.Layer):
......@@ -112,15 +112,11 @@ class TestASPDynamicOptimize(unittest.TestCase):
len(param.shape) == 2 and param.shape[0] < 4
):
self.assertFalse(
paddle.fluid.contrib.sparsity.check_sparsity(
mat.T, n=2, m=4
)
paddle.incubate.asp.check_sparsity(mat.T, n=2, m=4)
)
else:
self.assertTrue(
paddle.fluid.contrib.sparsity.check_sparsity(
mat.T, n=2, m=4
)
paddle.incubate.asp.check_sparsity(mat.T, n=2, m=4)
)
......@@ -197,15 +193,11 @@ class TestASPStaticOptimize(unittest.TestCase):
len(param.shape) == 2 and param.shape[0] < 4
):
self.assertFalse(
paddle.fluid.contrib.sparsity.check_sparsity(
mat.T, n=2, m=4
)
paddle.incubate.asp.check_sparsity(mat.T, n=2, m=4)
)
else:
self.assertTrue(
paddle.fluid.contrib.sparsity.check_sparsity(
mat.T, n=2, m=4
)
paddle.incubate.asp.check_sparsity(mat.T, n=2, m=4)
)
......
......@@ -25,22 +25,22 @@ import paddle
class TestASPUtils(unittest.TestCase):
def test_get_check_method(self):
self.assertEqual(
paddle.fluid.contrib.sparsity.CheckMethod.get_checking_method(
paddle.fluid.contrib.sparsity.MaskAlgo.MASK_1D
paddle.incubate.asp.CheckMethod.get_checking_method(
paddle.incubate.asp.MaskAlgo.MASK_1D
),
paddle.fluid.contrib.sparsity.CheckMethod.CHECK_1D,
paddle.incubate.asp.CheckMethod.CHECK_1D,
)
self.assertEqual(
paddle.fluid.contrib.sparsity.CheckMethod.get_checking_method(
paddle.fluid.contrib.sparsity.MaskAlgo.MASK_2D_GREEDY
paddle.incubate.asp.CheckMethod.get_checking_method(
paddle.incubate.asp.MaskAlgo.MASK_2D_GREEDY
),
paddle.fluid.contrib.sparsity.CheckMethod.CHECK_2D,
paddle.incubate.asp.CheckMethod.CHECK_2D,
)
self.assertEqual(
paddle.fluid.contrib.sparsity.CheckMethod.get_checking_method(
paddle.fluid.contrib.sparsity.MaskAlgo.MASK_2D_BEST
paddle.incubate.asp.CheckMethod.get_checking_method(
paddle.incubate.asp.MaskAlgo.MASK_2D_BEST
),
paddle.fluid.contrib.sparsity.CheckMethod.CHECK_2D,
paddle.incubate.asp.CheckMethod.CHECK_2D,
)
def test_density(self):
......@@ -67,26 +67,22 @@ class TestASPUtils(unittest.TestCase):
[0.0, 1.0, 0.0, 0.0, 1.0],
]
)
self.assertTrue(paddle.fluid.contrib.sparsity.check_mask_1d(x, 2, 4))
self.assertFalse(paddle.fluid.contrib.sparsity.check_mask_1d(x, 3, 4))
self.assertTrue(paddle.fluid.contrib.sparsity.check_mask_1d(x, 2, 5))
self.assertFalse(paddle.fluid.contrib.sparsity.check_mask_1d(x, 3, 5))
self.assertTrue(paddle.fluid.contrib.sparsity.check_mask_1d(x, 3, 6))
self.assertFalse(paddle.fluid.contrib.sparsity.check_mask_1d(x, 4, 6))
self.assertTrue(paddle.incubate.asp.check_mask_1d(x, 2, 4))
self.assertFalse(paddle.incubate.asp.check_mask_1d(x, 3, 4))
self.assertTrue(paddle.incubate.asp.check_mask_1d(x, 2, 5))
self.assertFalse(paddle.incubate.asp.check_mask_1d(x, 3, 5))
self.assertTrue(paddle.incubate.asp.check_mask_1d(x, 3, 6))
self.assertFalse(paddle.incubate.asp.check_mask_1d(x, 4, 6))
def test_get_mask_1d(self):
for _ in range(10):
x = np.random.randint(10, size=(5, 5))
x = paddle.fluid.contrib.sparsity.get_mask_1d(x, 2, 4)
self.assertTrue(
paddle.fluid.contrib.sparsity.check_mask_1d(x, 2, 4)
)
x = paddle.incubate.asp.get_mask_1d(x, 2, 4)
self.assertTrue(paddle.incubate.asp.check_mask_1d(x, 2, 4))
x = np.random.randn(5, 4)
x = paddle.fluid.contrib.sparsity.get_mask_1d(x, 2, 4)
self.assertTrue(
paddle.fluid.contrib.sparsity.check_mask_1d(x, 2, 4)
)
x = paddle.incubate.asp.get_mask_1d(x, 2, 4)
self.assertTrue(paddle.incubate.asp.check_mask_1d(x, 2, 4))
def test_check_mask_2d(self):
x = np.array(
......@@ -98,40 +94,32 @@ class TestASPUtils(unittest.TestCase):
[0.0, 1.0, 0.0, 0.0, 1.0],
]
)
self.assertTrue(paddle.fluid.contrib.sparsity.check_mask_2d(x, 2, 4))
self.assertFalse(paddle.fluid.contrib.sparsity.check_mask_2d(x, 3, 4))
self.assertTrue(paddle.fluid.contrib.sparsity.check_mask_2d(x, 2, 5))
self.assertFalse(paddle.fluid.contrib.sparsity.check_mask_2d(x, 3, 5))
self.assertTrue(paddle.fluid.contrib.sparsity.check_mask_2d(x, 3, 6))
self.assertFalse(paddle.fluid.contrib.sparsity.check_mask_2d(x, 4, 6))
self.assertTrue(paddle.incubate.asp.check_mask_2d(x, 2, 4))
self.assertFalse(paddle.incubate.asp.check_mask_2d(x, 3, 4))
self.assertTrue(paddle.incubate.asp.check_mask_2d(x, 2, 5))
self.assertFalse(paddle.incubate.asp.check_mask_2d(x, 3, 5))
self.assertTrue(paddle.incubate.asp.check_mask_2d(x, 3, 6))
self.assertFalse(paddle.incubate.asp.check_mask_2d(x, 4, 6))
def test_get_mask_2d_greedy(self):
for _ in range(10):
x = np.random.randint(10, size=(5, 5))
x = paddle.fluid.contrib.sparsity.get_mask_2d_greedy(x, 2, 4)
self.assertTrue(
paddle.fluid.contrib.sparsity.check_mask_2d(x, 2, 4)
)
x = paddle.incubate.asp.get_mask_2d_greedy(x, 2, 4)
self.assertTrue(paddle.incubate.asp.check_mask_2d(x, 2, 4))
x = np.random.randn(5, 4)
x = paddle.fluid.contrib.sparsity.get_mask_2d_greedy(x, 2, 4)
self.assertTrue(
paddle.fluid.contrib.sparsity.check_mask_2d(x, 2, 4)
)
x = paddle.incubate.asp.get_mask_2d_greedy(x, 2, 4)
self.assertTrue(paddle.incubate.asp.check_mask_2d(x, 2, 4))
def test_get_mask_2d_best(self):
for _ in range(10):
x = np.random.randint(10, size=(5, 5))
x = paddle.fluid.contrib.sparsity.get_mask_2d_best(x, 2, 4)
self.assertTrue(
paddle.fluid.contrib.sparsity.check_mask_2d(x, 2, 4)
)
x = paddle.incubate.asp.get_mask_2d_best(x, 2, 4)
self.assertTrue(paddle.incubate.asp.check_mask_2d(x, 2, 4))
x = np.random.randn(5, 4)
x = paddle.fluid.contrib.sparsity.get_mask_2d_best(x, 2, 4)
self.assertTrue(
paddle.fluid.contrib.sparsity.check_mask_2d(x, 2, 4)
)
x = paddle.incubate.asp.get_mask_2d_best(x, 2, 4)
self.assertTrue(paddle.incubate.asp.check_mask_2d(x, 2, 4))
def test_threadsafe_valid_2d_patterns(self):
def get_reference(m=4, n=2):
......@@ -154,12 +142,12 @@ class TestASPUtils(unittest.TestCase):
for _ in range(4):
computing_thread = threading.Thread(
target=paddle.fluid.contrib.sparsity.utils._compute_valid_2d_patterns,
target=paddle.incubate.asp.utils._compute_valid_2d_patterns,
args=(2, 4),
)
computing_thread.start()
time.sleep(3)
patterns_map = paddle.fluid.contrib.sparsity.utils._valid_2d_patterns
patterns_map = paddle.incubate.asp.utils._valid_2d_patterns
reference_patterns = get_reference()
reference_key = '4_2'
......@@ -202,66 +190,66 @@ class TestASPUtils(unittest.TestCase):
self.__test_1D_2D_sparse_mask_generation_methods(x)
def __test_1D_2D_sparsity_checking_methods(self, x_2d):
mask = paddle.fluid.contrib.sparsity.get_mask_1d(x_2d, 2, 4)
mask = paddle.incubate.asp.get_mask_1d(x_2d, 2, 4)
self.assertEqual(
paddle.fluid.contrib.sparsity.check_sparsity(
paddle.incubate.asp.check_sparsity(
mask,
func_name=paddle.fluid.contrib.sparsity.CheckMethod.CHECK_1D,
func_name=paddle.incubate.asp.CheckMethod.CHECK_1D,
n=2,
m=4,
),
paddle.fluid.contrib.sparsity.check_mask_1d(mask, 2, 4),
paddle.incubate.asp.check_mask_1d(mask, 2, 4),
)
mask = paddle.fluid.contrib.sparsity.get_mask_2d_best(x_2d, 2, 4)
mask = paddle.incubate.asp.get_mask_2d_best(x_2d, 2, 4)
self.assertEqual(
paddle.fluid.contrib.sparsity.check_sparsity(
paddle.incubate.asp.check_sparsity(
mask,
func_name=paddle.fluid.contrib.sparsity.CheckMethod.CHECK_2D,
func_name=paddle.incubate.asp.CheckMethod.CHECK_2D,
n=2,
m=4,
),
paddle.fluid.contrib.sparsity.check_mask_2d(mask, 2, 4),
paddle.incubate.asp.check_mask_2d(mask, 2, 4),
)
def __test_1D_2D_sparse_mask_generation_methods(self, x):
mask = paddle.fluid.contrib.sparsity.create_mask(
mask = paddle.incubate.asp.create_mask(
x,
func_name=paddle.fluid.contrib.sparsity.MaskAlgo.MASK_1D,
func_name=paddle.incubate.asp.MaskAlgo.MASK_1D,
n=2,
m=4,
)
self.assertTrue(
paddle.fluid.contrib.sparsity.check_sparsity(
paddle.incubate.asp.check_sparsity(
mask,
func_name=paddle.fluid.contrib.sparsity.CheckMethod.CHECK_1D,
func_name=paddle.incubate.asp.CheckMethod.CHECK_1D,
n=2,
m=4,
)
)
mask = paddle.fluid.contrib.sparsity.create_mask(
mask = paddle.incubate.asp.create_mask(
x,
func_name=paddle.fluid.contrib.sparsity.MaskAlgo.MASK_2D_GREEDY,
func_name=paddle.incubate.asp.MaskAlgo.MASK_2D_GREEDY,
n=2,
m=4,
)
self.assertTrue(
paddle.fluid.contrib.sparsity.check_sparsity(
paddle.incubate.asp.check_sparsity(
mask,
func_name=paddle.fluid.contrib.sparsity.CheckMethod.CHECK_2D,
func_name=paddle.incubate.asp.CheckMethod.CHECK_2D,
n=2,
m=4,
)
)
mask = paddle.fluid.contrib.sparsity.create_mask(
mask = paddle.incubate.asp.create_mask(
x,
func_name=paddle.fluid.contrib.sparsity.MaskAlgo.MASK_2D_BEST,
func_name=paddle.incubate.asp.MaskAlgo.MASK_2D_BEST,
n=2,
m=4,
)
self.assertTrue(
paddle.fluid.contrib.sparsity.check_sparsity(
paddle.incubate.asp.check_sparsity(
mask,
func_name=paddle.fluid.contrib.sparsity.CheckMethod.CHECK_2D,
func_name=paddle.incubate.asp.CheckMethod.CHECK_2D,
n=2,
m=4,
)
......
......@@ -21,7 +21,7 @@ import numpy as np
import paddle
import paddle.distributed.fleet as fleet
import paddle.fluid.core as core
from paddle.fluid.contrib.sparsity.asp import ASPHelper
from paddle.incubate.asp import ASPHelper
cuda_visible_devices = os.getenv('CUDA_VISIBLE_DEVICES')
if cuda_visible_devices is None or cuda_visible_devices == "":
......@@ -98,15 +98,11 @@ class TestFleetWithASPDynamic(unittest.TestCase):
len(param.shape) == 2 and param.shape[0] < 4
):
self.assertFalse(
paddle.fluid.contrib.sparsity.check_sparsity(
mat.T, n=2, m=4
)
paddle.incubate.asp.check_sparsity(mat.T, n=2, m=4)
)
else:
self.assertTrue(
paddle.fluid.contrib.sparsity.check_sparsity(
mat.T, n=2, m=4
)
paddle.incubate.asp.check_sparsity(mat.T, n=2, m=4)
)
......@@ -169,15 +165,11 @@ class TestFleetWithASPAMPDynamic(unittest.TestCase):
len(param.shape) == 2 and param.shape[0] < 4
):
self.assertFalse(
paddle.fluid.contrib.sparsity.check_sparsity(
mat.T, n=2, m=4
)
paddle.incubate.asp.check_sparsity(mat.T, n=2, m=4)
)
else:
self.assertTrue(
paddle.fluid.contrib.sparsity.check_sparsity(
mat.T, n=2, m=4
)
paddle.incubate.asp.check_sparsity(mat.T, n=2, m=4)
)
......
......@@ -21,8 +21,8 @@ import numpy as np
import paddle
import paddle.distributed.fleet as fleet
import paddle.fluid as fluid
from paddle.fluid.contrib.sparsity.asp import ASPHelper
from paddle.static import sparsity
from paddle.incubate import asp as sparsity
from paddle.incubate.asp import ASPHelper
cuda_visible_devices = os.getenv('CUDA_VISIBLE_DEVICES')
if cuda_visible_devices is None or cuda_visible_devices == "":
......@@ -122,15 +122,11 @@ class TestFleetWithASPSharding(unittest.TestCase):
len(param.shape) == 2 and param.shape[0] < 4
):
self.assertFalse(
paddle.fluid.contrib.sparsity.check_sparsity(
mat.T, n=2, m=4
)
paddle.incubate.asp.check_sparsity(mat.T, n=2, m=4)
)
else:
self.assertTrue(
paddle.fluid.contrib.sparsity.check_sparsity(
mat.T, n=2, m=4
)
paddle.incubate.asp.check_sparsity(mat.T, n=2, m=4)
)
......
......@@ -21,8 +21,8 @@ import numpy as np
import paddle
import paddle.distributed.fleet as fleet
import paddle.fluid as fluid
from paddle.fluid.contrib.sparsity.asp import ASPHelper
from paddle.static import sparsity
from paddle.incubate import asp as sparsity
from paddle.incubate.asp import ASPHelper
cuda_visible_devices = os.getenv('CUDA_VISIBLE_DEVICES')
if cuda_visible_devices is None or cuda_visible_devices == "":
......@@ -99,15 +99,11 @@ class TestFleetWithASPStatic(unittest.TestCase):
len(param.shape) == 2 and param.shape[0] < 4
):
self.assertFalse(
paddle.fluid.contrib.sparsity.check_sparsity(
mat.T, n=2, m=4
)
paddle.incubate.asp.check_sparsity(mat.T, n=2, m=4)
)
else:
self.assertTrue(
paddle.fluid.contrib.sparsity.check_sparsity(
mat.T, n=2, m=4
)
paddle.incubate.asp.check_sparsity(mat.T, n=2, m=4)
)
......@@ -180,15 +176,11 @@ class TestFleetWithASPAMPStatic(unittest.TestCase):
len(param.shape) == 2 and param.shape[0] < 4
):
self.assertFalse(
paddle.fluid.contrib.sparsity.check_sparsity(
mat.T, n=2, m=4
)
paddle.incubate.asp.check_sparsity(mat.T, n=2, m=4)
)
else:
self.assertTrue(
paddle.fluid.contrib.sparsity.check_sparsity(
mat.T, n=2, m=4
)
paddle.incubate.asp.check_sparsity(mat.T, n=2, m=4)
)
def test_with_asp_and_pure_fp16(self):
......@@ -237,15 +229,11 @@ class TestFleetWithASPAMPStatic(unittest.TestCase):
len(param.shape) == 2 and param.shape[0] < 4
):
self.assertFalse(
paddle.fluid.contrib.sparsity.check_sparsity(
mat.T, n=2, m=4
)
paddle.incubate.asp.check_sparsity(mat.T, n=2, m=4)
)
else:
self.assertTrue(
paddle.fluid.contrib.sparsity.check_sparsity(
mat.T, n=2, m=4
)
paddle.incubate.asp.check_sparsity(mat.T, n=2, m=4)
)
......
......@@ -13,11 +13,26 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from ...fluid.contrib.sparsity import calculate_density # noqa: F401
from ...fluid.contrib.sparsity import decorate # noqa: F401
from ...fluid.contrib.sparsity import prune_model # noqa: F401
from ...fluid.contrib.sparsity import set_excluded_layers # noqa: F401
from ...fluid.contrib.sparsity import reset_excluded_layers # noqa: F401
from .utils import check_mask_1d # noqa: F401
from .utils import get_mask_1d # noqa: F401
from .utils import check_mask_2d # noqa: F401
from .utils import get_mask_2d_greedy # noqa: F401
from .utils import get_mask_2d_best # noqa: F401
from .utils import create_mask # noqa: F401
from .utils import check_sparsity # noqa: F401
from .utils import MaskAlgo # noqa: F401
from .utils import CheckMethod # noqa: F401
from .utils import calculate_density # noqa: F401
from .asp import decorate # noqa: F401
from .asp import prune_model # noqa: F401
from .asp import set_excluded_layers # noqa: F401
from .asp import reset_excluded_layers # noqa: F401
from .asp import ASPHelper # noqa: F401
from .supported_layer_list import add_supported_layer # noqa: F401
__all__ = [ # noqa
'calculate_density',
......@@ -25,4 +40,5 @@ __all__ = [ # noqa
'prune_model',
'set_excluded_layers',
'reset_excluded_layers',
'add_supported_layer',
]
......@@ -16,29 +16,26 @@
Functions for Auto SParsity (ASP) training and inference.
"""
import os
import copy
import os
import numpy as np
import paddle
from paddle.fluid import core, global_scope, program_guard
from paddle.fluid.framework import dygraph_only
from paddle.fluid import global_scope, program_guard, layers
from paddle.fluid.initializer import ConstantInitializer
from paddle.fluid.contrib import sparsity
from paddle.fluid import core
from paddle.fluid.contrib.sparsity.supported_layer_list import (
from paddle.incubate import asp
from .supported_layer_list import (
_default_pruning,
supported_layers_and_prune_func_map,
)
from paddle.fluid.contrib.sparsity.supported_layer_list import _default_pruning
OpRole = core.op_proto_and_checker_maker.OpRole
OP_ROLE_KEY = core.op_proto_and_checker_maker.kOpRoleAttrName()
__all__ = [
'decorate',
'prune_model',
'set_excluded_layers',
'reset_excluded_layers',
]
__all__ = []
def set_excluded_layers(param_names, main_program=None):
......@@ -164,7 +161,7 @@ def reset_excluded_layers(main_program=None):
# Need to set excluded layers before calling decorate
paddle.incubate.asp.set_excluded_layers([my_layer.linear1.full_name()])
# Reset excluded_layers, all supported layers would be included into Automatic SParsity's workflow.
# Please note, reset_excluded_layers also must be called before calling sparsity.decorate().
# Please note, reset_excluded_layers also must be called before calling asp.decorate().
paddle.incubate.asp.reset_excluded_layers()
optimizer = paddle.incubate.asp.decorate(optimizer)
......@@ -441,9 +438,9 @@ def prune_model(model, n=2, m=4, mask_algo='mask_1d', with_mask=True):
place = paddle.set_device(device)
MaskAlgo_mapping = {
'mask_1d': sparsity.MaskAlgo.MASK_1D,
'mask_2d_greedy': sparsity.MaskAlgo.MASK_2D_GREEDY,
'mask_2d_best': sparsity.MaskAlgo.MASK_2D_BEST,
'mask_1d': asp.MaskAlgo.MASK_1D,
'mask_2d_greedy': asp.MaskAlgo.MASK_2D_GREEDY,
'mask_2d_best': asp.MaskAlgo.MASK_2D_BEST,
}
assert (
mask_algo in MaskAlgo_mapping
......@@ -532,7 +529,7 @@ class ASPHelper:
@classmethod
def set_excluded_layers(cls, param_names, main_program):
r"""
This is the implementation of `sparsity.set_excluded_layers`, for details please see explanation in `sparsity.set_excluded_layers`.
This is the implementation of `asp.set_excluded_layers`, for details please see explanation in `asp.set_excluded_layers`.
"""
asp_info = cls._get_program_asp_info(main_program)
asp_info.update_excluded_layers(param_names)
......@@ -540,7 +537,7 @@ class ASPHelper:
@classmethod
def reset_excluded_layers(cls, main_program=None):
r"""
This is the implementation of `sparsity.reset_excluded_layers`, for details please see explanation in `sparsity.reset_excluded_layers`.
This is the implementation of `asp.reset_excluded_layers`, for details please see explanation in `asp.reset_excluded_layers`.
"""
if main_program is None:
for prog in cls.__asp_info:
......@@ -551,7 +548,7 @@ class ASPHelper:
@staticmethod
def decorate(optimizer):
r"""
This is the implementation of `sparsity.decorate`, for details please see explanation in `sparsity.decorate`.
This is the implementation of `asp.decorate`, for details please see explanation in `asp.decorate`.
"""
if paddle.in_dynamic_mode():
# main_prog and startup_prog would be used with paddle.static.program_guard
......@@ -572,11 +569,11 @@ class ASPHelper:
main_program=None,
n=2,
m=4,
mask_algo=sparsity.MaskAlgo.MASK_1D,
mask_algo=asp.MaskAlgo.MASK_1D,
with_mask=True,
):
r"""
This is the implementation of `sparsity.prune_model`, for details please see explanation in `sparsity.prune_model`.
This is the implementation of `asp.prune_model`, for details please see explanation in `asp.prune_model`.
"""
if main_program is None:
......@@ -604,7 +601,7 @@ class ASPHelper:
)
assert weight_mask_param is not None, (
'Cannot find {} variable, please call optimizer.minimize ('
'paddle.sparsity.decorate(optimizer).minimize(loss)'
'paddle.incubate.asp.decorate(optimizer).minimize(loss)'
' and initialization (exe.run(startup_program)) first!'.format(
ASPHelper._get_mask_name(param.name)
)
......@@ -624,11 +621,11 @@ class ASPHelper:
layer,
n=2,
m=4,
mask_algo=sparsity.MaskAlgo.MASK_1D,
mask_algo=asp.MaskAlgo.MASK_1D,
with_mask=True,
):
r"""
This is the implementation of `sparsity.prune_model`, for details please see explanation in `sparsity.prune_model`.
This is the implementation of `asp.prune_model`, for details please see explanation in `asp.prune_model`.
"""
if paddle.in_dynamic_mode():
main_program = paddle.static.default_main_program()
......@@ -654,7 +651,7 @@ class ASPHelper:
param.name, None
)
assert weight_mask_param is not None, (
'Cannot find {} variable, please call sparsity.decorate() to'
'Cannot find {} variable, please call asp.decorate() to'
' decorate your optimizer first!'.format(
ASPHelper._get_mask_name(param.name)
)
......@@ -730,7 +727,7 @@ class ASPHelper:
Examples:
.. code-block:: python
from paddle.static.sparsity.asp import ASPHelper
from paddle.incubate.asp import ASPHelper
main_program = paddle.static.Program()
startup_program = paddle.static.Program()
......
......@@ -13,15 +13,17 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import numpy as np
import paddle
import copy
from paddle.fluid.contrib import sparsity
import threading
import logging
from ...log_helper import get_logger
import threading
import numpy as np
import paddle
from paddle.fluid.log_helper import get_logger
from paddle.incubate import asp
__all__ = ['add_supported_layer']
__all__ = []
_logger = get_logger(
__name__, logging.INFO, fmt='%(asctime)s-%(levelname)s: %(message)s'
......@@ -51,7 +53,7 @@ def _default_pruning(weight_nparray, m, n, func_name, param_name):
)
return weight_pruned_nparray, weight_sparse_mask
checked_func_name = sparsity.CheckMethod.get_checking_method(func_name)
checked_func_name = asp.CheckMethod.get_checking_method(func_name)
# The double transpose ops here make sure pruning direction consistent with cuSparseLt.
# SPMMA in cuSparseLt: D = (AxB) + C, where matrix A (mxk) is sparse matrix.
......@@ -61,14 +63,14 @@ def _default_pruning(weight_nparray, m, n, func_name, param_name):
# is 'Act(XW + b)'. For enabling SPMMA, weights and inputs should be transposed
# for computing, Act( (W^T X^T)^T + b). Therefore, we have to prune alog k dimension
# of W^T, which is m dimension of W. Moreove, all mask generating functions in
# sparsity/utils is row-major pruning. That is the reason we have to transpose weight
# asp/utils is row-major pruning. That is the reason we have to transpose weight
# matrices beforce invoking create_mask. Then we transpose the result mask to make
# sure its shape to be the same as the input weight.
weight_sparse_mask = sparsity.create_mask(
weight_sparse_mask = asp.create_mask(
weight_nparray.T, func_name=func_name, n=n, m=m
).T
weight_pruned_nparray = np.multiply(weight_nparray, weight_sparse_mask)
assert sparsity.check_sparsity(
assert asp.check_sparsity(
weight_pruned_nparray.T, n=n, m=m, func_name=checked_func_name
), 'Pruning {} weight matrix failure!!!'.format(param_name)
return weight_pruned_nparray, weight_sparse_mask
......
......@@ -16,26 +16,15 @@
Utilities of Auto SParsity (ASP).
"""
import sys
import math
import collections
import numpy as np
import sys
import threading
from enum import Enum
from itertools import permutations
import threading
__all__ = [
'calculate_density',
'check_mask_1d',
'get_mask_1d',
'check_mask_2d',
'get_mask_2d_greedy',
'get_mask_2d_best',
'create_mask',
'check_sparsity',
'MaskAlgo',
'CheckMethod',
]
import numpy as np
__all__ = []
class MaskAlgo(Enum):
......@@ -69,8 +58,7 @@ class CheckMethod(Enum):
.. code-block:: python
import numpy as np
from paddle.static.sparsity import MaskAlgo
from paddle.fluid.contrib.sparsity import CheckMethod
from paddle.incubate.asp import CheckMethod, MaskAlgo
CheckMethod.get_checking_method(MaskAlgo.MASK_1D)
# CheckMethod.CHECK_1D
......@@ -162,7 +150,7 @@ def check_mask_1d(mat, n, m):
.. code-block:: python
import numpy as np
import paddle.fluid.contrib.sparsity as sparsity
import paddle.incubate.asp as sparsity
x = np.array([[0, 1, 3, 0],
[1, 0, 0, 1]])
......@@ -206,7 +194,7 @@ def get_mask_1d(mat, n, m):
.. code-block:: python
import numpy as np
import paddle.fluid.contrib.sparsity as sparsity
import paddle.incubate.asp as sparsity
mat = np.array([[0, 1, 5, 4],
[2, 7, 3, 6]])
......@@ -290,7 +278,7 @@ def check_mask_2d(mat, n, m):
.. code-block:: python
import numpy as np
import paddle.fluid.contrib.sparsity as sparsity
import paddle.incubate.asp as sparsity
x = np.array([[0, 8, 9, 0],
[9, 0, 0, 10],
......@@ -341,7 +329,7 @@ def get_mask_2d_greedy(mat, n, m):
.. code-block:: python
import numpy as np
import paddle.fluid.contrib.sparsity as sparsity
import paddle.incubate.asp as sparsity
mat = np.array([[9, 8, 3, 7],
[9, 2, 1, 10],
......@@ -456,7 +444,7 @@ def get_mask_2d_best(mat, n, m):
.. code-block:: python
import numpy as np
import paddle.fluid.contrib.sparsity as sparsity
import paddle.incubate.asp as sparsity
mat = np.array([[2, 8, 9, 9],
[9, 1, 3, 9],
......@@ -505,7 +493,7 @@ def create_mask(tensor, func_name=MaskAlgo.MASK_1D, n=2, m=4):
.. code-block:: python
import numpy as np
import paddle.fluid.contrib.sparsity as sparsity
import paddle.incubate.asp as sparsity
tensor = np.array([[2, 8, 9, 9],
[9, 1, 3, 9],
......@@ -574,7 +562,7 @@ def check_sparsity(tensor, func_name=CheckMethod.CHECK_1D, n=2, m=4):
.. code-block:: python
import numpy as np
import paddle.fluid.contrib.sparsity as sparsity
import paddle.incubate.asp as sparsity
tensor = np.array([[2, 8, 9, 9],
[9, 1, 3, 9],
......
......@@ -14,7 +14,6 @@
# limitations under the License.
from . import amp # noqa: F401
from . import sparsity # noqa: F401
from . import nn # noqa: F401
from .nn.common import py_func # noqa: F401
......
# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
# Copyright (c) 2021 NVIDIA Corporation. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from ...fluid.contrib.sparsity import calculate_density # noqa: F401
from ...fluid.contrib.sparsity import decorate # noqa: F401
from ...fluid.contrib.sparsity import prune_model # noqa: F401
from ...fluid.contrib.sparsity import reset_excluded_layers # noqa: F401
from ...fluid.contrib.sparsity import add_supported_layer # noqa: F401
from ...fluid.contrib import sparsity # noqa: F401
def set_excluded_layers(main_program, param_names):
    """Exclude the named parameters from ASP (automatic sparsity) pruning.

    Thin compatibility wrapper that forwards both arguments, by keyword,
    to ``sparsity.set_excluded_layers``.

    Args:
        main_program: Program whose parameters are being configured.
        param_names: Names of the parameters to exclude from pruning.
    """
    sparsity.set_excluded_layers(
        main_program=main_program, param_names=param_names
    )
# Public, flat API of this module. Most names are re-exported from
# ``paddle.fluid.contrib.sparsity`` (see imports above); ``set_excluded_layers``
# is the local wrapper defined in this file.
__all__ = [ # noqa
    'calculate_density',
    'decorate',
    'prune_model',
    'set_excluded_layers',
    'reset_excluded_layers',
    'add_supported_layer',
]
......@@ -346,7 +346,6 @@ packages=['paddle',
'paddle.fluid.contrib.mixed_precision',
'paddle.fluid.contrib.mixed_precision.bf16',
'paddle.fluid.contrib.layers',
'paddle.fluid.contrib.sparsity',
'paddle.fluid.transpiler',
'paddle.fluid.transpiler.details',
'paddle.fluid.incubate',
......@@ -404,7 +403,6 @@ packages=['paddle',
'paddle.static',
'paddle.static.nn',
'paddle.static.amp',
'paddle.static.sparsity',
'paddle.tensor',
'paddle.onnx',
'paddle.autograd',
......
......@@ -1207,7 +1207,6 @@ def get_setup_parameters():
'paddle.fluid.contrib.mixed_precision',
'paddle.fluid.contrib.mixed_precision.bf16',
'paddle.fluid.contrib.layers',
'paddle.fluid.contrib.sparsity',
'paddle.fluid.transpiler',
'paddle.fluid.transpiler.details',
'paddle.fluid.incubate',
......@@ -1265,7 +1264,6 @@ def get_setup_parameters():
'paddle.static',
'paddle.static.nn',
'paddle.static.amp',
'paddle.static.sparsity',
'paddle.tensor',
'paddle.onnx',
'paddle.autograd',
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册