PaddlePaddle/Paddle, commit 80614429

Automatic SParsity Helper (#33132)

Authored Jun 10, 2021 by Ming-Xu Huang; committed via GitHub on Jun 10, 2021.
Parent: a2256366

Showing 12 changed files with 980 additions and 52 deletions (+980 -52):
python/paddle/fluid/contrib/sparsity/__init__.py                        +18   -3
python/paddle/fluid/contrib/sparsity/asp.py                             +497  -0
python/paddle/fluid/contrib/sparsity/utils.py                           +40   -45
python/paddle/fluid/tests/unittests/CMakeLists.txt                      +2    -0
python/paddle/fluid/tests/unittests/asp/CMakeLists.txt                  +6    -0
python/paddle/fluid/tests/unittests/asp/__init__.py                     +14   -0
python/paddle/fluid/tests/unittests/asp/asp_pruning_base.py             +89   -0
python/paddle/fluid/tests/unittests/asp/test_asp_optimize.py            +202  -0
python/paddle/fluid/tests/unittests/asp/test_asp_pruning_1d.py          +36   -0
python/paddle/fluid/tests/unittests/asp/test_asp_pruning_2d_best.py     +36   -0
python/paddle/fluid/tests/unittests/asp/test_asp_pruning_2d_greedy.py   +36   -0
python/paddle/fluid/tests/unittests/asp/test_asp_utils.py               +4    -4
python/paddle/fluid/contrib/sparsity/__init__.py (view file @ 80614429)
@@ -15,7 +15,22 @@
 from __future__ import print_function
 
-from . import utils
-from .utils import *
+from .utils import calculate_density
+from .utils import check_mask_1d
+from .utils import get_mask_1d
+from .utils import check_mask_2d
+from .utils import get_mask_2d_greedy
+from .utils import get_mask_2d_best
+from .utils import create_mask
+from .utils import check_sparsity
+from .utils import MaskAlgo
+from .utils import CheckMethod
+from .asp import decorate, prune_model
+from .asp import set_excluded_layers, reset_excluded_layers
 
-__all__ = utils.__all__
+__all__ = [
+    'calculate_density', 'check_mask_1d', 'get_mask_1d', 'check_mask_2d',
+    'get_mask_2d_greedy', 'get_mask_2d_best', 'create_mask', 'check_sparsity',
+    'MaskAlgo', 'CheckMethod', 'decorate', 'prune_model',
+    'set_excluded_layers', 'reset_excluded_layers'
+]
python/paddle/fluid/contrib/sparsity/asp.py (new file, mode 100644, view file @ 80614429)
This diff is collapsed in the web view (+497 lines, not reproduced here).
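The collapsed asp.py is where the newly exported helpers (decorate, prune_model, set_excluded_layers, reset_excluded_layers and the internal ASPHelper) live. As a reading aid, here is a minimal sketch of the static-graph workflow they enable, pieced together from the calls exercised in the new unit tests below; the network, the excluded layer name and the hyper-parameters are illustrative only, not part of the commit:

import paddle
import paddle.fluid as fluid
from paddle.fluid.contrib import sparsity

paddle.enable_static()

main_prog, startup_prog = fluid.Program(), fluid.Program()
with fluid.program_guard(main_prog, startup_prog):
    img = fluid.data(name='img', shape=[None, 3, 32, 32], dtype='float32')
    label = fluid.data(name='label', shape=[None, 1], dtype='int64')
    hidden = fluid.layers.fc(input=img, size=32, act='relu')
    predict = fluid.layers.fc(input=hidden, size=10, act='softmax')
    loss = fluid.layers.mean(
        fluid.layers.cross_entropy(input=predict, label=label))

    # Optionally keep selected layers dense (layer name here is hypothetical).
    sparsity.set_excluded_layers(main_prog, ['fc_1'])

    # Wrap the optimizer so ASP adds the mask variables and masking ops.
    optimizer = sparsity.decorate(fluid.optimizer.SGD(learning_rate=0.01))
    optimizer.minimize(loss, startup_prog)

place = paddle.CUDAPlace(0) if fluid.core.is_compiled_with_cuda() \
    else paddle.CPUPlace()
exe = fluid.Executor(place)
exe.run(startup_prog)

# Prune supported weights (and their masks) to the default 2:4 pattern.
sparsity.prune_model(place, main_prog)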
python/paddle/fluid/contrib/sparsity/utils.py (view file @ 80614429)
@@ -27,7 +27,7 @@ from itertools import permutations
 import threading
 
 __all__ = [
-    'density', 'check_mask_1d', 'get_mask_1d', 'check_mask_2d',
+    'calculate_density', 'check_mask_1d', 'get_mask_1d', 'check_mask_2d',
     'get_mask_2d_greedy', 'get_mask_2d_best', 'create_mask', 'check_sparsity',
     'MaskAlgo', 'CheckMethod'
 ]
@@ -75,7 +75,7 @@ class CheckMethod(Enum):
               CheckMethod.get_checking_method(MaskAlgo.MASK_2D_BEST)
               # CheckMethod.CHECK_2D
         """
-        assert type(mask_algo) == MaskAlgo, \
+        assert isinstance(mask_algo, MaskAlgo), \
               "mask_algo should be MaskAlgo type"
         if mask_algo == MaskAlgo.MASK_1D:
             return CheckMethod.CHECK_1D
@@ -83,7 +83,7 @@ class CheckMethod(Enum):
             return CheckMethod.CHECK_2D
 
 
-def density(x):
+def calculate_density(x):
     r"""
     Return the density of the input tensor.

@@ -99,15 +99,15 @@ def density(x):
           x = np.array([[0, 1, 3, 0],
                         [1, 1, 0, 1]])
-          sparsity.density(x) # 0.625
+          sparsity.calculate_density(x) # 0.625
     """
     x_flattened = x.flatten()
     return float(np.nonzero(x_flattened)[0].size) / x_flattened.size
 
 
-def reshape_1d(mat, m):
+def _reshape_1d(mat, m):
     r"""
-    Reshape the input matrix to shape (-1, m).
+    Reshape the input 2D matrix to shape (-1, m).
     If the second dimension of :attr:`mat` is not a multiples of :attr:`m`,
     then this function would pad the remainder with 0 before reshaping.
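Reviewer note: the rename is purely cosmetic; the density is still the fraction of non-zero entries, as a plain-numpy re-run of the docstring example confirms:

import numpy as np

x = np.array([[0, 1, 3, 0],
              [1, 1, 0, 1]])
# 5 non-zero entries out of 8 -> 0.625, the value quoted for calculate_density(x)
print(float(np.count_nonzero(x)) / x.size)  # 0.625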
@@ -116,11 +116,13 @@ def reshape_1d(mat, m):
           remainder = mat.shape[1] % m
     Args:
-        mat (nparray): The input matrix.
+        mat (nparray): The input 2D matrix.
         m (int): The second dimension of reshaped matrix.
     Returns:
         tuple: A pair of the reshaped and padded matrix and the shape of padded matrix (non-reshaping).
     """
+    assert len(mat.shape) == 2, "The input mat should be a 2D matrix!"
+
     remainder = mat.shape[1] % m
     if mat.shape[1] % m > 0:
         mat_padded = np.zeros((mat.shape[0], mat.shape[1] + (m - remainder)))
@@ -165,9 +167,9 @@ def check_mask_1d(mat, n, m):
           sparsity.check_mask_1d(x, 2, 4) # True
     """
     if len(mat.shape) <= 1:
-        mat_flattern, shape = reshape_1d(mat.reshape(1, mat.shape[0]), m)
+        mat_flattern, shape = _reshape_1d(mat.reshape(1, mat.shape[0]), m)
     else:
-        mat_flattern, shape = reshape_1d(mat, m)
+        mat_flattern, shape = _reshape_1d(mat, m)
 
     for sub_mat in mat_flattern:
         if np.nonzero(sub_mat)[0].size > (m - n):
@@ -202,7 +204,7 @@ def get_mask_1d(mat, n, m):
           #        [0, 1, 0, 1]])
           sparsity.check_mask_1d(mask, 2, 4) # True
     """
-    mat_flattern, shape = reshape_1d(mat, m)
+    mat_flattern, shape = _reshape_1d(mat, m)
 
     mask_flattern = np.ones_like(mat_flattern)
     mask = np.ones_like(mat)
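For readers new to n:m sparsity: get_mask_1d and check_mask_1d operate on groups of m consecutive elements per row and keep at most n of them. A standalone numpy sketch of the 2:4 case (conceptual only, not the library implementation, and assuming the row length is a multiple of 4):

import numpy as np

def mask_1d_2of4(row):
    groups = row.reshape(-1, 4)          # split the row into groups of four
    mask = np.zeros_like(groups)
    top2 = np.argsort(np.abs(groups), axis=1)[:, -2:]  # two largest magnitudes
    np.put_along_axis(mask, top2, 1, axis=1)
    return mask.reshape(row.shape)

row = np.array([0.1, -2.0, 0.3, 4.0, 1.0, 0.0, -0.5, 2.5])
print(mask_1d_2of4(row))  # [0. 1. 0. 1. 1. 0. 0. 1.]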
@@ -215,9 +217,9 @@ def get_mask_1d(mat, n, m):
     return mask
 
 
-def reshape_2d(mat, m):
+def _reshape_2d(mat, m):
     r"""
-    Reshape the input matrix to shape (-1, :math:`m \times m`).
+    Reshape the input 2D matrix to shape (-1, :math:`m \times m`).
     In each dimension of :attr:`mat`, if it is not a multiples of :attr:`m`,
     then this function would pad the remainder with 0 before reshaping.

@@ -227,11 +229,13 @@ def reshape_2d(mat, m):
           remainder_1 = mat.shape[1] % m
     Args:
-        mat (nparray): The input matrix.
+        mat (nparray): The input 2D matrix.
         m (int): The square root of second dimension of reshaped matrix.
     Returns:
         tuple: A pair of the reshaped and padded matrix and the shape of padded matrix (non-reshaping).
     """
+    assert len(mat.shape) == 2, "The input mat should be a 2D matrix!"
+
     remainder_0 = mat.shape[0] % m
     remainder_1 = mat.shape[1] % m
@@ -297,7 +301,7 @@ def check_mask_2d(mat, n, m):
                         [1, 1, 0, 1]])
           sparsity.check_mask_2d(x, 2, 4) # True
     """
-    mat_padded, shape = reshape_2d(mat, m)
+    mat_padded, shape = _reshape_2d(mat, m)
     for sub_mat in mat_padded:
         sub_mask = np.absolute(np.squeeze(sub_mat.reshape(m, m))) > 0
         if (np.sum(np.sum(sub_mask, axis=1) > (m - n)) != 0) and \
@@ -338,7 +342,7 @@ def get_mask_2d_greedy(mat, n, m):
           #        [0. 1. 1. 0.]])
           sparsity.check_mask_2d(mask, 2, 4) # True
     """
-    mat_padded, shape = reshape_2d(mat, m)
+    mat_padded, shape = _reshape_2d(mat, m)
     mask_padded = np.zeros_like(mat_padded).reshape(-1, m, m)
 
     for idx in range(len(mat_padded)):
@@ -372,11 +376,11 @@ def get_mask_2d_greedy(mat, n, m):
     return mask[:mat.shape[0], :mat.shape[1]]
 
 
-valid_2d_patterns_lock = threading.Lock()
-valid_2d_patterns = {}
+_valid_2d_patterns_lock = threading.Lock()
+_valid_2d_patterns = {}
 
 
-def compute_valid_2d_patterns(n, m):
+def _compute_valid_2d_patterns(n, m):
     r"""
     Compute all vaild 2D `n:m` sparse patterns.
@@ -389,12 +393,12 @@ def compute_valid_2d_patterns(n, m):
     Returns:
         dictionary: A dictionary with key: *m_n* (string) and value: all vaild 2D `n:m` sparse patterns.
     """
-    global valid_2d_patterns_lock
-    global valid_2d_patterns
+    global _valid_2d_patterns_lock
+    global _valid_2d_patterns
 
     valid_key = '{}_{}'.format(m, n)
-    if valid_key in valid_2d_patterns:
-        return valid_2d_patterns[valid_key]
+    if valid_key in _valid_2d_patterns:
+        return _valid_2d_patterns[valid_key]
     else:
         patterns = np.zeros(m)
         patterns[:n] = 1

@@ -407,9 +411,9 @@ def compute_valid_2d_patterns(n, m):
         valid_patterns = np.empty((valid.shape[0], m, m))
         valid_patterns[:] = patterns[valid[:]]
 
-        valid_2d_patterns_lock.acquire()
-        valid_2d_patterns[valid_key] = valid_patterns
-        valid_2d_patterns_lock.release()
+        _valid_2d_patterns_lock.acquire()
+        _valid_2d_patterns[valid_key] = valid_patterns
+        _valid_2d_patterns_lock.release()
 
         return valid_patterns
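What the renamed _valid_2d_patterns cache holds, conceptually, is every m x m 0/1 block whose rows and columns each contain exactly n ones. A brute-force sketch of that enumeration (for illustration only; the library builds the same set with a vectorised permutation trick):

import itertools
import numpy as np

def valid_2d_patterns(n, m):
    base = [1] * n + [0] * (m - n)
    rows = sorted(set(itertools.permutations(base)))   # all rows with exactly n ones
    blocks = []
    for combo in itertools.product(rows, repeat=m):    # choose one candidate per row
        block = np.array(combo)
        if (block.sum(axis=0) == n).all():             # columns must also sum to n
            blocks.append(block)
    return np.array(blocks)

print(len(valid_2d_patterns(2, 4)))  # 90 valid 2:4 blocks of shape 4x4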
@@ -446,9 +450,9 @@ def get_mask_2d_best(mat, n, m):
           print("L1 norm of `greedy` sparse matrix", np.multiply(mat, mask_greedy).sum()) # 56
           print("L1 norm of `best` sparse matrix", np.multiply(mat, mask_best).sum()) # 61
     """
-    patterns = compute_valid_2d_patterns(n, m)
+    patterns = _compute_valid_2d_patterns(n, m)
 
-    mat_flattern, shape = reshape_2d(mat, m)
+    mat_flattern, shape = _reshape_2d(mat, m)
     mask_flattern = np.ones_like(mat_flattern).reshape(-1, m, m)
     pmax = np.argmax(
         np.matmul(mat_flattern, patterns.reshape(patterns.shape[0], m * m).T),
@@ -504,30 +508,25 @@ def create_mask(tensor, func_name=MaskAlgo.MASK_1D, n=2, m=4):
     dtype = tensor.dtype
     t = tensor.astype(float)
 
-    assert type(func_name) == MaskAlgo, \
+    assert isinstance(func_name, MaskAlgo), \
            "func_name argumet of create_mask is only accepted as type MaskAlgo. " \
            "But got {}".format(type(func_name))
     func = getattr(sys.modules[__name__], func_name.value, None)
     if len(shape) == 1:
         t = t.reshape(1, shape[0])
-        mask = func(t, n=n, m=m)
-        return mask.reshape(shape).astype(dtype)
     elif len(shape) == 2:
         t = t.reshape(shape[0], shape[1])
-        mask = func(t, n=n, m=m)
-        return mask.reshape(shape).astype(dtype)
     elif len(shape) == 3:
         t = t.reshape(shape[0] * shape[1], shape[2])
-        mask = func(t, n=n, m=m)
-        return mask.reshape(shape).astype(dtype)
     # 4d-tensor conv (out, in, h, w) -> (out, in*h*w) in GemmConvKernel Op
     elif len(shape) == 4:
         t = t.reshape(shape[0], shape[1] * shape[2] * shape[3])
-        mask = func(t, n=n, m=m)
-        return mask.reshape(shape).astype(dtype)
     else:
-        assert True, "The dimension of input tensor is not supported in create_mask, " \
-                     "Only dimension < 4 is supported but got {}".format(len(shape))
+        raise ValueError("The dimension of input tensor is not supported in create_mask, " \
+                         "Only dimension < 4 is supported but got {}".format(len(shape)))
+
+    mask = func(t, n=n, m=m)
+    return mask.reshape(shape).astype(dtype)
 
 
 def check_sparsity(tensor, func_name=CheckMethod.CHECK_1D, n=2, m=4):
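Reviewer note on the refactor: each branch now only flattens the tensor to 2-D, the mask is computed once after the if/elif chain, and an unsupported rank raises ValueError instead of falling through the old assert True (which never fired). Call sites are unchanged; for example, on a conv-style 4-D weight (the shapes here are illustrative):

import numpy as np
from paddle.fluid.contrib import sparsity

weight = np.random.rand(8, 4, 3, 3).astype('float32')   # (out, in, h, w)
mask = sparsity.create_mask(weight, func_name=sparsity.MaskAlgo.MASK_1D, n=2, m=4)
pruned = weight * mask

print(mask.shape)  # (8, 4, 3, 3)
print(sparsity.check_sparsity(pruned, func_name=sparsity.CheckMethod.CHECK_1D,
                              n=2, m=4))  # True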
@@ -569,19 +568,15 @@ def check_sparsity(tensor, func_name=CheckMethod.CHECK_1D, n=2, m=4):
     func = getattr(sys.modules[__name__], func_name.value, None)
     if len(shape) == 1:
         t = t.reshape(1, shape[0])
-        return func(t, n=n, m=m)
     elif len(shape) == 2:
         t = t.reshape(shape[0], shape[1])
-        return func(t, n=n, m=m)
     elif len(shape) == 3:
         t = t.reshape(shape[0] * shape[1], shape[2])
-        return func(t, n=n, m=m)
     # 4d-tensor conv (out, in, h, w) -> (out, in*h*w) in GemmConvKernel Op
     elif len(shape) == 4:
         t = t.reshape(shape[0], shape[1] * shape[2] * shape[3])
-        return func(t, n=n, m=m)
     else:
-        assert True, "The dimension of input tensor is not supported in check_sparsity, " \
-                     "Only dimension < 4 is supported but got {}".format(len(shape))
+        raise ValueError("The dimension of input tensor is not supported in create_mask, " \
+                         "Only dimension < 4 is supported but got {}".format(len(shape)))
 
-    return False
+    return func(t, n=n, m=m)
python/paddle/fluid/tests/unittests/CMakeLists.txt (view file @ 80614429)
@@ -661,6 +661,8 @@ if (WITH_MKLDNN)
     add_subdirectory(mkldnn)
 endif()
 
+add_subdirectory(asp)
+
 add_subdirectory(ir)
 
 if (WITH_TESTING)
python/paddle/fluid/tests/unittests/asp/CMakeLists.txt (new file, mode 100644, view file @ 80614429)
file(GLOB TEST_OPS RELATIVE "${CMAKE_CURRENT_SOURCE_DIR}" "test_*.py")
string(REPLACE ".py" "" TEST_OPS "${TEST_OPS}")

foreach(TEST_OP ${TEST_OPS})
    py_test_modules(${TEST_OP} MODULES ${TEST_OP})
endforeach(TEST_OP)
python/paddle/fluid/tests/unittests/asp/__init__.py (new file, mode 100644, view file @ 80614429)
# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
# Copyright (c) 2021 NVIDIA Corporation. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
python/paddle/fluid/tests/unittests/asp/asp_pruning_base.py (new file, mode 100644, view file @ 80614429)
# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
# Copyright (c) 2021 NVIDIA Corporation. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function

import unittest
import threading, time
import paddle
import paddle.fluid as fluid
import paddle.fluid.core as core
from paddle.fluid.contrib import sparsity
from paddle.fluid.contrib.sparsity.asp import ASPHelper
import numpy as np

paddle.enable_static()


class TestASPHelperPruningBase(unittest.TestCase):
    def setUp(self):
        self.main_program = fluid.Program()
        self.startup_program = fluid.Program()

        def build_model():
            img = fluid.data(
                name='img', shape=[None, 3, 32, 32], dtype='float32')
            label = fluid.data(name='label', shape=[None, 1], dtype='int64')
            hidden = fluid.layers.conv2d(
                input=img, num_filters=4, filter_size=3, padding=2, act="relu")
            hidden = fluid.layers.fc(input=hidden, size=32, act='relu')
            prediction = fluid.layers.fc(input=hidden, size=10, act='softmax')
            return img, label, prediction

        with fluid.program_guard(self.main_program, self.startup_program):
            self.img, self.label, self.predict = build_model()

    def run_inference_pruning_test(self, get_mask_gen_func,
                                   get_mask_check_func):
        place = paddle.CPUPlace()
        if core.is_compiled_with_cuda():
            place = paddle.CUDAPlace(0)
        exe = fluid.Executor(place)

        self.__pruning_and_checking(exe, place, get_mask_gen_func,
                                    get_mask_check_func, False)

    def run_training_pruning_test(self, get_mask_gen_func,
                                  get_mask_check_func):
        with fluid.program_guard(self.main_program, self.startup_program):
            loss = fluid.layers.mean(
                fluid.layers.cross_entropy(
                    input=self.predict, label=self.label))
            optimizer = sparsity.decorate(
                fluid.optimizer.SGD(learning_rate=0.01))
            optimizer.minimize(loss, self.startup_program)

        place = paddle.CPUPlace()
        if core.is_compiled_with_cuda():
            place = paddle.CUDAPlace(0)
        exe = fluid.Executor(place)

        self.__pruning_and_checking(exe, place, get_mask_gen_func,
                                    get_mask_check_func, True)

    def __pruning_and_checking(self, exe, place, mask_func_name,
                               check_func_name, with_mask):
        exe.run(self.startup_program)
        sparsity.prune_model(
            place,
            self.main_program,
            func_name=mask_func_name,
            with_mask=with_mask)
        for param in self.main_program.global_block().all_parameters():
            if ASPHelper._is_supported_layer(self.main_program, param.name):
                mat = np.array(fluid.global_scope().find_var(param.name)
                               .get_tensor())
                self.assertTrue(
                    sparsity.check_sparsity(
                        mat.T, func_name=check_func_name, n=2, m=4))
python/paddle/fluid/tests/unittests/asp/test_asp_optimize.py (new file, mode 100644, view file @ 80614429)
# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
# Copyright (c) 2021 NVIDIA Corporation. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function

import unittest
import threading, time
import paddle
import paddle.fluid as fluid
import paddle.fluid.core as core
from paddle.fluid.contrib import sparsity
from paddle.fluid.contrib.sparsity.asp import ASPHelper
import numpy as np

paddle.enable_static()


class TestASPHelper(unittest.TestCase):
    def setUp(self):
        self.main_program = fluid.Program()
        self.startup_program = fluid.Program()

        def build_model():
            img = fluid.data(
                name='img', shape=[None, 3, 32, 32], dtype='float32')
            label = fluid.data(name='label', shape=[None, 1], dtype='int64')
            hidden = fluid.layers.conv2d(
                input=img, num_filters=4, filter_size=3, padding=2, act="relu")
            hidden = fluid.layers.fc(input=hidden, size=32, act='relu')
            prediction = fluid.layers.fc(input=hidden, size=10, act='softmax')
            return img, label, prediction

        with fluid.program_guard(self.main_program, self.startup_program):
            self.img, self.label, predict = build_model()
            self.loss = fluid.layers.mean(
                fluid.layers.cross_entropy(input=predict, label=self.label))
            self.optimizer = fluid.optimizer.SGD(learning_rate=0.01)

    def test_get_not_ASP_relevant_vars(self):
        def check_params(params, params_from_asp):
            if len(params_from_asp) != len(params):
                return False

            for i, p in enumerate(params_from_asp):
                if p.name != params[i].name:
                    return False
            return True

        params = self.main_program.global_block().all_parameters()
        params_from_asp = ASPHelper._get_not_ASP_relevant_vars(
            self.main_program)
        self.assertTrue(check_params(params, params_from_asp))

        with fluid.program_guard(self.main_program, self.startup_program):
            ASPHelper._minimize(self.optimizer, self.loss, self.main_program,
                                self.startup_program)
        params_from_asp_after_opt = ASPHelper._get_not_ASP_relevant_vars(
            self.main_program)
        self.assertTrue(check_params(params, params_from_asp_after_opt))

    def test_is_supported_layers(self):
        program = paddle.static.default_main_program()

        names = [
            'embedding_0.w_0', 'fack_layer_0.w_0', 'conv2d_0.w_0',
            'conv2d_0.b_0', 'conv2d_1.w_0', 'conv2d_1.b_0', 'fc_0.w_0',
            'fc_0.b_0', 'fc_1.w_0', 'fc_1.b_0', 'linear_2.w_0', 'linear_2.b_0'
        ]
        ref = [
            False, False, True, False, True, False, True, False, True, False,
            True, False
        ]
        for i, name in enumerate(names):
            self.assertTrue(ref[i] == ASPHelper._is_supported_layer(program,
                                                                    name))

        sparsity.set_excluded_layers(program, ['fc_1', 'conv2d_0'])
        ref = [
            False, False, False, False, True, False, True, False, False,
            False, True, False
        ]
        for i, name in enumerate(names):
            self.assertTrue(ref[i] == ASPHelper._is_supported_layer(program,
                                                                    name))

        sparsity.reset_excluded_layers(program)
        ref = [
            False, False, True, False, True, False, True, False, True, False,
            True, False
        ]
        for i, name in enumerate(names):
            self.assertTrue(ref[i] == ASPHelper._is_supported_layer(program,
                                                                    name))

    def test_decorate(self):
        param_names = self.__get_param_names(self.main_program.global_block()
                                             .all_parameters())
        with fluid.program_guard(self.main_program, self.startup_program):
            self.optimizer = sparsity.decorate(self.optimizer)
            self.optimizer.minimize(self.loss, self.startup_program)
        param_names_after_minimize = self.__get_param_names(
            self.main_program.global_block().all_parameters())

        self.__check_mask_variables_and_ops(param_names,
                                            param_names_after_minimize)

    def test_asp_training(self):
        with fluid.program_guard(self.main_program, self.startup_program):
            self.optimizer = sparsity.decorate(self.optimizer)
            self.optimizer.minimize(self.loss, self.startup_program)

        place = paddle.CPUPlace()
        if core.is_compiled_with_cuda():
            place = paddle.CUDAPlace(0)
        exe = fluid.Executor(place)
        feeder = fluid.DataFeeder(feed_list=[self.img, self.label], place=place)

        exe.run(self.startup_program)
        sparsity.prune_model(place, self.main_program)

        data = (np.random.randn(64, 3, 32, 32), np.random.randint(
            10, size=(64, 1)))
        exe.run(self.main_program, feed=feeder.feed([data]))

        for param in self.main_program.global_block().all_parameters():
            if ASPHelper._is_supported_layer(self.main_program, param.name):
                mat = np.array(fluid.global_scope().find_var(param.name)
                               .get_tensor())
                self.assertTrue(sparsity.check_sparsity(mat.T, n=2, m=4))

    def test_asp_training_with_amp(self):
        if core.is_compiled_with_cuda():
            place = paddle.CUDAPlace(0)
            with fluid.program_guard(self.main_program, self.startup_program):
                self.optimizer = fluid.contrib.mixed_precision.decorator.decorate(
                    self.optimizer)
                self.optimizer = sparsity.decorate(self.optimizer)
                self.optimizer.minimize(self.loss, self.startup_program)

            exe = fluid.Executor(place)
            feeder = fluid.DataFeeder(
                feed_list=[self.img, self.label], place=place)

            exe.run(self.startup_program)
            sparsity.prune_model(place, self.main_program)

            data = (np.random.randn(64, 3, 32, 32), np.random.randint(
                10, size=(64, 1)))
            exe.run(self.main_program, feed=feeder.feed([data]))

            for param in self.main_program.global_block().all_parameters():
                if ASPHelper._is_supported_layer(self.main_program,
                                                 param.name):
                    mat = np.array(fluid.global_scope().find_var(param.name)
                                   .get_tensor())
                    self.assertTrue(sparsity.check_sparsity(mat.T, n=2, m=4))

    def __get_param_names(self, params):
        param_names = []
        for p in params:
            param_names.append(p.name)
        return param_names

    def __check_mask_variables_and_ops(self, param_names,
                                       param_names_after_minimize):
        for n in param_names:
            self.assertFalse(ASPHelper._is_supported_layer(self.main_program, n) and \
               ASPHelper._get_mask_name(n) not in param_names_after_minimize)

        mask_names = []
        for n in param_names:
            if ASPHelper._is_supported_layer(self.main_program, n):
                mask_names.append(ASPHelper._get_mask_name(n))

        masking_ops = []
        for op in self.main_program.global_block().ops:
            if op.type == 'elementwise_mul' and \
               op.input('Y')[0] in mask_names:
                masking_ops.append(op.input('Y')[0])

        self.assertTrue(len(masking_ops) == len(mask_names))
        for n in masking_ops:
            self.assertTrue(n in mask_names)

        for n in mask_names:
            self.assertTrue(n in masking_ops)


if __name__ == '__main__':
    unittest.main()
python/paddle/fluid/tests/unittests/asp/test_asp_pruning_1d.py (new file, mode 100644, view file @ 80614429)
# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
# Copyright (c) 2021 NVIDIA Corporation. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function

import unittest
import paddle
from paddle.fluid.contrib import sparsity
from paddle.fluid.tests.unittests.asp.asp_pruning_base import TestASPHelperPruningBase

paddle.enable_static()


class TestASPHelperPruning1D(TestASPHelperPruningBase):
    def test_1D_inference_pruning(self):
        self.run_inference_pruning_test(sparsity.MaskAlgo.MASK_1D,
                                        sparsity.CheckMethod.CHECK_1D)

    def test_1D_training_pruning(self):
        self.run_training_pruning_test(sparsity.MaskAlgo.MASK_1D,
                                       sparsity.CheckMethod.CHECK_1D)


if __name__ == '__main__':
    unittest.main()
python/paddle/fluid/tests/unittests/asp/test_asp_pruning_2d_best.py (new file, mode 100644, view file @ 80614429)
# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
# Copyright (c) 2021 NVIDIA Corporation. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function

import unittest
import paddle
from paddle.fluid.contrib import sparsity
from paddle.fluid.tests.unittests.asp.asp_pruning_base import TestASPHelperPruningBase

paddle.enable_static()


class TestASPHelperPruning2DBest(TestASPHelperPruningBase):
    def test_2D_best_inference_pruning(self):
        self.run_inference_pruning_test(sparsity.MaskAlgo.MASK_2D_BEST,
                                        sparsity.CheckMethod.CHECK_2D)

    def test_2D_best_training_pruning(self):
        self.run_training_pruning_test(sparsity.MaskAlgo.MASK_2D_BEST,
                                       sparsity.CheckMethod.CHECK_2D)


if __name__ == '__main__':
    unittest.main()
python/paddle/fluid/tests/unittests/asp/test_asp_pruning_2d_greedy.py (new file, mode 100644, view file @ 80614429)
# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
# Copyright (c) 2021 NVIDIA Corporation. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function

import unittest
import paddle
from paddle.fluid.contrib import sparsity
from paddle.fluid.tests.unittests.asp.asp_pruning_base import TestASPHelperPruningBase

paddle.enable_static()


class TestASPHelperPruning2DGreedy(TestASPHelperPruningBase):
    def test_2D_greedy_inference_pruning(self):
        self.run_inference_pruning_test(sparsity.MaskAlgo.MASK_2D_GREEDY,
                                        sparsity.CheckMethod.CHECK_2D)

    def test_2D_greedy_training_pruning(self):
        self.run_training_pruning_test(sparsity.MaskAlgo.MASK_2D_GREEDY,
                                       sparsity.CheckMethod.CHECK_2D)


if __name__ == '__main__':
    unittest.main()
python/paddle/fluid/tests/unittests/test_asp_utils.py → python/paddle/fluid/tests/unittests/asp/test_asp_utils.py (renamed, view file @ 80614429)
@@ -39,9 +39,9 @@ class TestASPUtils(unittest.TestCase):
         x = np.array([[1.0, 1.0, 1.0, 0.0, 1.0], [1.0, 1.0, 0.0, 0.0, 1.0],
                       [1.0, 0.0, 0.0, 0.0, 1.0], [1.0, 1.0, 0.0, 0.0, 1.0],
                       [0.0, 1.0, 0.0, 0.0, 1.0]])
-        self.assertEqual(sparsity.density(x), 0.56)
+        self.assertEqual(sparsity.calculate_density(x), 0.56)
         x[:, 0] = 0.0
-        self.assertEqual(sparsity.density(x), 0.4)
+        self.assertEqual(sparsity.calculate_density(x), 0.4)
 
     def test_check_mask_1d(self):
         x = np.array([[1.0, 0.0, 0.0, 1.0, 1.0], [1.0, 1.0, 0.0, 0.0, 1.0],

@@ -114,11 +114,11 @@ class TestASPUtils(unittest.TestCase):
         for _ in range(4):
             computing_thread = threading.Thread(
-                target=paddle.fluid.contrib.sparsity.utils.compute_valid_2d_patterns,
+                target=paddle.fluid.contrib.sparsity.utils._compute_valid_2d_patterns,
                 args=(2, 4))
             computing_thread.start()
         time.sleep(3)
-        patterns_map = paddle.fluid.contrib.sparsity.utils.valid_2d_patterns
+        patterns_map = paddle.fluid.contrib.sparsity.utils._valid_2d_patterns
         reference_patterns = get_reference()
         reference_key = '4_2'