# Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import logging
import sys
import numpy as np
from functools import reduce
import paddle.fluid as fluid
import copy
from ..core import VarWrapper, OpWrapper, GraphWrapper
from .group_param import collect_convs
from .criterion import CRITERION
from .idx_selector import IDX_SELECTOR
from ..common import get_logger

__all__ = ["Pruner"]

_logger = get_logger(__name__, level=logging.INFO)


class Pruner():
    """The pruner used to prune channels of convolution.

    Args:
36 37
        criterion(str|function): the criterion used to sort channels for pruning.
        idx_selector(str|function): 
38 39 40

    """

    def __init__(self,
                 criterion="l1_norm",
                 idx_selector="default_idx_selector"):
        if isinstance(criterion, str):
            self.criterion = CRITERION.get(criterion)
        else:
            self.criterion = criterion
        if isinstance(idx_selector, str):
            self.idx_selector = IDX_SELECTOR.get(idx_selector)
        else:
            self.idx_selector = idx_selector

        self.pruned_weights = False

    def prune(self,
              program,
              scope,
              params,
              ratios,
              place=None,
              lazy=False,
              only_graph=False,
              param_backup=False,
              param_shape_backup=False):
        """Pruning the given parameters.

        Args:
            program(fluid.Program): The program to be pruned.
            scope(fluid.Scope): The scope storing parameters to be pruned.
            params(list<str>): A list of parameter names to be pruned.
            ratios(list<float>): A list of ratios used to prune the parameters.
            place(fluid.Place): The device place of filter parameters. Default: None.
            lazy(bool): True means setting the pruned elements to zero.
                        False means cutting down the pruned elements. Default: False.
            only_graph(bool): True means only modifying the graph.
                              False means modifying graph and variables in scope. Default: False.
            param_backup(bool): Whether to return a dict to backup the values of parameters. Default: False.
            param_shape_backup(bool): Whether to return a dict to backup the shapes of parameters. Default: False.

        Returns:
            tuple: ``(pruned_program, param_backup, param_shape_backup)``. ``pruned_program`` is the pruned program. ``param_backup`` is a dict to backup the values of parameters. ``param_shape_backup`` is a dict to backup the shapes of parameters.
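
        Examples:
            A minimal sketch, not taken from the library's documentation. It
            assumes ``Pruner`` is exposed as ``paddleslim.prune.Pruner``, that
            ``main_program`` is an initialized ``fluid.Program`` whose
            parameters live in ``fluid.global_scope()``, and that it contains a
            convolution weight named ``conv1_weights``:

            .. code-block:: python

                import paddle.fluid as fluid
                from paddleslim.prune import Pruner

                pruner = Pruner(criterion="l1_norm")
                pruned_program, _, _ = pruner.prune(
                    main_program,
                    fluid.global_scope(),
                    params=["conv1_weights"],
                    ratios=[0.5],
                    place=fluid.CPUPlace())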
        """

        self.pruned_list = []
        graph = GraphWrapper(program.clone())
        param_backup = {} if param_backup else None
        param_shape_backup = {} if param_shape_backup else None

        visited = {}
        pruned_params = []
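        # First pass: for every target parameter, decide which indexes to prune
        # (and on which axis) without modifying any tensor yet.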
        for param, ratio in zip(params, ratios):
            _logger.info("pruning: {}".format(param))
            if graph.var(param) is None:
                _logger.warning(
                    "Variable[{}] to be pruned is not in current graph.".
                    format(param))
                continue
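            # Walk the graph to find every parameter that has to be pruned
            # together with `param`.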
            group = collect_convs([param], graph,
                                  visited)[0]  # [(name, axis, pruned_idx)]
            if group is None or len(group) == 0:
                continue
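            # Fast path: when only the graph is modified and the default index
            # selector is used, the concrete indexes do not matter, so
            # placeholders of the right length are enough.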
            if only_graph and self.idx_selector.__name__ == "default_idx_selector":

                param_v = graph.var(param)
                pruned_num = int(round(param_v.shape()[0] * ratio))
                pruned_idx = [0] * pruned_num
                for name, axis, _ in group:
                    pruned_params.append((name, axis, pruned_idx))

            else:
                assert not self.pruned_weights, (
                    "The weights have been pruned once.")
                group_values = []
                for name, axis, pruned_idx in group:
                    values = np.array(scope.find_var(name).get_tensor())
                    group_values.append((name, values, axis, pruned_idx))

                scores = self.criterion(
                    group_values, graph)  # [(name, axis, score, pruned_idx)]

                pruned_params.extend(self.idx_selector(scores, ratio))

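        # Second pass: merge the collected indexes per parameter and axis, then
        # apply them to the graph and, unless `only_graph` is True, to the
        # tensors in `scope`.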
        merge_pruned_params = {}
        for param, pruned_axis, pruned_idx in pruned_params:
            if param not in merge_pruned_params:
                merge_pruned_params[param] = {}
            if pruned_axis not in merge_pruned_params[param]:
                merge_pruned_params[param][pruned_axis] = []
            merge_pruned_params[param][pruned_axis].append(pruned_idx)

        for param_name in merge_pruned_params:
            for pruned_axis in merge_pruned_params[param_name]:
                pruned_idx = np.concatenate(merge_pruned_params[param_name][
                    pruned_axis])
                param = graph.var(param_name)
                if not lazy:
                    _logger.debug("{}\t{}\t{}\t{}".format(
                        param.name(), pruned_axis,
                        param.shape()[pruned_axis], len(pruned_idx)))
                    if param_shape_backup is not None:
                        origin_shape = copy.deepcopy(param.shape())
                        param_shape_backup[param.name()] = origin_shape
                    new_shape = list(param.shape())
                    new_shape[pruned_axis] -= len(pruned_idx)
                    param.set_shape(new_shape)
                if not only_graph:
                    param_t = scope.find_var(param.name()).get_tensor()
                    if param_backup is not None and (
                            param.name() not in param_backup):
                        param_backup[param.name()] = copy.deepcopy(
                            np.array(param_t))
                    try:
                        pruned_param = self._prune_tensor(
                            np.array(param_t),
                            pruned_idx,
                            pruned_axis=pruned_axis,
                            lazy=lazy)
                    except IndexError as e:
                        _logger.error("Pruning {}, but get [{}]".format(
                            param.name(), e))
                        # Leave the tensor in scope untouched if the pruning
                        # indexes are invalid for this parameter.
                        continue

                    param_t.set(pruned_param, place)
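        # Keep the `groups` attribute of grouped convolutions and all inferred
        # shapes consistent with the pruned parameters.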
        graph.update_groups_of_conv()
        graph.infer_shape()
        self.pruned_weights = (not only_graph)
        return graph.program, param_backup, param_shape_backup

    def _prune_tensor(self, tensor, pruned_idx, pruned_axis, lazy=False):
        """
        Prune an array by indexes on the given axis.

        Args:
            tensor(numpy.array): The target array to be pruned.
            pruned_idx(list<int>): The indexes to be pruned.
            pruned_axis(int): The axis of the given array to be pruned on.
            lazy(bool): True means setting the pruned elements to zero.
                        False means removing the pruned elements from memory.
                        Default: False.

        Returns:
            numpy.array: The pruned array.
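
        Examples:
            An illustrative call on a small array (a sketch with made-up
            values, not part of the public API):

            .. code-block:: python

                import numpy as np

                t = np.arange(6).reshape([3, 2])  # [[0, 1], [2, 3], [4, 5]]
                Pruner()._prune_tensor(t, [1], pruned_axis=0)
                # returns array([[0, 1], [4, 5]])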
        """
        mask = np.zeros(tensor.shape[pruned_axis], dtype=bool)
        mask[pruned_idx] = True

        def func(data):
            return data[~mask]

        def lazy_func(data):
            data[mask] = 0
            return data

        if lazy:
            return np.apply_along_axis(lazy_func, pruned_axis, tensor)
        else:
            return np.apply_along_axis(func, pruned_axis, tensor)