From 576c60343d60db170f6bf63bfe1a5b1b34c8b6b8 Mon Sep 17 00:00:00 2001
From: ceci3 <592712189@qq.com>
Date: Mon, 4 Nov 2019 07:30:47 +0000
Subject: [PATCH] update mobilenet space and add register

---
 paddleslim/nas/SearchSpaceFactory.py | 36 ----------
 paddleslim/nas/searchspace/__init__.py | 15 +++++
 .../nas/{ => searchspace}/base_layer.py | 0
 .../mobilenetv2_space.py} | 66 +++++++++++--------
 paddleslim/nas/searchspace/registry.py | 5 ++
 .../{SearchSpace.py => searchspacebase.py} | 4 +-
 paddleslim/nas/searchspacefactory.py | 36 ++++++++++
 paddleslim/nas/test_searchspace.py | 36 ++++++++++
 paddleslim/nas/utils/__init__.py | 13 ++++
 paddleslim/nas/utils/registry.py | 31 +++++++++
 10 files changed, 178 insertions(+), 64 deletions(-)
 delete mode 100644 paddleslim/nas/SearchSpaceFactory.py
 create mode 100644 paddleslim/nas/searchspace/__init__.py
 rename paddleslim/nas/{ => searchspace}/base_layer.py (100%)
 rename paddleslim/nas/{MobileNetV2Space.py => searchspace/mobilenetv2_space.py} (80%)
 create mode 100644 paddleslim/nas/searchspace/registry.py
 rename paddleslim/nas/{SearchSpace.py => searchspacebase.py} (95%)
 create mode 100644 paddleslim/nas/searchspacefactory.py
 create mode 100644 paddleslim/nas/test_searchspace.py
 create mode 100644 paddleslim/nas/utils/__init__.py
 create mode 100644 paddleslim/nas/utils/registry.py

diff --git a/paddleslim/nas/SearchSpaceFactory.py b/paddleslim/nas/SearchSpaceFactory.py
deleted file mode 100644
index e8e5ce61..00000000
--- a/paddleslim/nas/SearchSpaceFactory.py
+++ /dev/null
@@ -1,36 +0,0 @@
-from MobileNetV2Space import MobileNetV2Space
-
-class SearchSpaceFactory(object):
-    def __init__(self):
-        pass
-
-    def get_search_space(self, key, config):
-        """
-        Args:
-            key(str): model name
-            config(dict): basic config information.
-        """
-        if key == 'MobileNetV2':
-            space = MobileNetV2Space(config['input_size'], config['output_size'], config['block_num'])
-
-        return space
-
-
-import paddle.fluid as fluid
-if __name__ == '__main__':
-    config = {'input_size': 224, 'output_size': 7, 'block_num': 5}
-    space = SearchSpaceFactory()
-
-    my_space = space.get_search_space('MobileNetV2', config)
-    model_arch = my_space.token2arch()
-
-    train_prog = fluid.Program()
-    startup_prog = fluid.Program()
-    with fluid.program_guard(train_prog, startup_prog):
-        model_input = fluid.layers.data(name='model_in', shape=[1, 3, 224, 224], dtype='float32', append_batch_size=False)
-        predict = model_arch(model_input)
-        print('output shape', predict.shape)
-
-
-    #for op in train_prog.global_block().ops:
-    #    print(op.type)
diff --git a/paddleslim/nas/searchspace/__init__.py b/paddleslim/nas/searchspace/__init__.py
new file mode 100644
index 00000000..d1b5c527
--- /dev/null
+++ b/paddleslim/nas/searchspace/__init__.py
@@ -0,0 +1,15 @@
+# Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from .mobilenetv2_space import MobileNetV2Space
diff --git a/paddleslim/nas/base_layer.py b/paddleslim/nas/searchspace/base_layer.py
similarity index 100%
rename from paddleslim/nas/base_layer.py
rename to paddleslim/nas/searchspace/base_layer.py
diff --git a/paddleslim/nas/MobileNetV2Space.py b/paddleslim/nas/searchspace/mobilenetv2_space.py
similarity index 80%
rename from paddleslim/nas/MobileNetV2Space.py
rename to paddleslim/nas/searchspace/mobilenetv2_space.py
index 20b52a8b..e09e00a6 100644
--- a/paddleslim/nas/MobileNetV2Space.py
+++ b/paddleslim/nas/searchspace/mobilenetv2_space.py
@@ -16,14 +16,18 @@ from __future__ import absolute_import
 from __future__ import division
 from __future__ import print_function
 
+import sys
+sys.path.append('..')
 import numpy as np
 import paddle.fluid as fluid
 from paddle.fluid.param_attr import ParamAttr
-from SearchSpace import SearchSpace
-from base_layer import conv_bn_layer
+from searchspacebase import SearchSpaceBase
+from .base_layer import conv_bn_layer
+from .registry import SEARCHSPACE
 
-class MobileNetV2Space(SearchSpace):
-    def __init__(self, input_size, output_size, block_num, scale=1.0):
+@SEARCHSPACE.register_module
+class MobileNetV2Space(SearchSpaceBase):
+    def __init__(self, input_size, output_size, block_num, scale=1.0, class_dim=1000):
         super(MobileNetV2Space, self).__init__(input_size, output_size, block_num)
         self.head_num = np.array([3,4,8,12,16,24,32]) #7
         self.filter_num1 = np.array([3,4,8,12,16,24,32,48]) #8
@@ -36,10 +40,11 @@ class MobileNetV2Space(SearchSpace):
         self.multiply = np.array([1,2,3,4,6]) #5
         self.repeat = np.array([1,2,3,4,5,6]) #6
         self.scale=scale
+        self.class_dim=class_dim
 
     def init_tokens(self):
         """
-        initial tokens. The fist tokens to controller.
+        The initial tokens sent to the controller. The first one is the index of the first layer's channel in self.head_num; each of the following lines represents the indices of [expansion_factor, filter_num, repeat_num, kernel_size].
         """
 
@@ -74,7 +79,7 @@ class MobileNetV2Space(SearchSpace):
         if tokens is None:
             tokens = self.init_tokens()
 
-        bottleneck_params_list = [
+        base_bottleneck_params_list = [
             (1, self.head_num[tokens[0]], 1, 1, 3),
             (self.multiply[tokens[1]], self.filter_num1[tokens[2]], self.repeat[tokens[3]], 2, self.k_size[tokens[4]]),
             (self.multiply[tokens[5]], self.filter_num1[tokens[6]], self.repeat[tokens[7]], 2, self.k_size[tokens[8]]),
@@ -85,10 +90,24 @@ class MobileNetV2Space(SearchSpace):
             (self.multiply[tokens[25]], self.filter_num6[tokens[26]], self.repeat[tokens[27]], 1, self.k_size[tokens[28]]),
         ]
 
-        bottleneck_params_list = bottleneck_params_list[:self.block_num]
+        assert self.block_num < 7, 'block number must be less than 7, but the received block number is {}'.format(self.block_num)
+
+        # stride=2 means the convolution downsamples the feature map, so block_num is only decreased when stride=2;
+        # otherwise the layer is appended to the params list directly.
+        bottleneck_params_list = []
+        for param_list in base_bottleneck_params_list:
+            if param_list[3] == 1:
+                bottleneck_params_list.append(param_list)
+            else:
+                if self.block_num > 1:
+                    bottleneck_params_list.append(param_list)
+                    self.block_num -= 1
+                else:
+                    break
 
         def net_arch(input):
             #conv1
+            # all conv2d padding is 'SAME', so the actual padding is computed automatically.
             input = conv_bn_layer(
                 input,
                 num_filters=int(32 * self.scale),
@@ -114,23 +133,18 @@ class MobileNetV2Space(SearchSpace):
                     k=k,
                     name='conv' + str(i))
                 in_c = int(c * self.scale)
 
-            ##last_conv
-            #input = conv_bn_layer(
-            #    input=input,
-            #    num_filters=int(1280 * self.scale) if self.scale > 1.0 else 1280,
-            #    filter_size=1,
-            #    stride=1,
-            #    padding='SAME',
-            #    act='relu6',
-            #    name='conv9')
-
-            #input = fluid.layers.pool2d(
-            #    input=input, pool_type='avg', global_pooling=True)
-            #output = fluid.layers.fc(input=input,
-            #                         size=class_dim,
-            #                         param_attr=ParamAttr(name='fc10_weights'),
-            #                         bias_attr=ParamAttr(name='fc10_offset'))
+            # if output_size is 1, add an fc layer at the end
+            if self.output_size == 1:
+                input = fluid.layers.fc(input=input,
+                                        size=self.class_dim,
+                                        param_attr=ParamAttr(name='fc10_weights'),
+                                        bias_attr=ParamAttr(name='fc10_offset'))
+            else:
+                assert self.output_size == input.shape[2], \
+                    ("output_size must equal input_size / (2^block_num). "
+                     "But received input_size={}, output_size={}, block_num={}".format(
+                         self.input_size, self.output_size, self.block_num))
 
             return input
 
@@ -140,8 +154,8 @@ class MobileNetV2Space(SearchSpace):
     def shortcut(self, input, data_residual):
         """Build shortcut layer.
         Args:
-            input: Variable, input.
-            data_residual: Variable, residual layer.
+            input(Variable): input.
+            data_residual(Variable): residual layer.
         Returns:
             Variable, layer output.
         """
@@ -166,7 +180,7 @@ class MobileNetV2Space(SearchSpace):
             ifshortcut(bool), whether using shortcut.
             stride(int), stride.
            filter_size(int), filter size.
-            padding(str, 'SAME'|'VAILD'), padding.
+            padding(str|int|list), padding.
             expansion_factor(float), expansion factor.
             name(str), name.
         Returns:
diff --git a/paddleslim/nas/searchspace/registry.py b/paddleslim/nas/searchspace/registry.py
new file mode 100644
index 00000000..33fb7212
--- /dev/null
+++ b/paddleslim/nas/searchspace/registry.py
@@ -0,0 +1,5 @@
+import sys
+sys.path.append('..')
+from utils.registry import Registry
+
+SEARCHSPACE = Registry('searchspace')
diff --git a/paddleslim/nas/SearchSpace.py b/paddleslim/nas/searchspacebase.py
similarity index 95%
rename from paddleslim/nas/SearchSpace.py
rename to paddleslim/nas/searchspacebase.py
index 322c6f01..cc1d462a 100644
--- a/paddleslim/nas/SearchSpace.py
+++ b/paddleslim/nas/searchspacebase.py
@@ -12,9 +12,9 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-__all__ = ['SearchSpace']
+__all__ = ['SearchSpaceBase']
 
-class SearchSpace(object):
+class SearchSpaceBase(object):
     """Controller for Neural Architecture Search.
     """
 
diff --git a/paddleslim/nas/searchspacefactory.py b/paddleslim/nas/searchspacefactory.py
new file mode 100644
index 00000000..10d076e8
--- /dev/null
+++ b/paddleslim/nas/searchspacefactory.py
@@ -0,0 +1,36 @@
+# Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from searchspace.registry import SEARCHSPACE
+
+class SearchSpaceFactory(object):
+    def __init__(self):
+        pass
+
+    def get_search_space(self, key, config):
+        """
+        Get the specific model space based on the key and config.
+
+        Args:
+            key(str): model space name.
+            config(dict): basic config information.
+        Returns:
+            model space (class).
+        """
+        cls = SEARCHSPACE.get(key)
+        space = cls(config['input_size'], config['output_size'], config['block_num'])
+
+        return space
+
+
diff --git a/paddleslim/nas/test_searchspace.py b/paddleslim/nas/test_searchspace.py
new file mode 100644
index 00000000..4761bf36
--- /dev/null
+++ b/paddleslim/nas/test_searchspace.py
@@ -0,0 +1,36 @@
+# Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import paddle.fluid as fluid
+from searchspacefactory import SearchSpaceFactory
+if __name__ == '__main__':
+    # if output_size is 1, the model will add an fc layer at the end.
+    config = {'input_size': 224, 'output_size': 7, 'block_num': 5}
+    space = SearchSpaceFactory()
+
+    my_space = space.get_search_space('MobileNetV2Space', config)
+    model_arch = my_space.token2arch()
+
+    train_prog = fluid.Program()
+    startup_prog = fluid.Program()
+    with fluid.program_guard(train_prog, startup_prog):
+        input_size = config['input_size']
+        model_input = fluid.layers.data(name='model_in', shape=[1, 3, input_size, input_size], dtype='float32', append_batch_size=False)
+        print('input shape', model_input.shape)
+        predict = model_arch(model_input)
+        print('output shape', predict.shape)
+
+
+    #for op in train_prog.global_block().ops:
+    #    print(op.type)
diff --git a/paddleslim/nas/utils/__init__.py b/paddleslim/nas/utils/__init__.py
new file mode 100644
index 00000000..9d053150
--- /dev/null
+++ b/paddleslim/nas/utils/__init__.py
@@ -0,0 +1,13 @@
+# Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/paddleslim/nas/utils/registry.py b/paddleslim/nas/utils/registry.py
new file mode 100644
index 00000000..5d055a9c
--- /dev/null
+++ b/paddleslim/nas/utils/registry.py
@@ -0,0 +1,31 @@
+import inspect
+
+class Registry(object):
+    def __init__(self, name):
+        self._name = name
+        self._module_dict = dict()
+    def __repr__(self):
+        format_str = self.__class__.__name__ + '(name={}, items={})'.format(self._name, list(self._module_dict.keys()))
+        return format_str
+
+    @property
+    def name(self):
+        return self._name
+    @property
+    def module_dict(self):
+        return self._module_dict
+
+    def get(self, key):
+        return self._module_dict.get(key, None)
+
+    def _register_module(self, module_class):
+        if not inspect.isclass(module_class):
+            raise TypeError('module must be a class, but received {}.'.format(type(module_class)))
+        module_name = module_class.__name__
+        if module_name in self._module_dict:
+            raise KeyError('{} is already registered in {}.'.format(module_name, self.name))
+        self._module_dict[module_name] = module_class
+
+    def register_module(self, cls):
+        self._register_module(cls)
+        return cls
-- 
GitLab
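
Reviewer note (not part of the patch): the snippet below is a minimal usage sketch of the registry/factory flow this change introduces, assuming it is run from paddleslim/nas next to test_searchspace.py. The class MySpace and its toy init_tokens/token2arch bodies are purely illustrative; only SEARCHSPACE.register_module, SearchSpaceFactory.get_search_space, and the config keys come from this diff, and the SearchSpaceBase constructor signature is assumed from the MobileNetV2Space example.

    # Usage sketch only: 'MySpace' and its toy methods are hypothetical.
    import paddle.fluid as fluid
    from searchspace.registry import SEARCHSPACE
    from searchspacebase import SearchSpaceBase
    from searchspacefactory import SearchSpaceFactory

    @SEARCHSPACE.register_module
    class MySpace(SearchSpaceBase):
        # The Registry stores the class under its class name, so the factory key is 'MySpace'.
        def init_tokens(self):
            return [0]

        def token2arch(self, tokens=None):
            # Return a callable that builds a (toy) network for a given input variable.
            def net_arch(input):
                return fluid.layers.fc(input=input, size=10)
            return net_arch

    config = {'input_size': 224, 'output_size': 7, 'block_num': 5}
    factory = SearchSpaceFactory()
    my_space = factory.get_search_space('MySpace', config)  # resolved via SEARCHSPACE.get('MySpace')
    arch_fn = my_space.token2arch()

arch_fn can then be applied to a fluid.layers.data input under a program guard, exactly as test_searchspace.py does.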