#coding:utf-8
# Copyright (c) 2019  PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import os
import time
import sys
import functools
from shutil import copyfile

import paddle
import paddle.fluid as fluid

from paddlehub.common import utils
from paddlehub.common import paddle_helper
from paddlehub.common.logger import logger
from paddlehub.common.lock import lock
from paddlehub.common.downloader import default_downloader
from paddlehub.module import module_desc_pb2
from paddlehub.common.dir import CONF_HOME
from paddlehub.module import check_info_pb2
from paddlehub.module.signature import Signature, create_signature
from paddlehub.module.checker import ModuleChecker
from paddlehub.module.manager import default_module_manager
from paddlehub.module.base_processor import BaseProcessor
from paddlehub.io.parser import yaml_parser
from paddlehub import version

__all__ = ['Module', 'create_module']

# PaddleHub module dir name
ASSETS_DIRNAME = "assets"
MODEL_DIRNAME = "model"
MODULE_DESC_PBNAME = "module_desc.pb"
PYTHON_DIR = "python"
PROCESSOR_NAME = "processor"
# PaddleHub var prefix
HUB_VAR_PREFIX = "@HUB_%s@"


def create_module(sign_arr,
                  module_dir,
                  processor=None,
                  assets=None,
                  module_info=None,
                  exe=None,
                  extra_info=None):
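    """Package signatures, an optional processor and asset files into a
    PaddleHub module and serialize it to ``module_dir``.

    Usage sketch (assumes ``img`` and ``out`` are variables of an already
    built fluid program and ``exe`` is a fluid Executor)::

        sign = create_signature("predict", inputs=[img], outputs=[out],
                                for_predict=True)
        create_module(sign_arr=[sign], module_dir="./my_module", exe=exe)
    """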
    sign_arr = utils.to_list(sign_arr)
    module = Module(
        signatures=sign_arr,
        processor=processor,
        assets=assets,
        module_info=module_info,
        extra_info=extra_info)
    module.serialize_to_path(path=module_dir, exe=exe)


class ModuleHelper(object):
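    """Resolve the on-disk layout of a module directory: description file,
    inference model, processor code and asset files."""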
    def __init__(self, module_dir):
        self.module_dir = module_dir

    def module_desc_path(self):
        return os.path.join(self.module_dir, MODULE_DESC_PBNAME)

    def model_path(self):
        return os.path.join(self.module_dir, MODEL_DIRNAME)

    def processor_path(self):
        return os.path.join(self.module_dir, PYTHON_DIR)

    def processor_name(self):
        return PROCESSOR_NAME

    def assets_path(self):
        return os.path.join(self.module_dir, ASSETS_DIRNAME)


class Module(object):
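    """Wrap a pretrained fluid program together with its signatures,
    processor and assets so it can be loaded, called and fine-tuned.

    Usage sketch (the module name is only an example)::

        import paddlehub as hub
        module = hub.Module(name="lac")
        inputs, outputs, program = module.context(trainable=True)
    """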
    def __init__(self,
                 name=None,
                 module_dir=None,
                 signatures=None,
                 module_info=None,
                 assets=None,
                 processor=None,
                 extra_info=None,
                 version=None):
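        """Build a Module in one of three ways: from an installed (or
        installable) module ``name``, from a local ``module_dir``, or from a
        list of ``signatures`` created in the current program (optionally with
        a ``processor`` and ``assets``). An exclusive flock on
        ``CONF_HOME/config.json`` is held during initialization, presumably to
        serialize concurrent installs."""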
        self.desc = module_desc_pb2.ModuleDesc()
        self.program = None
        self.assets = []
        self.helper = None
        self.signatures = {}
        self.default_signature = None
        self.module_info = None
        self.processor = None
        self.extra_info = {} if extra_info is None else extra_info
        if not isinstance(self.extra_info, dict):
            raise TypeError(
                "The extra_info should be an instance of python dict")

        # cache data
        self.last_call_name = None
        self.cache_feed_dict = None
        self.cache_fetch_dict = None
        self.cache_program = None

        fp_lock = open(os.path.join(CONF_HOME, 'config.json'))
        lock.flock(fp_lock, lock.LOCK_EX)
        if name:
            self._init_with_name(name=name, version=version)
            lock.flock(fp_lock, lock.LOCK_UN)
        elif module_dir:
            self._init_with_module_file(module_dir=module_dir[0])
            lock.flock(fp_lock, lock.LOCK_UN)
        elif signatures:
            if processor:
                if not issubclass(processor, BaseProcessor):
                    raise TypeError(
                        "Processor should be a subclass of paddlehub.BaseProcessor"
                    )
            if assets:
                self.assets = utils.to_list(assets)
                # for asset in assets:
                #     utils.check_path(assets)
            self.processor = processor
            self._generate_module_info(module_info)
            self._init_with_signature(signatures=signatures)
            lock.flock(fp_lock, lock.LOCK_UN)
        else:
            lock.flock(fp_lock, lock.LOCK_UN)
            raise ValueError("Module initialized parameter is empty")

    def _init_with_name(self, name, version=None):
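        """Install module ``name`` (optionally pinned to ``version``) through
        the default module manager, then load it from the installed
        directory."""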
        log_msg = "Installing %s module" % name
        if version:
            log_msg += "-%s" % version
        logger.info(log_msg)
        extra = {"command": "install"}
        result, tips, module_dir = default_module_manager.install_module(
            module_name=name, module_version=version, extra=extra)
        if not result:
            logger.error(tips)
            raise RuntimeError(tips)
        else:
            logger.info(tips)
            self._init_with_module_file(module_dir[0])

    def _init_with_url(self, url):
        utils.check_url(url)
        result, tips, module_dir = default_downloader.download_file_and_uncompress(
            url, save_path=".")
        if not result:
            logger.error(tips)
            raise RuntimeError(tips)
        else:
            self._init_with_module_file(module_dir)

    def _dump_processor(self):
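        """Dump the processor's Python source into the module's ``python``
        directory under an md5-derived file name and record that name in the
        module description."""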
        import inspect
        pymodule = inspect.getmodule(self.processor)
        pycode = inspect.getsource(pymodule)
        processor_path = self.helper.processor_path()
        processor_md5 = utils.md5(pycode)
        processor_md5 += str(time.time())
        processor_name = utils.md5(processor_md5)
        output_file = os.path.join(processor_path, processor_name + ".py")
        utils.mkdir(processor_path)
        with open(output_file, "w") as file:
            file.write(pycode)
        utils.from_pyobj_to_module_attr(
            processor_name, self.desc.attr.map.data['processor_info'])

    def _load_processor(self):
        processor_path = self.helper.processor_path()
        if os.path.exists(processor_path):
            sys.path.append(processor_path)
            processor_name = utils.from_module_attr_to_pyobj(
                self.desc.attr.map.data['processor_info'])
            self.processor = __import__(processor_name).Processor(module=self)
        else:
            self.processor = None

    def _dump_assets(self):
        utils.mkdir(self.helper.assets_path())
        for asset in self.assets:
            filename = os.path.basename(asset)
            newfile = os.path.join(self.helper.assets_path(), filename)
            copyfile(asset, newfile)

    def _load_assets(self):
        assets_path = self.helper.assets_path()
        self.assets = []
        for file in os.listdir(assets_path):
            filepath = os.path.join(self.helper.assets_path(), file)
            self.assets.append(filepath)

    def _init_with_module_file(self, module_dir):
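        """Load a module from ``module_dir``: run ModuleChecker, parse
        ``module_desc.pb``, load the inference program and then restore the
        processor, assets, signatures, parameters and variable info."""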
        checker = ModuleChecker(module_dir)
        checker.check()

        self.helper = ModuleHelper(module_dir)
        with open(self.helper.module_desc_path(), "rb") as fi:
            self.desc.ParseFromString(fi.read())

        exe = fluid.Executor(fluid.CPUPlace())
        self.program, _, _ = fluid.io.load_inference_model(
            self.helper.model_path(), executor=exe)
        for block in self.program.blocks:
            for op in block.ops:
                if "op_callstack" in op.all_attrs():
                    op._set_attr("op_callstack", [""])
        self._load_processor()
        self._load_assets()
        self._recover_from_desc()
        self._generate_sign_attr()
        self._generate_extra_info()
        self._restore_parameter(self.program)
        self._recover_variable_info(self.program)

    def _init_with_signature(self, signatures):
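        """Build the module description from in-memory signatures and expose
        each signature as a callable attribute."""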
        self.name_prefix = HUB_VAR_PREFIX % self.name
        self._process_signatures(signatures)
        self._check_signatures()
        self._generate_desc()
        self._generate_sign_attr()
        self._generate_extra_info()

    def _init_with_program(self, program):
        pass

    def _process_signatures(self, signatures):
        self.signatures = {}
        self.program = signatures[0].inputs[0].block.program
        for sign in signatures:
            if sign.name in self.signatures:
                raise ValueError(
                    "Error! Signature array contains duplicated signatrues %s" %
                    sign)
            if self.default_signature is None and sign.for_predict:
                self.default_signature = sign
            self.signatures[sign.name] = sign

    def _restore_parameter(self, program):
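        """Re-create fluid Parameters in ``program`` from the parameter
        attributes saved in the module description."""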
        global_block = program.global_block()
        param_attrs = self.desc.attr.map.data['param_attrs']
        for key, param_attr in param_attrs.map.data.items():
            param = paddle_helper.from_module_attr_to_param(param_attr)
            param['name'] = self.get_var_name_with_prefix(key)
            if (param['name'] not in global_block.vars):
                continue
            var = global_block.var(param['name'])
            global_block.create_parameter(
                shape=var.shape,
                dtype=var.dtype,
                type=var.type,
                lod_level=var.lod_level,
                error_clip=var.error_clip,
                stop_gradient=var.stop_gradient,
                is_data=var.is_data,
                **param)

    def _recover_variable_info(self, program):
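        """Restore per-variable information (currently ``stop_gradient``)
        recorded in the module description onto the variables of ``program``."""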
        var_infos = self.desc.attr.map.data['var_infos']
        for var_info in var_infos.map.data:
            idx = utils.from_module_attr_to_pyobj(
                var_infos.map.data[var_info].map.data['block_id'])
            stop_gradient = utils.from_module_attr_to_pyobj(
                var_infos.map.data[var_info].map.data['stop_gradient'])
            block = program.blocks[idx]
            var_name = self.get_var_name_with_prefix(var_info)
            if var_name in block.vars:
                var = block.vars[var_name]
                var.stop_gradient = stop_gradient

    def get_extra_info(self, key):
        return self.extra_info.get(key, None)

    def _generate_extra_info(self):
        for key in self.extra_info:
            self.__dict__["get_%s" % key] = functools.partial(
                self.get_extra_info, key=key)

    def _generate_module_info(self, module_info=None):
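        """Fill in name/author/summary/type/version from a YAML module-info
        file; missing fields default to ``UNKNOWN``."""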
        if not module_info:
            self.module_info = {}
        else:
            if not utils.is_yaml_file(module_info):
                logger.critical("Module info file should be yaml format")
                exit(1)
            self.module_info = yaml_parser.parse(module_info)
        self.author = self.module_info.get('author', 'UNKNOWN')
        self.author_email = self.module_info.get('author_email', 'UNKNOWN')
        self.summary = self.module_info.get('summary', 'UNKNOWN')
        self.type = self.module_info.get('type', 'UNKNOWN')
        self.version = self.module_info.get('version', 'UNKNOWN')
        self.name = self.module_info.get('name', 'UNKNOWN')

    def _generate_sign_attr(self):
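        """Expose every signature as an attribute, so that
        ``module.<sign_name>(data=...)`` forwards to ``__call__``."""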
        self._check_signatures()
        for sign in self.signatures:
            self.__dict__[sign] = functools.partial(
                self.__call__, sign_name=sign)

    def get_vocab_path(self):
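        """Return the path of the first asset whose name contains
        ``vocab.txt`` (used by NLP modules), or ``None`` if there is none."""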
        for assets_file in self.assets:
            if "vocab.txt" in assets_file:
                return assets_file

    def _recover_from_desc(self):
        # recover signature
        for sign, module_var in self.desc.sign2var.items():
            inputs = []
            outputs = []
            feed_names = []
            fetch_names = []
            for var in module_var.feed_desc:
                variable = self.program.global_block().vars[var.var_name]
                inputs.append(variable)
                feed_names.append(var.alias)

            for var in module_var.fetch_desc:
                variable = self.program.global_block().vars[var.var_name]
                outputs.append(variable)
                fetch_names.append(var.alias)

            self.signatures[sign] = create_signature(
                sign,
                inputs=inputs,
                outputs=outputs,
                feed_names=feed_names,
                fetch_names=fetch_names)

        # recover default signature
        default_signature_name = utils.from_module_attr_to_pyobj(
            self.desc.attr.map.data['default_signature'])
        self.default_signature = self.signatures[
            default_signature_name] if default_signature_name else None

        # recover module info
        module_info = self.desc.attr.map.data['module_info']
        self.name = utils.from_module_attr_to_pyobj(
            module_info.map.data['name'])
        self.author = utils.from_module_attr_to_pyobj(
            module_info.map.data['author'])
        self.author_email = utils.from_module_attr_to_pyobj(
            module_info.map.data['author_email'])
        self.version = utils.from_module_attr_to_pyobj(
            module_info.map.data['version'])
        self.type = utils.from_module_attr_to_pyobj(
            module_info.map.data['type'])
        self.summary = utils.from_module_attr_to_pyobj(
            module_info.map.data['summary'])

        # recover extra info
        extra_info = self.desc.attr.map.data['extra_info']
        self.extra_info = {}
        for key, value in extra_info.map.data.items():
            self.extra_info[key] = utils.from_module_attr_to_pyobj(value)

        # recover name prefix
        self.name_prefix = utils.from_module_attr_to_pyobj(
            self.desc.attr.map.data["name_prefix"])

    def _generate_desc(self):
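        """Serialize parameters, variable info, signatures, default signature,
        name prefix, module info and extra info into the protobuf module
        description."""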
        # save fluid Parameter
        attr = self.desc.attr
        attr.type = module_desc_pb2.MAP
        param_attrs = attr.map.data['param_attrs']
        param_attrs.type = module_desc_pb2.MAP
        for param in self.program.global_block().iter_parameters():
            param_attr = param_attrs.map.data[param.name]
            paddle_helper.from_param_to_module_attr(param, param_attr)

        # save Variable Info
        var_infos = attr.map.data['var_infos']
        var_infos.type = module_desc_pb2.MAP
        for block in self.program.blocks:
            for var in block.vars.values():
                var_info = var_infos.map.data[var.name]
                var_info.type = module_desc_pb2.MAP
                utils.from_pyobj_to_module_attr(
                    var.stop_gradient, var_info.map.data['stop_gradient'])
                utils.from_pyobj_to_module_attr(block.idx,
                                                var_info.map.data['block_id'])

        # save signature info
        for key, sign in self.signatures.items():
            var = self.desc.sign2var[sign.name]
            feed_desc = var.feed_desc
            fetch_desc = var.fetch_desc
            feed_names = sign.feed_names
            fetch_names = sign.fetch_names
            for index, input in enumerate(sign.inputs):
                feed_var = feed_desc.add()
                feed_var.var_name = self.get_var_name_with_prefix(input.name)
                feed_var.alias = feed_names[index]

            for index, output in enumerate(sign.outputs):
                fetch_var = fetch_desc.add()
                fetch_var.var_name = self.get_var_name_with_prefix(output.name)
                fetch_var.alias = fetch_names[index]

        # save default signature
        utils.from_pyobj_to_module_attr(
            self.default_signature.name if self.default_signature else None,
            attr.map.data['default_signature'])

        # save name prefix
        utils.from_pyobj_to_module_attr(self.name_prefix,
                                        self.desc.attr.map.data["name_prefix"])

        # save module info
        module_info = attr.map.data['module_info']
        module_info.type = module_desc_pb2.MAP
        utils.from_pyobj_to_module_attr(self.name, module_info.map.data['name'])
        utils.from_pyobj_to_module_attr(self.version,
                                        module_info.map.data['version'])
        utils.from_pyobj_to_module_attr(self.author,
                                        module_info.map.data['author'])
        utils.from_pyobj_to_module_attr(self.author_email,
                                        module_info.map.data['author_email'])
        utils.from_pyobj_to_module_attr(self.type, module_info.map.data['type'])
        utils.from_pyobj_to_module_attr(self.summary,
                                        module_info.map.data['summary'])

        # save extra info
        extra_info = attr.map.data['extra_info']
        extra_info.type = module_desc_pb2.MAP
        for key, value in self.extra_info.items():
            utils.from_pyobj_to_module_attr(value, extra_info.map.data[key])

    def __call__(self, sign_name, data, use_gpu=False, batch_size=1, **kwargs):
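        """Run prediction on ``data`` with signature ``sign_name``: the
        processor preprocesses the input, mini-batches of ``batch_size`` are
        fed through the cached inference program, and the outputs are
        postprocessed. GPU execution is used only when ``use_gpu`` is True and
        ``CUDA_VISIBLE_DEVICES`` points to a usable device."""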
        self.check_processor()

        def _get_reader_and_feeder(data_format, data, place):
            def _reader(process_data):
                for item in zip(*process_data):
                    yield item

            process_data = []
            feed_name_list = []
            for key in data_format:
                process_data.append([value['processed'] for value in data[key]])
                feed_name_list.append(data_format[key]['feed_key'])
            feeder = fluid.DataFeeder(feed_list=feed_name_list, place=place)
            return functools.partial(_reader, process_data=process_data), feeder

        if self.last_call_name != sign_name:
            self.last_call_name = sign_name
            self.cache_feed_dict, self.cache_fetch_dict, self.cache_program = self.context(
                sign_name, for_test=True)
        feed_dict = self.cache_feed_dict
        fetch_dict = self.cache_fetch_dict
        program = self.cache_program

        fetch_list = list(set([value for key, value in fetch_dict.items()]))
        with fluid.program_guard(program):
            result = []
            index = 0
            try:
                # run on GPU only if requested and CUDA_VISIBLE_DEVICES is usable
                _places = os.environ["CUDA_VISIBLE_DEVICES"]
                int(_places[0])
            except (KeyError, IndexError, ValueError):
                use_gpu = False

            place = fluid.CUDAPlace(0) if use_gpu else fluid.CPUPlace()

            exe = fluid.Executor(place=place)
            data = self.processor.preprocess(
                sign_name=sign_name, data_dict=data)
            data_format = self.processor.data_format(sign_name=sign_name)
            reader, feeder = _get_reader_and_feeder(data_format, data, place)
            reader = paddle.batch(reader, batch_size=batch_size)
            for batch in reader():
                data_out = exe.run(
                    feed=feeder.feed(batch),
                    fetch_list=fetch_list,
                    return_numpy=False)
                sub_data = {
                    key: value[index:index + len(batch)]
                    for key, value in data.items()
                }
                result += self.processor.postprocess(sign_name, data_out,
                                                     sub_data, **kwargs)
                index += len(batch)

        return result

    def check_processor(self):
        if not self.processor:
            raise ValueError("This Module is not callable!")

    def context(self,
                sign_name=None,
                for_test=False,
                trainable=True,
                regularizer=None,
                max_seq_len=128,
                learning_rate=1e-3):
        """
        Args:
            max_seq_len(int): maximum sequence length, this option is only
            available for BERT/ERNIE module
        """

        if sign_name:
            if sign_name not in self.signatures:
                raise KeyError(
                    "Module did not have a signature with name %s" % sign_name)
            signature = self.signatures[sign_name]
        else:
            inputs = [
                input for signature in self.signatures.values()
                for input in signature.inputs
            ]
            outputs = [
                output for signature in self.signatures.values()
                for output in signature.outputs
            ]
            feed_names = [
                feed_name for signature in self.signatures.values()
                for feed_name in signature.feed_names
            ]
            fetch_names = [
                fetch_name for signature in self.signatures.values()
                for fetch_name in signature.fetch_names
            ]
            signature = create_signature(
                name="hub_temp_signature",
                inputs=inputs,
                outputs=outputs,
                feed_names=feed_names,
                fetch_names=fetch_names,
                for_predict=False)

        program = self.program.clone(for_test=for_test)
        paddle_helper.remove_feed_fetch_op(program)

        if not for_test:
            paddle_helper.set_parameter_trainable(program, trainable)

            paddle_helper.set_parameter_learning_rate(program, learning_rate)

            paddle_helper.set_parameter_regularizer(program, regularizer)

            self._restore_parameter(program)

        self._recover_variable_info(program)

        paddle_helper.set_op_attr(program, is_test=for_test)
        feed_dict = {}
        fetch_dict = {}
        for index, var in enumerate(signature.inputs):
            feed_dict[index] = program.global_block().var(var.name)
            key = signature.feed_names[index]
            if key:
                feed_dict[key] = program.global_block().var(var.name)

        for index, var in enumerate(signature.outputs):
            fetch_dict[index] = program.global_block().var(var.name)
            key = signature.fetch_names[index]
            if key:
                fetch_dict[key] = program.global_block().var(var.name)

        # update BERT/ERNIE's input tensor's sequence length to max_seq_len
        if "bert" in self.name or self.name.startswith("ernie"):
            MAX_SEQ_LENGTH = 512
            if max_seq_len > MAX_SEQ_LENGTH or max_seq_len <= 0:
                raise ValueError(
                    "max_seq_len({}) should be in the range of [1, {}]".format(
                        MAX_SEQ_LENGTH))
            logger.info(
                "Set maximum sequence length of input tensor to {}".format(
                    max_seq_len))
            if self.name.startswith("ernie_v2"):
                feed_list = [
                    "input_ids", "position_ids", "segment_ids", "input_mask",
                    "task_ids"
                ]
                logger.warning(
                    "%s uses task_id; the argument use_taskid of Reader class must be True."
                    % self.name)
            else:
                feed_list = [
                    "input_ids", "position_ids", "segment_ids", "input_mask"
                ]
                logger.warning(
                    "%s has no task_id; the argument use_taskid of Reader class must be False."
                    % self.name)
            for tensor_name in feed_list:
                seq_tensor_shape = [-1, max_seq_len, 1]
                logger.info("The shape of input tensor[{}] set to {}".format(
                    tensor_name, seq_tensor_shape))
                program.global_block().var(
                    feed_dict[tensor_name].name).desc.set_shape(
                        seq_tensor_shape)

        # record num parameters loaded by paddlehub
        num_param_loaded = 0
        for param in program.global_block().iter_parameters():
            num_param_loaded += 1
        logger.info(
            "%d pretrained paramaters loaded by PaddleHub" % num_param_loaded)

        return feed_dict, fetch_dict, program

    def get_name_prefix(self):
        return self.name_prefix

    def get_var_name_with_prefix(self, var_name):
        return self.get_name_prefix() + var_name

    def _check_signatures(self):
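        """Validate that signatures exist, are Signature instances, and that
        all of their input and output variables come from the same Program."""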
        if not self.signatures:
            raise ValueError("Signatures should not be None")

        for key, sign in self.signatures.items():
            if not isinstance(sign, Signature):
                raise TypeError(
                    "Item in Signatures shoule be an instance of paddlehub.Signature"
                )

            for input in sign.inputs:
                _tmp_program = input.block.program
                if not self.program == _tmp_program:
                    raise ValueError(
                        "All input and outputs variables in signature should come from the same Program"
                    )

            for output in sign.outputs:
                _tmp_program = output.block.program
                if not self.program == _tmp_program:
                    raise ValueError(
                        "All input and outputs variables in signature should come from the same Program"
                    )

    def serialize_to_path(self, path=None, exe=None):
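        """Save the module to ``path``: dump the inference model, rename its
        variables with the module's name prefix, copy processor code and
        assets, generate check info and write ``module_desc.pb``."""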
        self._check_signatures()
        self._generate_desc()
        # create module path for saving
        if path is None:
            path = os.path.join(".", self.name)
        self.helper = ModuleHelper(path)
        utils.mkdir(self.helper.module_dir)

        # create module pb
        module_desc = module_desc_pb2.ModuleDesc()
        logger.info("PaddleHub version = %s" % version.hub_version)
        logger.info("PaddleHub Module proto version = %s" %
                    version.module_proto_version)
        logger.info("Paddle version = %s" % paddle.__version__)

        feeded_var_names = [
            input.name for key, sign in self.signatures.items()
            for input in sign.inputs
        ]
        target_vars = [
            output for key, sign in self.signatures.items()
            for output in sign.outputs
        ]
        feeded_var_names = list(set(feeded_var_names))
        target_vars = list(set(target_vars))

        # save inference program
        program = self.program.clone()

        for block in program.blocks:
            for op in block.ops:
                if "op_callstack" in op.all_attrs():
                    op._set_attr("op_callstack", [""])

        if not exe:
            place = fluid.CPUPlace()
            exe = fluid.Executor(place=place)
        utils.mkdir(self.helper.model_path())
        fluid.io.save_inference_model(
            self.helper.model_path(),
            feeded_var_names=list(feeded_var_names),
            target_vars=list(target_vars),
            main_program=program,
            executor=exe)

        with open(os.path.join(self.helper.model_path(), "__model__"),
                  "rb") as file:
            program_desc_str = file.read()
            rename_program = fluid.framework.Program.parse_from_string(
                program_desc_str)
            varlist = {
                var: block
                for block in rename_program.blocks for var in block.vars
                if self.get_name_prefix() not in var
            }
            for var, block in varlist.items():
                old_name = var
                new_name = self.get_var_name_with_prefix(old_name)
                block._rename_var(old_name, new_name)
            utils.mkdir(self.helper.model_path())
            with open(
                    os.path.join(self.helper.model_path(), "__model__"),
                    "wb") as f:
                f.write(rename_program.desc.serialize_to_string())

            for file in os.listdir(self.helper.model_path()):
                if (file == "__model__" or self.get_name_prefix() in file):
                    continue
                os.rename(
                    os.path.join(self.helper.model_path(), file),
                    os.path.join(self.helper.model_path(),
                                 self.get_var_name_with_prefix(file)))

        # create processor file
        if self.processor:
            self._dump_processor()

        # create assets
        self._dump_assets()

        # create check info
        checker = ModuleChecker(self.helper.module_dir)
        checker.generate_check_info()

        # Serialize module_desc pb
        module_pb = self.desc.SerializeToString()
        with open(self.helper.module_desc_path(), "wb") as f:
            f.write(module_pb)