export_model.py
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import os
import sys

__dir__ = os.path.dirname(os.path.abspath(__file__))
sys.path.append(__dir__)
sys.path.append(os.path.abspath(os.path.join(__dir__, '..')))

import argparse

import paddle
from paddle.jit import to_static

from ppocr.modeling.architectures import build_model
from ppocr.postprocess import build_post_process
from ppocr.utils.save_load import init_model
from ppocr.utils.logging import get_logger
from tools.program import load_config, merge_config, ArgsParser
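
# Export a trained PaddleOCR model to an inference model via paddle.jit.save.
# A typical invocation (a sketch; the config file, checkpoint path, and output
# directory below are illustrative, while -c / -o are the options defined by
# ArgsParser in tools/program.py):
#   python tools/export_model.py \
#       -c configs/rec/rec_mv3_none_bilstm_ctc.yml \
#       -o Global.checkpoints=./output/rec/best_accuracy \
#          Global.save_inference_dir=./inference/rec/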


def main():
    FLAGS = ArgsParser().parse_args()
    config = load_config(FLAGS.config)
    merge_config(FLAGS.opt)
    logger = get_logger()
    # build post process
    post_process_class = build_post_process(config['PostProcess'],
                                            config['Global'])

    # build model
    # for rec algorithm
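    # The post-process class carries the character dictionary; its length sets
    # the head's out_channels so the exported head matches the dictionary size.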
    if hasattr(post_process_class, 'character'):
        char_num = len(getattr(post_process_class, 'character'))
        config['Architecture']["Head"]['out_channels'] = char_num
    model = build_model(config['Architecture'])
    init_model(config, model, logger)
    model.eval()

    save_path = '{}/inference'.format(config['Global']['save_inference_dir'])
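    # "inference" is the filename prefix; paddle.jit.save (called below)
    # typically adds the .pdmodel / .pdiparams suffixes under save_inference_dir.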

    if config['Architecture']['algorithm'] == "SRN":
        other_shape = [
            paddle.static.InputSpec(
                shape=[None, 1, 64, 256], dtype='float32'), [
                    paddle.static.InputSpec(
                        shape=[None, 256, 1],
                        dtype="int64"), paddle.static.InputSpec(
                            shape=[None, 25, 1],
                            dtype="int64"), paddle.static.InputSpec(
                                shape=[None, 8, 25, 25], dtype="int64"),
                    paddle.static.InputSpec(
                        shape=[None, 8, 25, 25], dtype="int64")
                ]
        ]
        model = to_static(model, input_spec=other_shape)
    else:
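        # Default single-image spec in CHW order (batch dim added below);
        # -1 marks a dimension left dynamic in the exported graph.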
        infer_shape = [3, -1, -1]
        if config['Architecture']['model_type'] == "rec":
            infer_shape = [3, 32, -1]  # for rec model, H must be 32
            if 'Transform' in config['Architecture'] and config['Architecture'][
                    'Transform'] is not None and config['Architecture'][
                        'Transform']['name'] == 'TPS':
                logger.info(
                    'When there is a TPS module in the network, variable-length input is not supported; the input size must be the same as during training'
                )
                infer_shape[-1] = 100
        model = to_static(
            model,
            input_spec=[
                paddle.static.InputSpec(
                    shape=[None] + infer_shape, dtype='float32')
            ])

    paddle.jit.save(model, save_path)
    logger.info('inference model is saved to {}'.format(save_path))
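    # A minimal sanity check one could run here (a sketch, not part of the
    # original flow): reload the exported model and feed a dummy
    # recognition-sized input, e.g.
    #   loaded = paddle.jit.load(save_path)
    #   loaded.eval()
    #   out = loaded(paddle.randn([1, 3, 32, 100], dtype='float32'))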


if __name__ == "__main__":
    main()