# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
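
# Usage sketch (assuming this file lives at tools/export_model.py; the config
# path below is a placeholder for any PaddleOCR YAML config):
#
#     python tools/export_model.py -c <path/to/config.yml> -o ./output/infer/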

import os
import sys

__dir__ = os.path.dirname(os.path.abspath(__file__))
sys.path.append(__dir__)
sys.path.append(os.path.abspath(os.path.join(__dir__, '..')))

import argparse

import paddle
from paddle.jit import to_static

from ppocr.modeling.architectures import build_model
from ppocr.postprocess import build_post_process
from ppocr.utils.save_load import init_model
from ppocr.utils.logging import get_logger
from tools.program import load_config


def parse_args():
    parser = argparse.ArgumentParser()
    parser.add_argument("-c", "--config", help="configuration file to use")
    parser.add_argument(
        "-o", "--output_path", type=str, default='./output/infer/',
        help="directory in which to save the exported inference model")
    return parser.parse_args()


def main():
    FLAGS = parse_args()
    config = load_config(FLAGS.config)
    logger = get_logger()
    # build post process

    post_process_class = build_post_process(config['PostProcess'],
                                            config['Global'])

    # build model
    # for rec algorithms, set the head's output channels from the character dict size
    if hasattr(post_process_class, 'character'):
        char_num = len(getattr(post_process_class, 'character'))
        config['Architecture']["Head"]['out_channels'] = char_num
    model = build_model(config['Architecture'])
    init_model(config, model, logger)
    model.eval()

    save_path = '{}/inference'.format(FLAGS.output_path)
    # trace with a fixed CxHxW: 3x32x100 for rec models, 3x640x640 for det models
    infer_shape = [3, 32, 100] if config['Architecture'][
        'model_type'] != "det" else [3, 640, 640]
    # convert the dygraph model to a static graph; the leading None keeps the
    # batch dimension dynamic
    model = to_static(
        model,
        input_spec=[
            paddle.static.InputSpec(
                shape=[None] + infer_shape, dtype='float32')
        ])
    paddle.jit.save(model, save_path)
    logger.info('inference model is saved to {}'.format(save_path))
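    # Note: with a save prefix like "<output_path>/inference", paddle.jit.save
    # is expected to write the static program and weights as inference.pdmodel
    # and inference.pdiparams (Paddle 2.x naming); verify the exact filenames
    # for the Paddle version in use.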


if __name__ == "__main__":
    main()