#coding:utf-8
#   Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Finetuning on classification task """

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import argparse
import ast
import collections
import json
import io
import math
import numpy as np
import os
import six
import sys
import time

import paddle
import paddle.fluid as fluid
import paddlehub as hub
from paddlehub.finetune.task.reading_comprehension_task import write_predictions

hub.common.logger.logger.setLevel("INFO")

# yapf: disable
parser = argparse.ArgumentParser(__doc__)
parser.add_argument("--num_epoch", type=int, default=1, help="Number of epoches for fine-tuning.")
parser.add_argument("--use_gpu", type=ast.literal_eval, default=True, help="Whether use GPU for finetuning, input should be True or False")
parser.add_argument("--checkpoint_dir", type=str, default=None, help="Directory to model checkpoint.")
parser.add_argument("--max_seq_len", type=int, default=384, help="Number of words of the longest seqence.")
parser.add_argument("--batch_size", type=int, default=8, help="Total examples' number in batch for training.")
args = parser.parse_args()
# yapf: enable
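
# Example invocation (the checkpoint path below is illustrative; point --checkpoint_dir
# at the directory produced by the fine-tuning run):
#   python predict.py --checkpoint_dir ./ckpt_squad --max_seq_len 384 --batch_size 8 --use_gpu True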

if __name__ == '__main__':
    # Load the PaddleHub BERT pretrained model
    module = hub.Module(name="bert_uncased_L-12_H-768_A-12")
    inputs, outputs, program = module.context(
        trainable=True, max_seq_len=args.max_seq_len)

    # Download the dataset and use ReadingComprehensionReader to read it.
    # To load the SQuAD 2.0 dataset instead, set version_2_with_negative to True.
    dataset = hub.dataset.SQUAD(version_2_with_negative=False)
    # dataset = hub.dataset.SQUAD(version_2_with_negative=True)
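    # (Compared with SQuAD 1.1, SQuAD 2.0 additionally contains questions that
    # have no answer in the given passage.)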

    reader = hub.reader.ReadingComprehensionReader(
        dataset=dataset,
        vocab_path=module.get_vocab_path(),
        max_seq_len=args.max_seq_len,
        doc_stride=128,
        max_query_length=64)
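    # Passages longer than max_seq_len are split into overlapping windows that
    # slide by doc_stride tokens; questions are truncated to max_query_length tokens.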

    # Use "sequence_output" for token-level output.
    seq_output = outputs["sequence_output"]

    # Set up the feed list for the data feeder
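    # input_ids are token ids, position_ids are position indices, segment_ids
    # distinguish the question from the passage, and input_mask marks padding.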
    feed_list = [
        inputs["input_ids"].name,
        inputs["position_ids"].name,
        inputs["segment_ids"].name,
        inputs["input_mask"].name,
    ]

    # Set up the running config for the PaddleHub Finetune API
    config = hub.RunConfig(
        use_data_parallel=False,
        use_cuda=args.use_gpu,
        batch_size=args.batch_size,
        checkpoint_dir=args.checkpoint_dir,
        strategy=hub.AdamWeightDecayStrategy())
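    # checkpoint_dir should point to the directory used during fine-tuning so
    # that the trained parameters can be restored for prediction.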

    # Define a reading comprehension finetune task by PaddleHub's API
    reading_comprehension_task = hub.ReadingComprehensionTask(
        data_reader=reader,
        feature=seq_output,
        feed_list=feed_list,
        config=config)
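    # The task adds a span-prediction layer on top of the token-level
    # sequence_output to score answer start and end positions.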

    # Data to be predicted
    data = dataset.dev_examples[:10]
    reading_comprehension_task.predict(data=data)
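    # predict() restores the fine-tuned parameters from checkpoint_dir and
    # extracts answer spans for the examples above. To inspect the inputs, one
    # could print each example's question (attribute name assumed from the
    # SQuAD example format):
    # for example in data:
    #     print(example.question_text)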