Model_Info:
    name: "bert-large-cased-whole-word-masking-finetuned-squad"
    description: "BERT large model (cased) whole word masking finetuned on SQuAD"
    description_en: "BERT large model (cased) whole word masking finetuned on SQuAD"
    icon: ""
    from_repo: "https://huggingface.co/bert-large-cased-whole-word-masking-finetuned-squad"

Task:
- tag_en: "Natural Language Processing"
  tag: "自然语言处理"
  sub_tag_en: "Question Answering"
  sub_tag: "问答"

Example:

Datasets: "bookcorpus,wikipedia"
Publisher: "huggingface"
License: "apache-2.0" Language: "English" Paper: - title: 'BERT: Pre-training of Deep Bidirectional Transformers for Language Understanding' url: 'http://arxiv.org/abs/1810.04805v2' IfTraining: 0 IfOnlineDemo: 0