Model_Info:
    name: "csarron/roberta-base-squad-v1"
    description: "RoBERTa-base fine-tuned on SQuAD v1"
    description_en: "RoBERTa-base fine-tuned on SQuAD v1"
    icon: ""
    from_repo: "https://huggingface.co/csarron/roberta-base-squad-v1"
Task:
- tag_en: "Natural Language Processing"
  tag: "自然语言处理"
  sub_tag_en: "Question Answering"
  sub_tag: "回答问题"
Example:
Datasets: "squad"
Publisher: "csarron"
License: "mit"
Language: "English"
Paper:
- title: 'RoBERTa: A Robustly Optimized BERT Pretraining Approach'
  url: 'http://arxiv.org/abs/1907.11692v1'
IfTraining: 0
IfOnlineDemo: 0
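# Usage note (kept as comments so the file stays valid YAML; not part of the
# schema): a minimal sketch of loading this checkpoint for extractive QA with
# the Hugging Face `transformers` pipeline API. The model name comes from
# `from_repo` above; the question and context strings are illustrative
# placeholders, not examples from the SQuAD dataset.
#
#   from transformers import pipeline
#
#   # Build a question-answering pipeline from the fine-tuned checkpoint.
#   qa = pipeline("question-answering", model="csarron/roberta-base-squad-v1")
#
#   # Extract an answer span from a context passage, SQuAD v1 style.
#   result = qa(
#       question="What was RoBERTa pretrained on?",
#       context="RoBERTa is a robustly optimized BERT variant pretrained "
#               "on over 160GB of English text.",
#   )
#   print(result["answer"], result["score"])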