Model_Info:
   name: "bert-large-cased-whole-word-masking-finetuned-squad"
   description: "BERT large model (cased) whole word masking finetuned on SQuAD"
   description_en: "BERT large model (cased) whole word masking finetuned on SQuAD"
   icon: ""
   from_repo: "https://huggingface.co/bert-large-cased-whole-word-masking-finetuned-squad"

Task:
- tag_en: "Natural Language Processing"
  tag: "自然语言处理"
  sub_tag_en: "Question Answering"
  sub_tag: "问答"

Example:

Datasets: "bookcorpus,wikipedia"
Publisher: "huggingface"
License: "apache-2.0"
Language: "English"
Paper:
   - title: 'BERT: Pre-training of Deep Bidirectional Transformers for Language Understanding'
     url: 'http://arxiv.org/abs/1810.04805v2'
IfTraining: 0
IfOnlineDemo: 0
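
# A minimal usage sketch (an assumption, not part of this schema): loading the
# checkpoint named in from_repo with the Hugging Face transformers library for
# extractive question answering. Kept as a comment so the file stays valid YAML.
#
#   from transformers import pipeline
#
#   # Build a question-answering pipeline from the checkpoint above.
#   qa = pipeline(
#       "question-answering",
#       model="bert-large-cased-whole-word-masking-finetuned-squad",
#   )
#
#   # Extract the answer span from the given context.
#   result = qa(
#       question="What was BERT pretrained on?",
#       context="BERT was pretrained on BookCorpus and English Wikipedia.",
#   )
#   print(result["answer"], result["score"])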