Model_Info:
   name: "hfl/rbt3"
   description: "This is a re-trained 3-layer RoBERTa-wwm-ext model."
   description_en: "This is a re-trained 3-layer RoBERTa-wwm-ext model."
   icon: ""
   from_repo: "https://huggingface.co/hfl/rbt3"

Task:
- tag_en: "Natural Language Processing"
  tag: "自然语言处理"
  sub_tag_en: "Fill-Mask"
  sub_tag: "槽位填充"

Example:

Datasets: ""
Pulisher: "hfl"
License: "apache-2.0"
Language: "Chinese"
Paper:
   - title: 'Pre-Training with Whole Word Masking for Chinese BERT'
     url: 'http://arxiv.org/abs/1906.08101v3'
   - title: 'Revisiting Pre-Trained Models for Chinese Natural Language Processing'
     url: 'http://arxiv.org/abs/2004.13922v2'
IfTraining: 0
IfOnlineDemo: 0
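
# A minimal fill-mask usage sketch for this checkpoint, assuming the Hugging Face
# `transformers` library; `hfl/rbt3` is the model listed in from_repo, and the
# example sentence is illustrative only.
#
#   from transformers import pipeline
#
#   # rbt3 is a masked language model, so the "fill-mask" pipeline applies.
#   fill_mask = pipeline("fill-mask", model="hfl/rbt3")
#
#   # Predict candidates for the [MASK] token in a Chinese sentence.
#   for candidate in fill_mask("北京是中国的[MASK]都。"):
#       print(candidate["token_str"], candidate["score"])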