---
Datasets: ''
Example: null
IfOnlineDemo: 0
IfTraining: 0
Language: Chinese
License: apache-2.0
Model_Info:
  description: This is a re-trained 3-layer RoBERTa-wwm-ext model.
  description_en: This is a re-trained 3-layer RoBERTa-wwm-ext model.
  from_repo: https://huggingface.co/hfl/rbt3
  icon: https://paddlenlp.bj.bcebos.com/models/community/transformer-layer.png
  name: hfl/rbt3
Paper:
- title: Pre-Training with Whole Word Masking for Chinese BERT
  url: http://arxiv.org/abs/1906.08101v3
- title: Revisiting Pre-Trained Models for Chinese Natural Language Processing
  url: http://arxiv.org/abs/2004.13922v2
Publisher: hfl
Task:
- sub_tag: 槽位填充
  sub_tag_en: Fill-Mask
  tag: 自然语言处理
  tag_en: Natural Language Processing