Model_Info:
  name: "hfl/rbt3"
  description: "This is a re-trained 3-layer RoBERTa-wwm-ext model."
  description_en: "This is a re-trained 3-layer RoBERTa-wwm-ext model."
  icon: ""
  from_repo: "https://huggingface.co/hfl/rbt3"

Task:
- tag_en: "Natural Language Processing"
  tag: "自然语言处理"
  sub_tag_en: "Fill-Mask"
  sub_tag: "槽位填充"

Example:

Datasets: ""
Publisher: "hfl"
License: "apache-2.0"
Language: "Chinese"

Paper:
- title: 'Pre-Training with Whole Word Masking for Chinese BERT'
  url: 'http://arxiv.org/abs/1906.08101v3'
- title: 'Revisiting Pre-Trained Models for Chinese Natural Language Processing'
  url: 'http://arxiv.org/abs/2004.13922v2'

IfTraining: 0
IfOnlineDemo: 0
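
# The Example field above is empty in the metadata. The sketch below is a minimal,
# hedged illustration of how a Fill-Mask checkpoint like hfl/rbt3 is commonly used;
# it assumes the Hugging Face `transformers` library and is not part of the original
# metadata or an official example from the hfl repository.
#
#   from transformers import pipeline
#
#   # hfl/rbt3 is a 3-layer Chinese RoBERTa-wwm-ext checkpoint; it follows the BERT
#   # architecture, so the standard fill-mask pipeline and the [MASK] token apply.
#   fill_mask = pipeline("fill-mask", model="hfl/rbt3")
#
#   # Predict the masked character in a Chinese sentence.
#   for prediction in fill_mask("生活的真谛是[MASK]。"):
#       print(prediction["token_str"], round(prediction["score"], 4))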