info.yaml
Datasets: conll2003
Example: null
IfOnlineDemo: 0
IfTraining: 0
Language: English
License: mit
Model_Info:
  description: bert-base-NER
  description_en: bert-base-NER
  from_repo: https://huggingface.co/dslim/bert-base-NER
  icon: https://paddlenlp.bj.bcebos.com/models/community/transformer-layer.png
  name: dslim/bert-base-NER
Paper:
- title: 'BERT: Pre-training of Deep Bidirectional Transformers for Language Understanding'
  url: http://arxiv.org/abs/1810.04805v2
Publisher: dslim
Task:
- sub_tag: Token分类
  sub_tag_en: Token Classification
  tag: 自然语言处理
  tag_en: Natural Language Processing
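# Note: a minimal usage sketch, not part of the original metadata. It assumes the
# Hugging Face `transformers` library and loads the checkpoint named in
# Model_Info.name; the example sentence is illustrative only.
#
#   from transformers import pipeline
#
#   # Build a token-classification (NER) pipeline for dslim/bert-base-NER,
#   # which was fine-tuned on CoNLL-2003 (PER, LOC, ORG, MISC labels).
#   ner = pipeline(
#       "token-classification",
#       model="dslim/bert-base-NER",
#       aggregation_strategy="simple",  # merge word pieces into whole entities
#   )
#
#   # Prints a list of dicts with entity_group, score, and character spans.
#   print(ner("My name is Wolfgang and I live in Berlin."))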