# info.yaml
Datasets: openwebtext
Example: null
IfOnlineDemo: 0
IfTraining: 0
Language: English
License: apache-2.0
Model_Info:
  description: DistilGPT2
  description_en: DistilGPT2
  from_repo: https://huggingface.co/distilgpt2
  icon: https://paddlenlp.bj.bcebos.com/models/community/transformer-layer.png
  name: distilgpt2
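# The commented snippet below is an illustrative sketch, not part of this metadata
# schema: it loads the upstream checkpoint referenced in `from_repo` with the
# Hugging Face `transformers` Auto classes and generates a short continuation.
# Loading the same weights through a PaddleNLP API is not shown, since this file
# does not specify one.
#
#   from transformers import AutoTokenizer, AutoModelForCausalLM
#
#   tokenizer = AutoTokenizer.from_pretrained("distilgpt2")
#   model = AutoModelForCausalLM.from_pretrained("distilgpt2")
#
#   inputs = tokenizer("Distillation makes language models", return_tensors="pt")
#   outputs = model.generate(**inputs, max_new_tokens=20, do_sample=False)
#   print(tokenizer.decode(outputs[0], skip_special_tokens=True))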
Paper:
- title: 'DistilBERT, a distilled version of BERT: smaller, faster, cheaper and lighter'
  url: http://arxiv.org/abs/1910.01108v4
- title: Can Model Compression Improve NLP Fairness
  url: http://arxiv.org/abs/2201.08542v1
- title: Mitigating Gender Bias in Distilled Language Models via Counterfactual Role
    Reversal
  url: http://arxiv.org/abs/2203.12574v1
- title: Quantifying the Carbon Emissions of Machine Learning
  url: http://arxiv.org/abs/1910.09700v2
- title: Distilling the Knowledge in a Neural Network
  url: http://arxiv.org/abs/1503.02531v1
Publisher: huggingface
Task:
- sub_tag: 文本生成
  sub_tag_en: Text Generation
  tag: 自然语言处理
  tag_en: Natural Language Processing