Unverified commit c9918cd8, authored by chenxiaozeng, committed by GitHub

Change community models icon (#5671)

* update ERNIE-Layout introduction and ERNIE's tags

* change community models icon
Parent 73f54a75
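The icon change below is purely mechanical: every Model_Info entry's `icon` field is switched from the old transformer-layer image to the shared community icon. As a minimal sketch (not part of this commit), a script like the following could apply the same replacement across per-model YAML files; the directory layout, file glob, and helper name are assumptions, only the two URLs come from the diff.

```python
# Hypothetical helper, not included in this commit: bulk-replace the community
# model icon URL in Model_Info YAML files. Only the two URLs come from the diff.
from pathlib import Path

OLD_ICON = "https://paddlenlp.bj.bcebos.com/models/community/transformer-layer.png"
NEW_ICON = "https://paddlenlp.bj.bcebos.com/images/community_icon.png"

def update_icons(root: str) -> int:
    """Rewrite the icon URL in every .yaml file under `root`; return how many files changed."""
    changed = 0
    for path in Path(root).rglob("*.yaml"):
        text = path.read_text(encoding="utf-8")
        if OLD_ICON in text:
            path.write_text(text.replace(OLD_ICON, NEW_ICON), encoding="utf-8")
            changed += 1
    return changed

if __name__ == "__main__":
    # Assumed usage: run from the community-models metadata directory.
    print(f"updated {update_icons('.')} files")
```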
......@@ -10,7 +10,7 @@ Model_Info:
description_en: 'roberta-large-ner-english: model fine-tuned from roberta-large
for NER task'
from_repo: https://huggingface.co/Jean-Baptiste/roberta-large-ner-english
-icon: https://paddlenlp.bj.bcebos.com/models/community/transformer-layer.png
+icon: https://paddlenlp.bj.bcebos.com/images/community_icon.png
name: Jean-Baptiste/roberta-large-ner-english
Paper: null
Publisher: Jean-Baptiste
......
......@@ -8,7 +8,7 @@ Model_Info:
description: Mengzi-BERT base fin model (Chinese)
description_en: Mengzi-BERT base fin model (Chinese)
from_repo: https://huggingface.co/Langboat/mengzi-bert-base-fin
-icon: https://paddlenlp.bj.bcebos.com/models/community/transformer-layer.png
+icon: https://paddlenlp.bj.bcebos.com/images/community_icon.png
name: Langboat/mengzi-bert-base-fin
Paper:
- title: 'Mengzi: Towards Lightweight yet Ingenious Pre-trained Models for Chinese'
......
......@@ -8,7 +8,7 @@ Model_Info:
description: bert-base-spanish-wwm-cased-xnli
description_en: bert-base-spanish-wwm-cased-xnli
from_repo: https://huggingface.co/Recognai/bert-base-spanish-wwm-cased-xnli
-icon: https://paddlenlp.bj.bcebos.com/models/community/transformer-layer.png
+icon: https://paddlenlp.bj.bcebos.com/images/community_icon.png
name: Recognai/bert-base-spanish-wwm-cased-xnli
Paper: null
Publisher: Recognai
......
......@@ -8,7 +8,7 @@ Model_Info:
description: macaw-large
description_en: macaw-large
from_repo: https://huggingface.co/allenai/macaw-large
-icon: https://paddlenlp.bj.bcebos.com/models/community/transformer-layer.png
+icon: https://paddlenlp.bj.bcebos.com/images/community_icon.png
name: allenai/macaw-large
Paper: null
Publisher: allenai
......
......@@ -8,7 +8,7 @@ Model_Info:
description: SPECTER
description_en: SPECTER
from_repo: https://huggingface.co/allenai/specter
-icon: https://paddlenlp.bj.bcebos.com/models/community/transformer-layer.png
+icon: https://paddlenlp.bj.bcebos.com/images/community_icon.png
name: allenai/specter
Paper:
- title: 'SPECTER: Document-level Representation Learning using Citation-informed
......
......@@ -8,7 +8,7 @@ Model_Info:
description: ''
description_en: ''
from_repo: https://huggingface.co/alvaroalon2/biobert_chemical_ner
-icon: https://paddlenlp.bj.bcebos.com/models/community/transformer-layer.png
+icon: https://paddlenlp.bj.bcebos.com/images/community_icon.png
name: alvaroalon2/biobert_chemical_ner
Paper: null
Publisher: alvaroalon2
......
......@@ -8,7 +8,7 @@ Model_Info:
description: ''
description_en: ''
from_repo: https://huggingface.co/alvaroalon2/biobert_diseases_ner
-icon: https://paddlenlp.bj.bcebos.com/models/community/transformer-layer.png
+icon: https://paddlenlp.bj.bcebos.com/images/community_icon.png
name: alvaroalon2/biobert_diseases_ner
Paper: null
Publisher: alvaroalon2
......
......@@ -8,7 +8,7 @@ Model_Info:
description: ''
description_en: ''
from_repo: https://huggingface.co/alvaroalon2/biobert_genetic_ner
-icon: https://paddlenlp.bj.bcebos.com/models/community/transformer-layer.png
+icon: https://paddlenlp.bj.bcebos.com/images/community_icon.png
name: alvaroalon2/biobert_genetic_ner
Paper: null
Publisher: alvaroalon2
......
......@@ -8,7 +8,7 @@ Model_Info:
description: Passage Reranking Multilingual BERT 🔃 🌍
description_en: Passage Reranking Multilingual BERT 🔃 🌍
from_repo: https://huggingface.co/amberoad/bert-multilingual-passage-reranking-msmarco
-icon: https://paddlenlp.bj.bcebos.com/models/community/transformer-layer.png
+icon: https://paddlenlp.bj.bcebos.com/images/community_icon.png
name: amberoad/bert-multilingual-passage-reranking-msmarco
Paper:
- title: Passage Re-ranking with BERT
......
......@@ -8,7 +8,7 @@ Model_Info:
description: GerPT2
description_en: GerPT2
from_repo: https://huggingface.co/benjamin/gerpt2-large
-icon: https://paddlenlp.bj.bcebos.com/models/community/transformer-layer.png
+icon: https://paddlenlp.bj.bcebos.com/images/community_icon.png
name: benjamin/gerpt2-large
Paper: null
Publisher: benjamin
......
......@@ -8,7 +8,7 @@ Model_Info:
description: 'KcBERT: Korean comments BERT'
description_en: 'KcBERT: Korean comments BERT'
from_repo: https://huggingface.co/beomi/kcbert-base
-icon: https://paddlenlp.bj.bcebos.com/models/community/transformer-layer.png
+icon: https://paddlenlp.bj.bcebos.com/images/community_icon.png
name: beomi/kcbert-base
Paper:
- title: 'BERT: Pre-training of Deep Bidirectional Transformers for Language Understanding'
......
......@@ -8,7 +8,7 @@ Model_Info:
description: pip install transformers sentencepiece
description_en: pip install transformers sentencepiece
from_repo: https://huggingface.co/cointegrated/rubert-tiny
-icon: https://paddlenlp.bj.bcebos.com/models/community/transformer-layer.png
+icon: https://paddlenlp.bj.bcebos.com/images/community_icon.png
name: cointegrated/rubert-tiny
Paper: null
Publisher: cointegrated
......
......@@ -8,7 +8,7 @@ Model_Info:
description: pip install transformers sentencepiece
description_en: pip install transformers sentencepiece
from_repo: https://huggingface.co/cointegrated/rubert-tiny2
-icon: https://paddlenlp.bj.bcebos.com/models/community/transformer-layer.png
+icon: https://paddlenlp.bj.bcebos.com/images/community_icon.png
name: cointegrated/rubert-tiny2
Paper: null
Publisher: cointegrated
......
......@@ -8,7 +8,7 @@ Model_Info:
description: Cross-Encoder for MS Marco
description_en: Cross-Encoder for MS Marco
from_repo: https://huggingface.co/cross-encoder/ms-marco-MiniLM-L-12-v2
-icon: https://paddlenlp.bj.bcebos.com/models/community/transformer-layer.png
+icon: https://paddlenlp.bj.bcebos.com/images/community_icon.png
name: cross-encoder/ms-marco-MiniLM-L-12-v2
Paper: null
Publisher: cross-encoder
......
......@@ -8,7 +8,7 @@ Model_Info:
description: Cross-Encoder for MS Marco
description_en: Cross-Encoder for MS Marco
from_repo: https://huggingface.co/cross-encoder/ms-marco-TinyBERT-L-2
-icon: https://paddlenlp.bj.bcebos.com/models/community/transformer-layer.png
+icon: https://paddlenlp.bj.bcebos.com/images/community_icon.png
name: cross-encoder/ms-marco-TinyBERT-L-2
Paper: null
Publisher: cross-encoder
......
......@@ -8,7 +8,7 @@ Model_Info:
description: Cross-Encoder for Natural Language Inference
description_en: Cross-Encoder for Natural Language Inference
from_repo: https://huggingface.co/cross-encoder/nli-MiniLM2-L6-H768
-icon: https://paddlenlp.bj.bcebos.com/models/community/transformer-layer.png
+icon: https://paddlenlp.bj.bcebos.com/images/community_icon.png
name: cross-encoder/nli-MiniLM2-L6-H768
Paper: null
Publisher: cross-encoder
......
......@@ -8,7 +8,7 @@ Model_Info:
description: Cross-Encoder for Natural Language Inference
description_en: Cross-Encoder for Natural Language Inference
from_repo: https://huggingface.co/cross-encoder/nli-distilroberta-base
-icon: https://paddlenlp.bj.bcebos.com/models/community/transformer-layer.png
+icon: https://paddlenlp.bj.bcebos.com/images/community_icon.png
name: cross-encoder/nli-distilroberta-base
Paper: null
Publisher: cross-encoder
......
......@@ -8,7 +8,7 @@ Model_Info:
description: Cross-Encoder for Natural Language Inference
description_en: Cross-Encoder for Natural Language Inference
from_repo: https://huggingface.co/cross-encoder/nli-roberta-base
-icon: https://paddlenlp.bj.bcebos.com/models/community/transformer-layer.png
+icon: https://paddlenlp.bj.bcebos.com/images/community_icon.png
name: cross-encoder/nli-roberta-base
Paper: null
Publisher: cross-encoder
......
......@@ -8,7 +8,7 @@ Model_Info:
description: Cross-Encoder for Quora Duplicate Questions Detection
description_en: Cross-Encoder for Quora Duplicate Questions Detection
from_repo: https://huggingface.co/cross-encoder/quora-distilroberta-base
-icon: https://paddlenlp.bj.bcebos.com/models/community/transformer-layer.png
+icon: https://paddlenlp.bj.bcebos.com/images/community_icon.png
name: cross-encoder/quora-distilroberta-base
Paper: null
Publisher: cross-encoder
......
......@@ -8,7 +8,7 @@ Model_Info:
description: Cross-Encoder for Quora Duplicate Questions Detection
description_en: Cross-Encoder for Quora Duplicate Questions Detection
from_repo: https://huggingface.co/cross-encoder/quora-roberta-base
-icon: https://paddlenlp.bj.bcebos.com/models/community/transformer-layer.png
+icon: https://paddlenlp.bj.bcebos.com/images/community_icon.png
name: cross-encoder/quora-roberta-base
Paper: null
Publisher: cross-encoder
......
......@@ -8,7 +8,7 @@ Model_Info:
description: Cross-Encoder for Quora Duplicate Questions Detection
description_en: Cross-Encoder for Quora Duplicate Questions Detection
from_repo: https://huggingface.co/cross-encoder/stsb-TinyBERT-L-4
-icon: https://paddlenlp.bj.bcebos.com/models/community/transformer-layer.png
+icon: https://paddlenlp.bj.bcebos.com/images/community_icon.png
name: cross-encoder/stsb-TinyBERT-L-4
Paper: null
Publisher: cross-encoder
......
......@@ -8,7 +8,7 @@ Model_Info:
description: Cross-Encoder for Quora Duplicate Questions Detection
description_en: Cross-Encoder for Quora Duplicate Questions Detection
from_repo: https://huggingface.co/cross-encoder/stsb-distilroberta-base
-icon: https://paddlenlp.bj.bcebos.com/models/community/transformer-layer.png
+icon: https://paddlenlp.bj.bcebos.com/images/community_icon.png
name: cross-encoder/stsb-distilroberta-base
Paper: null
Publisher: cross-encoder
......
......@@ -8,7 +8,7 @@ Model_Info:
description: Cross-Encoder for Quora Duplicate Questions Detection
description_en: Cross-Encoder for Quora Duplicate Questions Detection
from_repo: https://huggingface.co/cross-encoder/stsb-roberta-base
-icon: https://paddlenlp.bj.bcebos.com/models/community/transformer-layer.png
+icon: https://paddlenlp.bj.bcebos.com/images/community_icon.png
name: cross-encoder/stsb-roberta-base
Paper: null
Publisher: cross-encoder
......
......@@ -8,7 +8,7 @@ Model_Info:
description: Cross-Encoder for Quora Duplicate Questions Detection
description_en: Cross-Encoder for Quora Duplicate Questions Detection
from_repo: https://huggingface.co/cross-encoder/stsb-roberta-large
-icon: https://paddlenlp.bj.bcebos.com/models/community/transformer-layer.png
+icon: https://paddlenlp.bj.bcebos.com/images/community_icon.png
name: cross-encoder/stsb-roberta-large
Paper: null
Publisher: cross-encoder
......
......@@ -8,7 +8,7 @@ Model_Info:
description: 🤗 + 📚 dbmdz German BERT models
description_en: 🤗 + 📚 dbmdz German BERT models
from_repo: https://huggingface.co/dbmdz/bert-base-german-cased
-icon: https://paddlenlp.bj.bcebos.com/models/community/transformer-layer.png
+icon: https://paddlenlp.bj.bcebos.com/images/community_icon.png
name: dbmdz/bert-base-german-cased
Paper: null
Publisher: dbmdz
......
......@@ -8,7 +8,7 @@ Model_Info:
description: 🤗 + 📚 dbmdz German BERT models
description_en: 🤗 + 📚 dbmdz German BERT models
from_repo: https://huggingface.co/dbmdz/bert-base-german-uncased
-icon: https://paddlenlp.bj.bcebos.com/models/community/transformer-layer.png
+icon: https://paddlenlp.bj.bcebos.com/images/community_icon.png
name: dbmdz/bert-base-german-uncased
Paper: null
Publisher: dbmdz
......
......@@ -8,7 +8,7 @@ Model_Info:
description: 🤗 + 📚 dbmdz BERT and ELECTRA models
description_en: 🤗 + 📚 dbmdz BERT and ELECTRA models
from_repo: https://huggingface.co/dbmdz/bert-base-italian-uncased
-icon: https://paddlenlp.bj.bcebos.com/models/community/transformer-layer.png
+icon: https://paddlenlp.bj.bcebos.com/images/community_icon.png
name: dbmdz/bert-base-italian-uncased
Paper: null
Publisher: dbmdz
......
......@@ -8,7 +8,7 @@ Model_Info:
description: 🤗 + 📚 dbmdz BERT and ELECTRA models
description_en: 🤗 + 📚 dbmdz BERT and ELECTRA models
from_repo: https://huggingface.co/dbmdz/bert-base-italian-xxl-cased
-icon: https://paddlenlp.bj.bcebos.com/models/community/transformer-layer.png
+icon: https://paddlenlp.bj.bcebos.com/images/community_icon.png
name: dbmdz/bert-base-italian-xxl-cased
Paper: null
Publisher: dbmdz
......
......@@ -8,7 +8,7 @@ Model_Info:
description: 🤗 + 📚 dbmdz Turkish BERT model
description_en: 🤗 + 📚 dbmdz Turkish BERT model
from_repo: https://huggingface.co/dbmdz/bert-base-turkish-128k-cased
-icon: https://paddlenlp.bj.bcebos.com/models/community/transformer-layer.png
+icon: https://paddlenlp.bj.bcebos.com/images/community_icon.png
name: dbmdz/bert-base-turkish-128k-cased
Paper: null
Publisher: dbmdz
......
......@@ -8,7 +8,7 @@ Model_Info:
description: 🤗 + 📚 dbmdz Turkish BERT model
description_en: 🤗 + 📚 dbmdz Turkish BERT model
from_repo: https://huggingface.co/dbmdz/bert-base-turkish-cased
-icon: https://paddlenlp.bj.bcebos.com/models/community/transformer-layer.png
+icon: https://paddlenlp.bj.bcebos.com/images/community_icon.png
name: dbmdz/bert-base-turkish-cased
Paper: null
Publisher: dbmdz
......
......@@ -8,7 +8,7 @@ Model_Info:
description: 🤗 + 📚 dbmdz Turkish BERT model
description_en: 🤗 + 📚 dbmdz Turkish BERT model
from_repo: https://huggingface.co/dbmdz/bert-base-turkish-uncased
-icon: https://paddlenlp.bj.bcebos.com/models/community/transformer-layer.png
+icon: https://paddlenlp.bj.bcebos.com/images/community_icon.png
name: dbmdz/bert-base-turkish-uncased
Paper: null
Publisher: dbmdz
......
......@@ -8,7 +8,7 @@ Model_Info:
description: Aeona | Chatbot
description_en: Aeona | Chatbot
from_repo: https://huggingface.co/deepparag/Aeona
-icon: https://paddlenlp.bj.bcebos.com/models/community/transformer-layer.png
+icon: https://paddlenlp.bj.bcebos.com/images/community_icon.png
name: deepparag/Aeona
Paper: null
Publisher: deepparag
......
......@@ -8,7 +8,7 @@ Model_Info:
description: Overview
description_en: Overview
from_repo: https://huggingface.co/deepset/roberta-base-squad2-distilled
-icon: https://paddlenlp.bj.bcebos.com/models/community/transformer-layer.png
+icon: https://paddlenlp.bj.bcebos.com/images/community_icon.png
name: deepset/roberta-base-squad2-distilled
Paper: null
Publisher: deepset
......
......@@ -8,7 +8,7 @@ Model_Info:
description: bert-base-NER
description_en: bert-base-NER
from_repo: https://huggingface.co/dslim/bert-base-NER
-icon: https://paddlenlp.bj.bcebos.com/models/community/transformer-layer.png
+icon: https://paddlenlp.bj.bcebos.com/images/community_icon.png
name: dslim/bert-base-NER
Paper:
- title: 'BERT: Pre-training of Deep Bidirectional Transformers for Language Understanding'
......
......@@ -8,7 +8,7 @@ Model_Info:
description: bert-base-NER
description_en: bert-base-NER
from_repo: https://huggingface.co/dslim/bert-large-NER
-icon: https://paddlenlp.bj.bcebos.com/models/community/transformer-layer.png
+icon: https://paddlenlp.bj.bcebos.com/images/community_icon.png
name: dslim/bert-large-NER
Paper:
- title: 'BERT: Pre-training of Deep Bidirectional Transformers for Language Understanding'
......
......@@ -8,7 +8,7 @@ Model_Info:
description: bert-base-romanian-cased-v1
description_en: bert-base-romanian-cased-v1
from_repo: https://huggingface.co/dumitrescustefan/bert-base-romanian-cased-v1
-icon: https://paddlenlp.bj.bcebos.com/models/community/transformer-layer.png
+icon: https://paddlenlp.bj.bcebos.com/images/community_icon.png
name: dumitrescustefan/bert-base-romanian-cased-v1
Paper: null
Publisher: dumitrescustefan
......
......@@ -8,7 +8,7 @@ Model_Info:
description: bert-base-romanian-uncased-v1
description_en: bert-base-romanian-uncased-v1
from_repo: https://huggingface.co/dumitrescustefan/bert-base-romanian-uncased-v1
-icon: https://paddlenlp.bj.bcebos.com/models/community/transformer-layer.png
+icon: https://paddlenlp.bj.bcebos.com/images/community_icon.png
name: dumitrescustefan/bert-base-romanian-uncased-v1
Paper: null
Publisher: dumitrescustefan
......
......@@ -8,7 +8,7 @@ Model_Info:
description: ClinicalBERT - Bio + Clinical BERT Model
description_en: ClinicalBERT - Bio + Clinical BERT Model
from_repo: https://huggingface.co/emilyalsentzer/Bio_ClinicalBERT
-icon: https://paddlenlp.bj.bcebos.com/models/community/transformer-layer.png
+icon: https://paddlenlp.bj.bcebos.com/images/community_icon.png
name: emilyalsentzer/Bio_ClinicalBERT
Paper:
- title: Publicly Available Clinical BERT Embeddings
......
......@@ -8,7 +8,7 @@ Model_Info:
description: ClinicalBERT - Bio + Discharge Summary BERT Model
description_en: ClinicalBERT - Bio + Discharge Summary BERT Model
from_repo: https://huggingface.co/emilyalsentzer/Bio_Discharge_Summary_BERT
-icon: https://paddlenlp.bj.bcebos.com/models/community/transformer-layer.png
+icon: https://paddlenlp.bj.bcebos.com/images/community_icon.png
name: emilyalsentzer/Bio_Discharge_Summary_BERT
Paper:
- title: Publicly Available Clinical BERT Embeddings
......
......@@ -8,7 +8,7 @@ Model_Info:
description: Version 1.1 - LM-Adapted
description_en: Version 1.1 - LM-Adapted
from_repo: https://huggingface.co/google/t5-base-lm-adapt
-icon: https://paddlenlp.bj.bcebos.com/models/community/transformer-layer.png
+icon: https://paddlenlp.bj.bcebos.com/images/community_icon.png
name: google/t5-base-lm-adapt
Paper:
- title: GLU Variants Improve Transformer
......
......@@ -8,7 +8,7 @@ Model_Info:
description: Version 1.1 - LM-Adapted
description_en: Version 1.1 - LM-Adapted
from_repo: https://huggingface.co/google/t5-large-lm-adapt
-icon: https://paddlenlp.bj.bcebos.com/models/community/transformer-layer.png
+icon: https://paddlenlp.bj.bcebos.com/images/community_icon.png
name: google/t5-large-lm-adapt
Paper:
- title: GLU Variants Improve Transformer
......
......@@ -8,7 +8,7 @@ Model_Info:
description: Version 1.1 - LM-Adapted
description_en: Version 1.1 - LM-Adapted
from_repo: https://huggingface.co/google/t5-small-lm-adapt
-icon: https://paddlenlp.bj.bcebos.com/models/community/transformer-layer.png
+icon: https://paddlenlp.bj.bcebos.com/images/community_icon.png
name: google/t5-small-lm-adapt
Paper:
- title: GLU Variants Improve Transformer
......
......@@ -8,7 +8,7 @@ Model_Info:
description: Version 1.1
description_en: Version 1.1
from_repo: https://huggingface.co/google/t5-v1_1-base
-icon: https://paddlenlp.bj.bcebos.com/models/community/transformer-layer.png
+icon: https://paddlenlp.bj.bcebos.com/images/community_icon.png
name: google/t5-v1_1-base
Paper:
- title: GLU Variants Improve Transformer
......
......@@ -8,7 +8,7 @@ Model_Info:
description: Version 1.1
description_en: Version 1.1
from_repo: https://huggingface.co/google/t5-v1_1-large
-icon: https://paddlenlp.bj.bcebos.com/models/community/transformer-layer.png
+icon: https://paddlenlp.bj.bcebos.com/images/community_icon.png
name: google/t5-v1_1-large
Paper:
- title: GLU Variants Improve Transformer
......
......@@ -8,7 +8,7 @@ Model_Info:
description: Version 1.1
description_en: Version 1.1
from_repo: https://huggingface.co/google/t5-v1_1-small
-icon: https://paddlenlp.bj.bcebos.com/models/community/transformer-layer.png
+icon: https://paddlenlp.bj.bcebos.com/images/community_icon.png
name: google/t5-v1_1-small
Paper:
- title: GLU Variants Improve Transformer
......
......@@ -8,7 +8,7 @@ Model_Info:
description: Chinese BERT with Whole Word Masking
description_en: Chinese BERT with Whole Word Masking
from_repo: https://huggingface.co/hfl/chinese-bert-wwm-ext
-icon: https://paddlenlp.bj.bcebos.com/models/community/transformer-layer.png
+icon: https://paddlenlp.bj.bcebos.com/images/community_icon.png
name: hfl/chinese-bert-wwm-ext
Paper:
- title: Pre-Training with Whole Word Masking for Chinese BERT
......
......@@ -8,7 +8,7 @@ Model_Info:
description: Chinese BERT with Whole Word Masking
description_en: Chinese BERT with Whole Word Masking
from_repo: https://huggingface.co/hfl/chinese-bert-wwm
-icon: https://paddlenlp.bj.bcebos.com/models/community/transformer-layer.png
+icon: https://paddlenlp.bj.bcebos.com/images/community_icon.png
name: hfl/chinese-bert-wwm
Paper:
- title: Pre-Training with Whole Word Masking for Chinese BERT
......
......@@ -8,7 +8,7 @@ Model_Info:
description: Please use 'Bert' related functions to load this model!
description_en: Please use 'Bert' related functions to load this model!
from_repo: https://huggingface.co/hfl/chinese-roberta-wwm-ext-large
-icon: https://paddlenlp.bj.bcebos.com/models/community/transformer-layer.png
+icon: https://paddlenlp.bj.bcebos.com/images/community_icon.png
name: hfl/chinese-roberta-wwm-ext-large
Paper:
- title: Pre-Training with Whole Word Masking for Chinese BERT
......
......@@ -8,7 +8,7 @@ Model_Info:
description: Please use 'Bert' related functions to load this model!
description_en: Please use 'Bert' related functions to load this model!
from_repo: https://huggingface.co/hfl/chinese-roberta-wwm-ext
-icon: https://paddlenlp.bj.bcebos.com/models/community/transformer-layer.png
+icon: https://paddlenlp.bj.bcebos.com/images/community_icon.png
name: hfl/chinese-roberta-wwm-ext
Paper:
- title: Pre-Training with Whole Word Masking for Chinese BERT
......
......@@ -8,7 +8,7 @@ Model_Info:
description: This is a re-trained 3-layer RoBERTa-wwm-ext model.
description_en: This is a re-trained 3-layer RoBERTa-wwm-ext model.
from_repo: https://huggingface.co/hfl/rbt3
-icon: https://paddlenlp.bj.bcebos.com/models/community/transformer-layer.png
+icon: https://paddlenlp.bj.bcebos.com/images/community_icon.png
name: hfl/rbt3
Paper:
- title: Pre-Training with Whole Word Masking for Chinese BERT
......
......@@ -8,7 +8,7 @@ Model_Info:
description: BERT base model (cased)
description_en: BERT base model (cased)
from_repo: https://huggingface.co/bert-base-cased
-icon: https://paddlenlp.bj.bcebos.com/models/community/transformer-layer.png
+icon: https://paddlenlp.bj.bcebos.com/images/community_icon.png
name: bert-base-cased
Paper:
- title: 'BERT: Pre-training of Deep Bidirectional Transformers for Language Understanding'
......
......@@ -8,7 +8,7 @@ Model_Info:
description: German BERT
description_en: German BERT
from_repo: https://huggingface.co/bert-base-german-cased
-icon: https://paddlenlp.bj.bcebos.com/models/community/transformer-layer.png
+icon: https://paddlenlp.bj.bcebos.com/images/community_icon.png
name: bert-base-german-cased
Paper: null
Publisher: huggingface
......
......@@ -8,7 +8,7 @@ Model_Info:
description: BERT multilingual base model (cased)
description_en: BERT multilingual base model (cased)
from_repo: https://huggingface.co/bert-base-multilingual-cased
-icon: https://paddlenlp.bj.bcebos.com/models/community/transformer-layer.png
+icon: https://paddlenlp.bj.bcebos.com/images/community_icon.png
name: bert-base-multilingual-cased
Paper:
- title: 'BERT: Pre-training of Deep Bidirectional Transformers for Language Understanding'
......
......@@ -8,7 +8,7 @@ Model_Info:
description: BERT multilingual base model (uncased)
description_en: BERT multilingual base model (uncased)
from_repo: https://huggingface.co/bert-base-multilingual-uncased
-icon: https://paddlenlp.bj.bcebos.com/models/community/transformer-layer.png
+icon: https://paddlenlp.bj.bcebos.com/images/community_icon.png
name: bert-base-multilingual-uncased
Paper:
- title: 'BERT: Pre-training of Deep Bidirectional Transformers for Language Understanding'
......
......@@ -8,7 +8,7 @@ Model_Info:
description: BERT base model (uncased)
description_en: BERT base model (uncased)
from_repo: https://huggingface.co/bert-base-uncased
-icon: https://paddlenlp.bj.bcebos.com/models/community/transformer-layer.png
+icon: https://paddlenlp.bj.bcebos.com/images/community_icon.png
name: bert-base-uncased
Paper:
- title: 'BERT: Pre-training of Deep Bidirectional Transformers for Language Understanding'
......
......@@ -8,7 +8,7 @@ Model_Info:
description: BERT large model (cased) whole word masking finetuned on SQuAD
description_en: BERT large model (cased) whole word masking finetuned on SQuAD
from_repo: https://huggingface.co/bert-large-cased-whole-word-masking-finetuned-squad
-icon: https://paddlenlp.bj.bcebos.com/models/community/transformer-layer.png
+icon: https://paddlenlp.bj.bcebos.com/images/community_icon.png
name: bert-large-cased-whole-word-masking-finetuned-squad
Paper:
- title: 'BERT: Pre-training of Deep Bidirectional Transformers for Language Understanding'
......
......@@ -8,7 +8,7 @@ Model_Info:
description: BERT large model (cased) whole word masking
description_en: BERT large model (cased) whole word masking
from_repo: https://huggingface.co/bert-large-cased-whole-word-masking
-icon: https://paddlenlp.bj.bcebos.com/models/community/transformer-layer.png
+icon: https://paddlenlp.bj.bcebos.com/images/community_icon.png
name: bert-large-cased-whole-word-masking
Paper:
- title: 'BERT: Pre-training of Deep Bidirectional Transformers for Language Understanding'
......
......@@ -8,7 +8,7 @@ Model_Info:
description: BERT large model (cased)
description_en: BERT large model (cased)
from_repo: https://huggingface.co/bert-large-cased
-icon: https://paddlenlp.bj.bcebos.com/models/community/transformer-layer.png
+icon: https://paddlenlp.bj.bcebos.com/images/community_icon.png
name: bert-large-cased
Paper:
- title: 'BERT: Pre-training of Deep Bidirectional Transformers for Language Understanding'
......
......@@ -8,7 +8,7 @@ Model_Info:
description: BERT large model (uncased) whole word masking finetuned on SQuAD
description_en: BERT large model (uncased) whole word masking finetuned on SQuAD
from_repo: https://huggingface.co/bert-large-uncased-whole-word-masking-finetuned-squad
-icon: https://paddlenlp.bj.bcebos.com/models/community/transformer-layer.png
+icon: https://paddlenlp.bj.bcebos.com/images/community_icon.png
name: bert-large-uncased-whole-word-masking-finetuned-squad
Paper:
- title: 'BERT: Pre-training of Deep Bidirectional Transformers for Language Understanding'
......
......@@ -8,7 +8,7 @@ Model_Info:
description: BERT large model (uncased) whole word masking
description_en: BERT large model (uncased) whole word masking
from_repo: https://huggingface.co/bert-large-uncased-whole-word-masking
-icon: https://paddlenlp.bj.bcebos.com/models/community/transformer-layer.png
+icon: https://paddlenlp.bj.bcebos.com/images/community_icon.png
name: bert-large-uncased-whole-word-masking
Paper:
- title: 'BERT: Pre-training of Deep Bidirectional Transformers for Language Understanding'
......
......@@ -8,7 +8,7 @@ Model_Info:
description: BERT large model (uncased)
description_en: BERT large model (uncased)
from_repo: https://huggingface.co/bert-large-uncased
-icon: https://paddlenlp.bj.bcebos.com/models/community/transformer-layer.png
+icon: https://paddlenlp.bj.bcebos.com/images/community_icon.png
name: bert-large-uncased
Paper:
- title: 'BERT: Pre-training of Deep Bidirectional Transformers for Language Understanding'
......
......@@ -8,7 +8,7 @@ Model_Info:
description: Model Card for DistilBERT base multilingual (cased)
description_en: Model Card for DistilBERT base multilingual (cased)
from_repo: https://huggingface.co/distilbert-base-multilingual-cased
-icon: https://paddlenlp.bj.bcebos.com/models/community/transformer-layer.png
+icon: https://paddlenlp.bj.bcebos.com/images/community_icon.png
name: distilbert-base-multilingual-cased
Paper:
- title: 'DistilBERT, a distilled version of BERT: smaller, faster, cheaper and lighter'
......
......@@ -8,7 +8,7 @@ Model_Info:
description: DistilGPT2
description_en: DistilGPT2
from_repo: https://huggingface.co/distilgpt2
-icon: https://paddlenlp.bj.bcebos.com/models/community/transformer-layer.png
+icon: https://paddlenlp.bj.bcebos.com/images/community_icon.png
name: distilgpt2
Paper:
- title: 'DistilBERT, a distilled version of BERT: smaller, faster, cheaper and lighter'
......
......@@ -8,7 +8,7 @@ Model_Info:
description: Model Card for DistilRoBERTa base
description_en: Model Card for DistilRoBERTa base
from_repo: https://huggingface.co/distilroberta-base
-icon: https://paddlenlp.bj.bcebos.com/models/community/transformer-layer.png
+icon: https://paddlenlp.bj.bcebos.com/images/community_icon.png
name: distilroberta-base
Paper:
- title: 'DistilBERT, a distilled version of BERT: smaller, faster, cheaper and lighter'
......
......@@ -8,7 +8,7 @@ Model_Info:
description: GPT-2 Large
description_en: GPT-2 Large
from_repo: https://huggingface.co/gpt2-large
-icon: https://paddlenlp.bj.bcebos.com/models/community/transformer-layer.png
+icon: https://paddlenlp.bj.bcebos.com/images/community_icon.png
name: gpt2-large
Paper:
- title: Quantifying the Carbon Emissions of Machine Learning
......
......@@ -8,7 +8,7 @@ Model_Info:
description: GPT-2 Medium
description_en: GPT-2 Medium
from_repo: https://huggingface.co/gpt2-medium
-icon: https://paddlenlp.bj.bcebos.com/models/community/transformer-layer.png
+icon: https://paddlenlp.bj.bcebos.com/images/community_icon.png
name: gpt2-medium
Paper:
- title: Quantifying the Carbon Emissions of Machine Learning
......
......@@ -8,7 +8,7 @@ Model_Info:
description: GPT-2
description_en: GPT-2
from_repo: https://huggingface.co/gpt2
-icon: https://paddlenlp.bj.bcebos.com/models/community/transformer-layer.png
+icon: https://paddlenlp.bj.bcebos.com/images/community_icon.png
name: gpt2
Paper: null
Publisher: huggingface
......
......@@ -8,7 +8,7 @@ Model_Info:
description: IndoBERT Base Model (phase1 - uncased)
description_en: IndoBERT Base Model (phase1 - uncased)
from_repo: https://huggingface.co/indobenchmark/indobert-base-p1
-icon: https://paddlenlp.bj.bcebos.com/models/community/transformer-layer.png
+icon: https://paddlenlp.bj.bcebos.com/images/community_icon.png
name: indobenchmark/indobert-base-p1
Paper:
- title: 'IndoNLU: Benchmark and Resources for Evaluating Indonesian Natural Language
......