diff --git a/modelcenter/community/allenai/macaw-large/introduction_cn.ipynb b/modelcenter/community/allenai/macaw-large/introduction_cn.ipynb
index 905d2152e0249a2ef550a52e72d7dfdbfb37df48..0a947b84d22e0a3b1b8a42b087a8b6357760353d 100644
--- a/modelcenter/community/allenai/macaw-large/introduction_cn.ipynb
+++ b/modelcenter/community/allenai/macaw-large/introduction_cn.ipynb
@@ -53,7 +53,8 @@
  "\n",
  "model = AutoModel.from_pretrained(\"allenai/macaw-large\")\n",
  "input_ids = paddle.randint(100, 200, shape=[1, 20])\n",
- "print(model(input_ids))"
+ "decoder_input_ids = paddle.randint(100, 200, shape=[1, 20])\n",
+ "print(model(input_ids, decoder_input_ids=decoder_input_ids))"
  ]
 },
 {
diff --git a/modelcenter/community/allenai/macaw-large/introduction_en.ipynb b/modelcenter/community/allenai/macaw-large/introduction_en.ipynb
index 440ebda1e1109ce40b4923cc8f0e45535a43e9f9..3cf340277aa99c9302f7605cffb1d7e9a08577a3 100644
--- a/modelcenter/community/allenai/macaw-large/introduction_en.ipynb
+++ b/modelcenter/community/allenai/macaw-large/introduction_en.ipynb
@@ -53,7 +53,8 @@
  "\n",
  "model = AutoModel.from_pretrained(\"allenai/macaw-large\")\n",
  "input_ids = paddle.randint(100, 200, shape=[1, 20])\n",
- "print(model(input_ids))"
+ "decoder_input_ids = paddle.randint(100, 200, shape=[1, 20])\n",
+ "print(model(input_ids, decoder_input_ids=decoder_input_ids))"
  ]
 },
 {
diff --git a/modelcenter/community/allenai/specter/introduction_cn.ipynb b/modelcenter/community/allenai/specter/introduction_cn.ipynb
index 32ced08c68082a3703bb86db8953dd25422275b0..53c1a20942295722e64c60c83db163b196f329b1 100644
--- a/modelcenter/community/allenai/specter/introduction_cn.ipynb
+++ b/modelcenter/community/allenai/specter/introduction_cn.ipynb
@@ -44,9 +44,9 @@
  "outputs": [],
  "source": [
  "import paddle\n",
- "from paddlenlp.transformers import AutoModel\n",
+ "from paddlenlp.transformers import BertModel\n",
  "\n",
- "model = AutoModel.from_pretrained(\"allenai/specter\")\n",
+ "model = BertModel.from_pretrained(\"allenai/specter\")\n",
  "input_ids = paddle.randint(100, 200, shape=[1, 20])\n",
  "print(model(input_ids))"
  ]
diff --git a/modelcenter/community/allenai/specter/introduction_en.ipynb b/modelcenter/community/allenai/specter/introduction_en.ipynb
index a77c6968c20725126df481f285e40f5c212e18c1..c210f33561f01da9a147eb290c824e0fa5009db6 100644
--- a/modelcenter/community/allenai/specter/introduction_en.ipynb
+++ b/modelcenter/community/allenai/specter/introduction_en.ipynb
@@ -44,9 +44,9 @@
  "outputs": [],
  "source": [
  "import paddle\n",
- "from paddlenlp.transformers import AutoModel\n",
+ "from paddlenlp.transformers import BertModel\n",
  "\n",
- "model = AutoModel.from_pretrained(\"allenai/specter\")\n",
+ "model = BertModel.from_pretrained(\"allenai/specter\")\n",
  "input_ids = paddle.randint(100, 200, shape=[1, 20])\n",
  "print(model(input_ids))"
  ]
diff --git a/modelcenter/community/cross-encoder/ms-marco-MiniLM-L-12-v2/introduction_cn.ipynb b/modelcenter/community/cross-encoder/ms-marco-MiniLM-L-12-v2/introduction_cn.ipynb
index 1f1adfe080b64a0f419ef6b2be1bd3a3b82b936e..405f5ab56f2819dc913f3f6d57e0ccc659d5d64b 100644
--- a/modelcenter/community/cross-encoder/ms-marco-MiniLM-L-12-v2/introduction_cn.ipynb
+++ b/modelcenter/community/cross-encoder/ms-marco-MiniLM-L-12-v2/introduction_cn.ipynb
@@ -50,9 +50,9 @@
  "outputs": [],
  "source": [
  "import paddle\n",
- "from paddlenlp.transformers import AutoModel\n",
+ "from paddlenlp.transformers import BertForSequenceClassification\n",
  "\n",
- "model = AutoModel.from_pretrained(\"cross-encoder/ms-marco-MiniLM-L-12-v2\")\n",
+ "model = BertForSequenceClassification.from_pretrained(\"cross-encoder/ms-marco-MiniLM-L-12-v2\")\n",
  "input_ids = paddle.randint(100, 200, shape=[1, 20])\n",
  "print(model(input_ids))"
  ]
diff --git a/modelcenter/community/cross-encoder/ms-marco-MiniLM-L-12-v2/introduction_en.ipynb b/modelcenter/community/cross-encoder/ms-marco-MiniLM-L-12-v2/introduction_en.ipynb
index 0389e1e264635fd2f3fd865742e8f52dde21ff36..6868bfa726cab8a7d866a24c83a2e2f487eab876 100644
--- a/modelcenter/community/cross-encoder/ms-marco-MiniLM-L-12-v2/introduction_en.ipynb
+++ b/modelcenter/community/cross-encoder/ms-marco-MiniLM-L-12-v2/introduction_en.ipynb
@@ -50,9 +50,9 @@
  "outputs": [],
  "source": [
  "import paddle\n",
- "from paddlenlp.transformers import AutoModel\n",
+ "from paddlenlp.transformers import BertForSequenceClassification\n",
  "\n",
- "model = AutoModel.from_pretrained(\"cross-encoder/ms-marco-MiniLM-L-12-v2\")\n",
+ "model = BertForSequenceClassification.from_pretrained(\"cross-encoder/ms-marco-MiniLM-L-12-v2\")\n",
  "input_ids = paddle.randint(100, 200, shape=[1, 20])\n",
  "print(model(input_ids))"
  ]
diff --git a/modelcenter/community/cross-encoder/ms-marco-TinyBERT-L-2/introduction_cn.ipynb b/modelcenter/community/cross-encoder/ms-marco-TinyBERT-L-2/introduction_cn.ipynb
index 84472703becaacc5af9714e66d2f46b94a530acd..4ced619643f38743c11c1b40b082f20954cac634 100644
--- a/modelcenter/community/cross-encoder/ms-marco-TinyBERT-L-2/introduction_cn.ipynb
+++ b/modelcenter/community/cross-encoder/ms-marco-TinyBERT-L-2/introduction_cn.ipynb
@@ -50,9 +50,9 @@
  "outputs": [],
  "source": [
  "import paddle\n",
- "from paddlenlp.transformers import AutoModel\n",
+ "from paddlenlp.transformers import BertModel\n",
  "\n",
- "model = AutoModel.from_pretrained(\"cross-encoder/ms-marco-TinyBERT-L-2\")\n",
+ "model = BertModel.from_pretrained(\"cross-encoder/ms-marco-TinyBERT-L-2\")\n",
  "input_ids = paddle.randint(100, 200, shape=[1, 20])\n",
  "print(model(input_ids))"
  ]
@@ -103,7 +103,11 @@
  ]
  }
  ],
- "metadata": {},
+ "metadata": {
+ "language_info": {
+ "name": "python"
+ }
+ },
  "nbformat": 4,
  "nbformat_minor": 5
 }
diff --git a/modelcenter/community/cross-encoder/ms-marco-TinyBERT-L-2/introduction_en.ipynb b/modelcenter/community/cross-encoder/ms-marco-TinyBERT-L-2/introduction_en.ipynb
index b3862b25839c0e2e0824103032b27d887d769b13..99f02830fbfb73bc5517d649236ae0c6c8d81d78 100644
--- a/modelcenter/community/cross-encoder/ms-marco-TinyBERT-L-2/introduction_en.ipynb
+++ b/modelcenter/community/cross-encoder/ms-marco-TinyBERT-L-2/introduction_en.ipynb
@@ -50,9 +50,9 @@
  "outputs": [],
  "source": [
  "import paddle\n",
- "from paddlenlp.transformers import AutoModel\n",
+ "from paddlenlp.transformers import BertModel\n",
  "\n",
- "model = AutoModel.from_pretrained(\"cross-encoder/ms-marco-TinyBERT-L-2\")\n",
+ "model = BertModel.from_pretrained(\"cross-encoder/ms-marco-TinyBERT-L-2\")\n",
  "input_ids = paddle.randint(100, 200, shape=[1, 20])\n",
  "print(model(input_ids))"
  ]
@@ -103,7 +103,11 @@
  ]
  }
  ],
- "metadata": {},
+ "metadata": {
+ "language_info": {
+ "name": "python"
+ }
+ },
  "nbformat": 4,
  "nbformat_minor": 5
 }
diff --git a/modelcenter/community/cross-encoder/nli-MiniLM2-L6-H768/introduction_cn.ipynb b/modelcenter/community/cross-encoder/nli-MiniLM2-L6-H768/introduction_cn.ipynb
index b7393f4f2f689100921d70305aa82b9e87b1fb1d..8eef3fe39c0f43abbf98d10692ffdad5990208d5 100644
--- a/modelcenter/community/cross-encoder/nli-MiniLM2-L6-H768/introduction_cn.ipynb
+++ b/modelcenter/community/cross-encoder/nli-MiniLM2-L6-H768/introduction_cn.ipynb
@@ -61,9 +61,9 @@
  "outputs": [],
  "source": [
  "import paddle\n",
- "from paddlenlp.transformers import AutoModel\n",
+ "from paddlenlp.transformers import RobertaForSequenceClassification\n",
  "\n",
- "model = AutoModel.from_pretrained(\"cross-encoder/nli-MiniLM2-L6-H768\")\n",
+ "model = RobertaForSequenceClassification.from_pretrained(\"cross-encoder/nli-MiniLM2-L6-H768\")\n",
  "input_ids = paddle.randint(100, 200, shape=[1, 20])\n",
  "print(model(input_ids))"
  ]
diff --git a/modelcenter/community/cross-encoder/nli-MiniLM2-L6-H768/introduction_en.ipynb b/modelcenter/community/cross-encoder/nli-MiniLM2-L6-H768/introduction_en.ipynb
index c62c2aff004312798f3301c3b6e50b48564731f1..859faaee7390513224522e4b134a56fc436ff2b0 100644
--- a/modelcenter/community/cross-encoder/nli-MiniLM2-L6-H768/introduction_en.ipynb
+++ b/modelcenter/community/cross-encoder/nli-MiniLM2-L6-H768/introduction_en.ipynb
@@ -61,9 +61,9 @@
  "outputs": [],
  "source": [
  "import paddle\n",
- "from paddlenlp.transformers import AutoModel\n",
+ "from paddlenlp.transformers import RobertaForSequenceClassification\n",
  "\n",
- "model = AutoModel.from_pretrained(\"cross-encoder/nli-MiniLM2-L6-H768\")\n",
+ "model = RobertaForSequenceClassification.from_pretrained(\"cross-encoder/nli-MiniLM2-L6-H768\")\n",
  "input_ids = paddle.randint(100, 200, shape=[1, 20])\n",
  "print(model(input_ids))"
  ]
diff --git a/modelcenter/community/cross-encoder/stsb-TinyBERT-L-4/introduction_cn.ipynb b/modelcenter/community/cross-encoder/stsb-TinyBERT-L-4/introduction_cn.ipynb
index 53b39a12b9d6688a13bee885f22dc558f9b83a8b..52c0e0e466c1cae2135980287c695749a9287ad2 100644
--- a/modelcenter/community/cross-encoder/stsb-TinyBERT-L-4/introduction_cn.ipynb
+++ b/modelcenter/community/cross-encoder/stsb-TinyBERT-L-4/introduction_cn.ipynb
@@ -156,9 +156,9 @@
  ],
  "source": [
  "import paddle\n",
- "from paddlenlp.transformers import AutoModel\n",
+ "from paddlenlp.transformers import BertForSequenceClassification\n",
  "\n",
- "model = AutoModel.from_pretrained(\"cross-encoder/stsb-TinyBERT-L-4\")\n",
+ "model = BertForSequenceClassification.from_pretrained(\"cross-encoder/stsb-TinyBERT-L-4\")\n",
  "input_ids = paddle.randint(100, 200, shape=[1, 20])\n",
  "print(model(input_ids))"
  ]
diff --git a/modelcenter/community/cross-encoder/stsb-TinyBERT-L-4/introduction_en.ipynb b/modelcenter/community/cross-encoder/stsb-TinyBERT-L-4/introduction_en.ipynb
index 0183f00b99bcf4f57e2b7e641928103ba9628d10..4a4c4259c49d97c3c0fae3705a8d8a4bd569f896 100644
--- a/modelcenter/community/cross-encoder/stsb-TinyBERT-L-4/introduction_en.ipynb
+++ b/modelcenter/community/cross-encoder/stsb-TinyBERT-L-4/introduction_en.ipynb
@@ -52,9 +52,9 @@
  "outputs": [],
  "source": [
  "import paddle\n",
- "from paddlenlp.transformers import AutoModel\n",
+ "from paddlenlp.transformers import BertForSequenceClassification\n",
  "\n",
- "model = AutoModel.from_pretrained(\"cross-encoder/stsb-TinyBERT-L-4\")\n",
+ "model = BertForSequenceClassification.from_pretrained(\"cross-encoder/stsb-TinyBERT-L-4\")\n",
  "input_ids = paddle.randint(100, 200, shape=[1, 20])\n",
  "print(model(input_ids))"
  ]
diff --git a/modelcenter/community/emilyalsentzer/Bio_ClinicalBERT/introduction_cn.ipynb b/modelcenter/community/emilyalsentzer/Bio_ClinicalBERT/introduction_cn.ipynb
index 7064ebfbc0cf914da1739fb2647defb91a11c2d0..46d871dd42b98bfd367bd19303846c6bdd003d8f 100644
--- a/modelcenter/community/emilyalsentzer/Bio_ClinicalBERT/introduction_cn.ipynb
+++ b/modelcenter/community/emilyalsentzer/Bio_ClinicalBERT/introduction_cn.ipynb
@@ -50,9 +50,9 @@
  "outputs": [],
  "source": [
  "import paddle\n",
- "from paddlenlp.transformers import AutoModel\n",
+ "from paddlenlp.transformers import BertForMaskedLM\n",
  "\n",
- "model = AutoModel.from_pretrained(\"emilyalsentzer/Bio_ClinicalBERT\")\n",
+ "model = BertForMaskedLM.from_pretrained(\"emilyalsentzer/Bio_ClinicalBERT\")\n",
  "input_ids = paddle.randint(100, 200, shape=[1, 20])\n",
  "print(model(input_ids))"
  ]
diff --git a/modelcenter/community/emilyalsentzer/Bio_ClinicalBERT/introduction_en.ipynb b/modelcenter/community/emilyalsentzer/Bio_ClinicalBERT/introduction_en.ipynb
index fab51acf8dcc32f955310509b092a8d228657c83..faab51f8528012dcc42be601a88783a562d7436b 100644
--- a/modelcenter/community/emilyalsentzer/Bio_ClinicalBERT/introduction_en.ipynb
+++ b/modelcenter/community/emilyalsentzer/Bio_ClinicalBERT/introduction_en.ipynb
@@ -50,9 +50,9 @@
  "outputs": [],
  "source": [
  "import paddle\n",
- "from paddlenlp.transformers import AutoModel\n",
+ "from paddlenlp.transformers import BertForMaskedLM\n",
  "\n",
- "model = AutoModel.from_pretrained(\"emilyalsentzer/Bio_ClinicalBERT\")\n",
+ "model = BertForMaskedLM.from_pretrained(\"emilyalsentzer/Bio_ClinicalBERT\")\n",
  "input_ids = paddle.randint(100, 200, shape=[1, 20])\n",
  "print(model(input_ids))"
  ]
diff --git a/modelcenter/community/google/t5-base-lm-adapt/introduction_cn.ipynb b/modelcenter/community/google/t5-base-lm-adapt/introduction_cn.ipynb
index 8e4a107f98c9710ec2d8a01652ef5bab7e90efa5..8f6246a2619deeb300bd2d02ecba108db33be12c 100644
--- a/modelcenter/community/google/t5-base-lm-adapt/introduction_cn.ipynb
+++ b/modelcenter/community/google/t5-base-lm-adapt/introduction_cn.ipynb
@@ -66,7 +66,8 @@
  "\n",
  "model = AutoModel.from_pretrained(\"google/t5-base-lm-adapt\")\n",
  "input_ids = paddle.randint(100, 200, shape=[1, 20])\n",
- "print(model(input_ids))"
+ "decoder_input_ids = paddle.randint(100, 200, shape=[1, 20])\n",
+ "print(model(input_ids, decoder_input_ids=decoder_input_ids))"
  ]
 },
 {
diff --git a/modelcenter/community/google/t5-base-lm-adapt/introduction_en.ipynb b/modelcenter/community/google/t5-base-lm-adapt/introduction_en.ipynb
index c1a177787bace69a5b7f32d56c47dd73fcaaed08..adf7431ee71e209ff44a77f984ad55132d6d84ca 100644
--- a/modelcenter/community/google/t5-base-lm-adapt/introduction_en.ipynb
+++ b/modelcenter/community/google/t5-base-lm-adapt/introduction_en.ipynb
@@ -66,7 +66,8 @@
  "\n",
  "model = AutoModel.from_pretrained(\"google/t5-base-lm-adapt\")\n",
  "input_ids = paddle.randint(100, 200, shape=[1, 20])\n",
- "print(model(input_ids))"
+ "decoder_input_ids = paddle.randint(100, 200, shape=[1, 20])\n",
+ "print(model(input_ids, decoder_input_ids=decoder_input_ids))"
  ]
 },
 {
diff --git a/modelcenter/community/google/t5-large-lm-adapt/introduction_cn.ipynb b/modelcenter/community/google/t5-large-lm-adapt/introduction_cn.ipynb
index ce6a76d6967ca309d287645ba3c89fcce983e823..b554bc5dc50454f38b9378a76d1a94960a7b7bdc 100644
--- a/modelcenter/community/google/t5-large-lm-adapt/introduction_cn.ipynb
+++ b/modelcenter/community/google/t5-large-lm-adapt/introduction_cn.ipynb
@@ -74,7 +74,8 @@
  "\n",
  "model = AutoModel.from_pretrained(\"google/t5-large-lm-adapt\")\n",
  "input_ids = paddle.randint(100, 200, shape=[1, 20])\n",
- "print(model(input_ids))"
+ "decoder_input_ids = paddle.randint(100, 200, shape=[1, 20])\n",
+ "print(model(input_ids, decoder_input_ids=decoder_input_ids))"
  ]
 },
 {
diff --git a/modelcenter/community/google/t5-large-lm-adapt/introduction_en.ipynb b/modelcenter/community/google/t5-large-lm-adapt/introduction_en.ipynb
index fdc71cd138926da6979d64bad9d23f67d5914135..ce7308d4a030ad7975407112daafb7fb064ab524 100644
--- a/modelcenter/community/google/t5-large-lm-adapt/introduction_en.ipynb
+++ b/modelcenter/community/google/t5-large-lm-adapt/introduction_en.ipynb
@@ -74,7 +74,8 @@
  "\n",
  "model = AutoModel.from_pretrained(\"google/t5-large-lm-adapt\")\n",
  "input_ids = paddle.randint(100, 200, shape=[1, 20])\n",
- "print(model(input_ids))"
+ "decoder_input_ids = paddle.randint(100, 200, shape=[1, 20])\n",
+ "print(model(input_ids, decoder_input_ids=decoder_input_ids))"
  ]
 },
 {
diff --git a/modelcenter/community/google/t5-small-lm-adapt/introduction_cn.ipynb b/modelcenter/community/google/t5-small-lm-adapt/introduction_cn.ipynb
index 641edd7a0924ca16d7d1b2fa9ba620ed8e1cf799..01dac05e055b6d85f3b00eafb2490690f20d22d3 100644
--- a/modelcenter/community/google/t5-small-lm-adapt/introduction_cn.ipynb
+++ b/modelcenter/community/google/t5-small-lm-adapt/introduction_cn.ipynb
@@ -74,7 +74,8 @@
  "\n",
  "model = AutoModel.from_pretrained(\"google/t5-small-lm-adapt\")\n",
  "input_ids = paddle.randint(100, 200, shape=[1, 20])\n",
- "print(model(input_ids))"
+ "decoder_input_ids = paddle.randint(100, 200, shape=[1, 20])\n",
+ "print(model(input_ids, decoder_input_ids=decoder_input_ids))"
  ]
 },
 {
diff --git a/modelcenter/community/google/t5-small-lm-adapt/introduction_en.ipynb b/modelcenter/community/google/t5-small-lm-adapt/introduction_en.ipynb
index 21c1e824d139596eb4f84f841e1c1414eca2f1cc..d4ed2c5aadd79a1d5c41dfe514166131a22c8238 100644
--- a/modelcenter/community/google/t5-small-lm-adapt/introduction_en.ipynb
+++ b/modelcenter/community/google/t5-small-lm-adapt/introduction_en.ipynb
@@ -74,7 +74,8 @@
  "\n",
  "model = AutoModel.from_pretrained(\"google/t5-small-lm-adapt\")\n",
  "input_ids = paddle.randint(100, 200, shape=[1, 20])\n",
- "print(model(input_ids))"
+ "decoder_input_ids = paddle.randint(100, 200, shape=[1, 20])\n",
+ "print(model(input_ids, decoder_input_ids=decoder_input_ids))"
  ]
 },
 {
diff --git a/modelcenter/community/google/t5-v1_1-base/introduction_cn.ipynb b/modelcenter/community/google/t5-v1_1-base/introduction_cn.ipynb
index fb4d969aa56526b40ede0b6a47b812304e514db9..c80189717f8ccfc241616153f5029590ee89b802 100644
--- a/modelcenter/community/google/t5-v1_1-base/introduction_cn.ipynb
+++ b/modelcenter/community/google/t5-v1_1-base/introduction_cn.ipynb
@@ -65,7 +65,8 @@
  "\n",
  "model = AutoModel.from_pretrained(\"google/t5-v1_1-base\")\n",
  "input_ids = paddle.randint(100, 200, shape=[1, 20])\n",
- "print(model(input_ids))"
+ "decoder_input_ids = paddle.randint(100, 200, shape=[1, 20])\n",
+ "print(model(input_ids, decoder_input_ids=decoder_input_ids))"
  ]
 },
 {
diff --git a/modelcenter/community/google/t5-v1_1-base/introduction_en.ipynb b/modelcenter/community/google/t5-v1_1-base/introduction_en.ipynb
index 4084d70fab28230aacbe2d3576f7c70af1c097af..efd5de43bed698af9cc95669fbb2e308d7fa17de 100644
--- a/modelcenter/community/google/t5-v1_1-base/introduction_en.ipynb
+++ b/modelcenter/community/google/t5-v1_1-base/introduction_en.ipynb
@@ -65,7 +65,8 @@
  "\n",
  "model = AutoModel.from_pretrained(\"google/t5-v1_1-base\")\n",
  "input_ids = paddle.randint(100, 200, shape=[1, 20])\n",
- "print(model(input_ids))"
+ "decoder_input_ids = paddle.randint(100, 200, shape=[1, 20])\n",
+ "print(model(input_ids, decoder_input_ids=decoder_input_ids))"
  ]
 },
 {
diff --git a/modelcenter/community/google/t5-v1_1-large/introduction_cn.ipynb b/modelcenter/community/google/t5-v1_1-large/introduction_cn.ipynb
index 1ad787e785db45c64ad232e6d8742db04bae908f..06ece4334defcdea806e0f72c7c2300545b0ede7 100644
--- a/modelcenter/community/google/t5-v1_1-large/introduction_cn.ipynb
+++ b/modelcenter/community/google/t5-v1_1-large/introduction_cn.ipynb
@@ -65,7 +65,8 @@
  "\n",
  "model = AutoModel.from_pretrained(\"google/t5-v1_1-large\")\n",
  "input_ids = paddle.randint(100, 200, shape=[1, 20])\n",
- "print(model(input_ids))"
+ "decoder_input_ids = paddle.randint(100, 200, shape=[1, 20])\n",
+ "print(model(input_ids, decoder_input_ids=decoder_input_ids))"
  ]
 },
 {
diff --git a/modelcenter/community/google/t5-v1_1-large/introduction_en.ipynb b/modelcenter/community/google/t5-v1_1-large/introduction_en.ipynb
index 4efcfb2f376056a749dbe870708cbe17aea5bb38..8cf2ec83c00fa06776c1f12d5495a537d6a5104e 100644
--- a/modelcenter/community/google/t5-v1_1-large/introduction_en.ipynb
+++ b/modelcenter/community/google/t5-v1_1-large/introduction_en.ipynb
@@ -65,7 +65,8 @@
  "\n",
  "model = AutoModel.from_pretrained(\"google/t5-v1_1-large\")\n",
  "input_ids = paddle.randint(100, 200, shape=[1, 20])\n",
- "print(model(input_ids))"
+ "decoder_input_ids = paddle.randint(100, 200, shape=[1, 20])\n",
+ "print(model(input_ids, decoder_input_ids=decoder_input_ids))"
  ]
 },
 {
diff --git a/modelcenter/community/google/t5-v1_1-small/introduction_cn.ipynb b/modelcenter/community/google/t5-v1_1-small/introduction_cn.ipynb
index ebcc34ea3ac005d55e3a000df91bc1359847a60f..68386d8fb07a7095c899f584aca3c9bcfb5d6b2a 100644
--- a/modelcenter/community/google/t5-v1_1-small/introduction_cn.ipynb
+++ b/modelcenter/community/google/t5-v1_1-small/introduction_cn.ipynb
@@ -67,7 +67,8 @@
  "\n",
  "model = AutoModel.from_pretrained(\"google/t5-v1_1-small\")\n",
  "input_ids = paddle.randint(100, 200, shape=[1, 20])\n",
- "print(model(input_ids))"
+ "decoder_input_ids = paddle.randint(100, 200, shape=[1, 20])\n",
+ "print(model(input_ids, decoder_input_ids=decoder_input_ids))"
  ]
 },
 {
diff --git a/modelcenter/community/google/t5-v1_1-small/introduction_en.ipynb b/modelcenter/community/google/t5-v1_1-small/introduction_en.ipynb
index e89ecb34792133edff852bb34e62a500917e512c..a7eb5dc013fdf57f3ee9fb2e1e3b518941b85f3a 100644
--- a/modelcenter/community/google/t5-v1_1-small/introduction_en.ipynb
+++ b/modelcenter/community/google/t5-v1_1-small/introduction_en.ipynb
@@ -67,7 +67,8 @@
  "\n",
  "model = AutoModel.from_pretrained(\"google/t5-v1_1-small\")\n",
  "input_ids = paddle.randint(100, 200, shape=[1, 20])\n",
- "print(model(input_ids))"
+ "decoder_input_ids = paddle.randint(100, 200, shape=[1, 20])\n",
+ "print(model(input_ids, decoder_input_ids=decoder_input_ids))"
  ]
 },
 {