From 8f60b36e24175c9221e7bb07782cc59a859a059c Mon Sep 17 00:00:00 2001
From: cinderellaTiger <114470934+cinderellaTiger@users.noreply.github.com>
Date: Mon, 20 Feb 2023 15:12:49 +0800
Subject: [PATCH] Update introduction_en.ipynb (#5721)

---
 modelcenter/PP-HelixFold/introduction_en.ipynb | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/modelcenter/PP-HelixFold/introduction_en.ipynb b/modelcenter/PP-HelixFold/introduction_en.ipynb
index 1f00040a..9f3ad10d 100644
--- a/modelcenter/PP-HelixFold/introduction_en.ipynb
+++ b/modelcenter/PP-HelixFold/introduction_en.ipynb
@@ -7,11 +7,11 @@
    "source": [
     "## 1. PP-HelixFold Introduction\n",
     "\n",
-    "AlphaFold2 is an accurate protein structure prediction pipeline. PP-HelixFold provides an efficient and improved implementation of the complete training and inference pipelines of AlphaFold2 in GPU and DCU. Compared with the computational performance of AlphaFold2 reported in the paper and OpenFold implemented through PyTorch, PP-HelixFold reduces the training time from about 11 days to 7.5 days. Training PP-HelixFold from scratch can achieve competitive accuracy with AlphaFold2.\n",
+    "AlphaFold2 is an accurate protein structure prediction pipeline. PP-HelixFold provides an efficient and improved implementation of the complete training and inference pipelines of AlphaFold2 on GPU and DCU. Compared with the computational performance of AlphaFold2 reported in the paper and of OpenFold implemented in PyTorch, PP-HelixFold reduces the training time from about 11 days to 5.12 days, and to only 2.89 days with hybrid parallelism. Training PP-HelixFold from scratch achieves accuracy competitive with AlphaFold2.\n",
     "\n",
\n", - "\n", - "\n", + "\n", + "\n", "
\n", "\n", "\n", -- GitLab