From 58204ac6e5fb4d845a94e12593ac7ed7ea8cb3a5 Mon Sep 17 00:00:00 2001
From: WangFengTu
Date: Mon, 29 Jun 2020 16:22:28 +0800
Subject: [PATCH] use function DecompressStream to decompress to speed up

Signed-off-by: WangFengTu
---
 iSulad-img.spec | 2 +-
 ...ompressStream-to-decompress-to-speed.patch | 152 ++++++++++++++++++
 patch/series-patch.conf | 1 +
 3 files changed, 154 insertions(+), 1 deletion(-)
 create mode 100644 patch/0058-use-function-DecompressStream-to-decompress-to-speed.patch

diff --git a/iSulad-img.spec b/iSulad-img.spec
index f5cce7c..645ea2c 100644
--- a/iSulad-img.spec
+++ b/iSulad-img.spec
@@ -1,5 +1,5 @@
 %global _version 2.0.0
-%global _release 20200619.162225.gitb2154cf4
+%global _release 20200629.162148.git8767d1a1
 Name: iSulad-img
 Version: %{_version}
 Release: %{_release}
diff --git a/patch/0058-use-function-DecompressStream-to-decompress-to-speed.patch b/patch/0058-use-function-DecompressStream-to-decompress-to-speed.patch
new file mode 100644
index 0000000..663c84a
--- /dev/null
+++ b/patch/0058-use-function-DecompressStream-to-decompress-to-speed.patch
@@ -0,0 +1,152 @@
+From 0f8d19210ce69278b5c876c49a9cddb5af82c58f Mon Sep 17 00:00:00 2001
+From: WangFengTu
+Date: Mon, 29 Jun 2020 15:00:48 +0800
+Subject: [PATCH] use function DecompressStream to decompress to speed up
+
+Signed-off-by: WangFengTu
+---
+ .../github.com/containers/image/copy/copy.go | 4 ++-
+ .../containers/image/docker/tarfile/src.go | 31 +++++++++++--------
+ .../containers/image/tarball/tarball_src.go | 21 +++++--------
+ 3 files changed, 29 insertions(+), 27 deletions(-)
+
+diff --git a/vendor/github.com/containers/image/copy/copy.go b/vendor/github.com/containers/image/copy/copy.go
+index da119d5..0b5399c 100644
+--- a/vendor/github.com/containers/image/copy/copy.go
++++ b/vendor/github.com/containers/image/copy/copy.go
+@@ -20,6 +20,7 @@ import (
+ 	"github.com/containers/image/signature"
+ 	"github.com/containers/image/transports"
+ 	"github.com/containers/image/types"
++	"github.com/containers/storage/pkg/archive"
+ 	"github.com/klauspost/pgzip"
+ 	"github.com/opencontainers/go-digest"
+ 	"github.com/pkg/errors"
+@@ -741,7 +742,8 @@ func diffIDComputationGoroutine(dest chan<- diffIDResult, layerStream io.ReadClo
+ // computeDiffID reads all input from layerStream, uncompresses it using decompressor if necessary, and returns its digest.
+ func computeDiffID(stream io.Reader, decompressor compression.DecompressorFunc) (digest.Digest, error) {
+ 	if decompressor != nil {
+-		s, err := decompressor(stream)
++		// decompressor is too slow, DecompressStream is faster
++		s, err := archive.DecompressStream(stream)
+ 		if err != nil {
+ 			return "", err
+ 		}
+diff --git a/vendor/github.com/containers/image/docker/tarfile/src.go b/vendor/github.com/containers/image/docker/tarfile/src.go
+index c8b4f4e..1743ae7 100644
+--- a/vendor/github.com/containers/image/docker/tarfile/src.go
++++ b/vendor/github.com/containers/image/docker/tarfile/src.go
+@@ -17,6 +17,7 @@ import (
+ 	"github.com/containers/image/manifest"
+ 	"github.com/containers/image/pkg/compression"
+ 	"github.com/containers/image/types"
++	"github.com/containers/storage/pkg/archive"
+ 	"github.com/opencontainers/go-digest"
+ 	"github.com/pkg/errors"
+ )
+@@ -56,21 +57,34 @@ func NewSourceFromFile(path, repoTag string) (*Source, error) {
+ 	if err != nil {
+ 		return nil, errors.Wrapf(err, "error opening file %q", path)
+ 	}
+-	defer file.Close()
+ 
+ 	// If the file is already not compressed we can just return the file itself
+ 	// as a source. Otherwise we pass the stream to NewSourceFromStream.
+-	stream, isCompressed, err := compression.AutoDecompress(file)
++	_, isCompressed, err := compression.AutoDecompress(file)
+ 	if err != nil {
++		file.Close()
+ 		return nil, errors.Wrapf(err, "Error detecting compression for file %q", path)
+ 	}
+-	defer stream.Close()
++	file.Close()
+ 	if !isCompressed {
+ 		return &Source{
+ 			tarPath: path,
+ 			repoTag: repoTag,
+ 		}, nil
+ 	}
++
++	file, err = os.Open(path)
++	if err != nil {
++		return nil, errors.Wrapf(err, "error opening file %v", path)
++	}
++	defer file.Close()
++
++	stream, err := archive.DecompressStream(file)
++	if err != nil {
++		return nil, errors.Wrapf(err, "Error decompression file %v", path)
++	}
++	defer stream.Close()
++
+ 	return NewSourceFromStream(stream, repoTag)
+ }
+ 
+@@ -93,20 +107,11 @@ func NewSourceFromStream(inputStream io.Reader, repoTag string) (*Source, error)
+ 		}
+ 	}()
+ 
+-	// In order to be compatible with docker-load, we need to support
+-	// auto-decompression (it's also a nice quality-of-life thing to avoid
+-	// giving users really confusing "invalid tar header" errors).
+-	uncompressedStream, _, err := compression.AutoDecompress(inputStream)
+-	if err != nil {
+-		return nil, errors.Wrap(err, "Error auto-decompressing input")
+-	}
+-	defer uncompressedStream.Close()
+-
+ 	// Copy the plain archive to the temporary file.
+ 	//
+ 	// TODO: This can take quite some time, and should ideally be cancellable
+ 	// using a context.Context.
+-	if _, err := io.Copy(tarCopyFile, uncompressedStream); err != nil {
++	if _, err := io.Copy(tarCopyFile, inputStream); err != nil {
+ 		return nil, errors.Wrapf(err, "error copying contents to temporary file %q", tarCopyFile.Name())
+ 	}
+ 	succeeded = true
+diff --git a/vendor/github.com/containers/image/tarball/tarball_src.go b/vendor/github.com/containers/image/tarball/tarball_src.go
+index a3c5453..4c25d78 100644
+--- a/vendor/github.com/containers/image/tarball/tarball_src.go
++++ b/vendor/github.com/containers/image/tarball/tarball_src.go
+@@ -12,8 +12,8 @@ import (
+ 	"strings"
+ 	"time"
+ 
+-	"github.com/containers/image/pkg/compression"
+ 	"github.com/containers/image/types"
++	"github.com/containers/storage/pkg/archive"
+ 	digest "github.com/opencontainers/go-digest"
+ 	imgspecs "github.com/opencontainers/image-spec/specs-go"
+ 	imgspecv1 "github.com/opencontainers/image-spec/specs-go/v1"
+@@ -71,21 +71,16 @@ func (r *tarballReference) NewImageSource(ctx context.Context, sys *types.System
+ 	blobIDdigester := digest.Canonical.Digester()
+ 	reader = io.TeeReader(reader, blobIDdigester.Hash())
+ 
+-	// Set up to digest the file after we maybe decompress it.
+-	diffIDdigester := digest.Canonical.Digester()
+-	uncompressed, iscompressed, err := compression.AutoDecompress(reader)
++	uncompressed, err := archive.DecompressStream(reader)
+ 	if err != nil {
+-		return nil, fmt.Errorf("error decompress %q: %v", filename, err)
++		return nil, fmt.Errorf("error decompression file %v: %v", filename, err)
+ 	}
+ 	defer uncompressed.Close()
+-	if iscompressed {
+-		// It is compressed, so the diffID is the digest of the uncompressed version
+-		reader = io.TeeReader(uncompressed, diffIDdigester.Hash())
+-	} else {
+-		// It is not compressed, so the diffID and the blobID are going to be the same
+-		diffIDdigester = blobIDdigester
+-		uncompressed = nil
+-	}
++
++	// It is compressed, so the diffID is the digest of the uncompressed version
++	diffIDdigester := digest.Canonical.Digester()
++	reader = io.TeeReader(uncompressed, diffIDdigester.Hash())
++
+ 	// TODO: This can take quite some time, and should ideally be cancellable using ctx.Done().
+ 	n, err := io.Copy(ioutil.Discard, reader)
+ 	if err != nil {
+-- 
+2.20.1
+
diff --git a/patch/series-patch.conf b/patch/series-patch.conf
index 327eec3..ab85245 100644
--- a/patch/series-patch.conf
+++ b/patch/series-patch.conf
@@ -55,3 +55,4 @@
 0055-Change-copyright-from-MulanPSL-to-MulanPSL2.patch
 0056-make-sure-created-time-is-larger-or-equal-than-1970.patch
 0057-support-more-compressed-type-when-import-tarball.patch
+0058-use-function-DecompressStream-to-decompress-to-speed.patch
-- 
GitLab
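
For reference, the decompress-and-digest pattern this patch switches to can be exercised on its own. The sketch below is illustrative only and is not part of the patch series: it assumes a standalone module where github.com/containers/storage/pkg/archive and github.com/opencontainers/go-digest are available, and the file layout, main() wrapper, and command-line handling are made up for the example. It mirrors what the reworked computeDiffID does: run the input through archive.DecompressStream (which also passes already-uncompressed data through) and digest the decompressed bytes.

package main

import (
	"fmt"
	"io"
	"os"

	"github.com/containers/storage/pkg/archive"
	digest "github.com/opencontainers/go-digest"
)

// computeDiffID decompresses stream with archive.DecompressStream and
// returns the digest of the decompressed bytes. DecompressStream detects
// the compression format itself and hands uncompressed input through
// unchanged, so no separate "is it compressed?" check is needed here.
func computeDiffID(stream io.Reader) (digest.Digest, error) {
	decompressed, err := archive.DecompressStream(stream)
	if err != nil {
		return "", err
	}
	defer decompressed.Close()

	// Digest the decompressed bytes; digester.Hash() is an io.Writer.
	digester := digest.Canonical.Digester()
	if _, err := io.Copy(digester.Hash(), decompressed); err != nil {
		return "", err
	}
	return digester.Digest(), nil
}

func main() {
	if len(os.Args) != 2 {
		fmt.Fprintln(os.Stderr, "usage: diffid <layer-file>")
		os.Exit(1)
	}
	// For example, a layer.tar.gz exported from an image.
	f, err := os.Open(os.Args[1])
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	defer f.Close()

	id, err := computeDiffID(f)
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	fmt.Println(id)
}

The speed-up mentioned in the commit message comes from DecompressStream's faster decompression path; the sketch is only meant to show the call pattern that copy.go, docker/tarfile/src.go, and tarball/tarball_src.go now share.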