diff --git a/changelog.html b/changelog.html
index 50483e607e0b0d6a7efd46d6805c5b43cc099adf..57107f8810fcfba4c576ccd14a6988917f960fb6 100644
--- a/changelog.html
+++ b/changelog.html
@@ -58,6 +58,9 @@ Upcoming changes
 •
   Race condition in triggers could cause various NullPointerExceptions.
   (issue 29790)
+•
+  Archiving of large artifacts. Tar implementation cannot handle files having a size >8GB.
+  (issue 10629)
 •
   Allow plugins to augment or replace the plugin manager UI.
   (PR 1788)
@@ -292,7 +295,7 @@ Upcoming changes
   Since 1.598 overrides of Descriptor.getId were not correctly handled by form binding, breaking at least the CloudBees Templates plugin.
   (issue 26781)
 •
-  Reverted in 1.611. Archiving of large artifacts. Tar implementation cannot handle files having a size >8GB.
+  Reverted in 1.611, reimplemented in 1.627. Archiving of large artifacts. Tar implementation cannot handle files having a size >8GB.
   (issue 10629)
 •
   The queue state was not updated between scheduling builds.
diff --git a/core/pom.xml b/core/pom.xml
index ed8e8cd3083b99543c6f649c7345c31a6d1675f2..41c9fec6696d65044360cacb8db1af24d0b823a9 100644
--- a/core/pom.xml
+++ b/core/pom.xml
@@ -276,6 +276,11 @@ THE SOFTWARE.
       <artifactId>commons-beanutils</artifactId>
       <version>1.8.3</version>
     </dependency>
+    <dependency>
+      <groupId>org.apache.commons</groupId>
+      <artifactId>commons-compress</artifactId>
+      <version>1.10</version>
+    </dependency>
     <dependency>
       <groupId>javax.mail</groupId>
       <artifactId>mail</artifactId>
diff --git a/core/src/main/java/hudson/FilePath.java b/core/src/main/java/hudson/FilePath.java
index ca137d2e11605a7989cf1357fc94ec1529fa9761..07fdc9d17ced01bd5a3153e4cfb8988571b90e2e 100644
--- a/core/src/main/java/hudson/FilePath.java
+++ b/core/src/main/java/hudson/FilePath.java
@@ -33,7 +33,6 @@ import hudson.model.AbstractProject;
 import hudson.model.Computer;
 import hudson.model.Item;
 import hudson.model.TaskListener;
-import hudson.org.apache.tools.tar.TarInputStream;
 import hudson.os.PosixAPI;
 import hudson.os.PosixException;
 import hudson.remoting.Callable;
@@ -70,7 +69,6 @@ import org.apache.commons.io.input.CountingInputStream;
 import org.apache.tools.ant.DirectoryScanner;
 import org.apache.tools.ant.Project;
 import org.apache.tools.ant.types.FileSet;
-import org.apache.tools.tar.TarEntry;
 import org.apache.tools.zip.ZipEntry;
 import org.apache.tools.zip.ZipFile;
 import org.kohsuke.stapler.Stapler;
@@ -120,6 +118,8 @@ import static hudson.Util.*;
 import javax.annotation.Nonnull;
 import javax.annotation.Nullable;
 import jenkins.security.MasterToSlaveCallable;
+import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
+import org.apache.commons.compress.archivers.tar.TarArchiveInputStream;
 import org.jenkinsci.remoting.RoleChecker;
 import org.jenkinsci.remoting.RoleSensitive;
 
@@ -2268,12 +2268,15 @@ public final class FilePath implements Serializable {
 
     /**
      * Reads from a tar stream and stores obtained files to the base dir.
+     * @since TODO supports large files > 10 GB, migration to commons-compress
      */
     private void readFromTar(String name, File baseDir, InputStream in) throws IOException {
-        TarInputStream t = new TarInputStream(in);
+        TarArchiveInputStream t = new TarArchiveInputStream(in);
+
+        // TarInputStream t = new TarInputStream(in);
         try {
-            TarEntry te;
-            while ((te = t.getNextEntry()) != null) {
+            TarArchiveEntry te;
+            while ((te = t.getNextTarEntry()) != null) {
                 File f = new File(baseDir,te.getName());
                 if(te.isDirectory()) {
                     mkdirs(f);
@@ -2282,8 +2285,7 @@ public final class FilePath implements Serializable {
                     if (parent != null) mkdirs(parent);
                     writing(f);
 
-                    byte linkFlag = (Byte) LINKFLAG_FIELD.get(te);
-                    if (linkFlag==TarEntry.LF_SYMLINK) {
+                    if (te.isSymbolicLink()) {
                         new FilePath(f).symlinkTo(te.getLinkName(), TaskListener.NULL);
                     } else {
                         IOUtils.copy(t,f);
@@ -2300,8 +2302,6 @@ public final class FilePath implements Serializable {
         } catch (InterruptedException e) {
             Thread.currentThread().interrupt(); // process this later
             throw new IOException("Failed to extract "+name,e);
-        } catch (IllegalAccessException e) {
-            throw new IOException("Failed to extract "+name,e);
         } finally {
             t.close();
         }
@@ -2725,20 +2725,6 @@ public final class FilePath implements Serializable {
         }
     };
 
-    private static final Field LINKFLAG_FIELD = getTarEntryLinkFlagField();
-
-    private static Field getTarEntryLinkFlagField() {
-        try {
-            Field f = TarEntry.class.getDeclaredField("linkFlag");
-            f.setAccessible(true);
-            return f;
-        } catch (SecurityException e) {
-            throw new AssertionError(e);
-        } catch (NoSuchFieldException e) {
-            throw new AssertionError(e);
-        }
-    }
-
     /**
      * Gets the {@link FilePath} representation of the "~" directory
      * (User's home directory in the Unix sense) of the given channel.
diff --git a/core/src/main/java/hudson/org/apache/tools/tar/TarInputStream.java b/core/src/main/java/hudson/org/apache/tools/tar/TarInputStream.java
index f2d10c1c39960b09cb7ee9fd89505ba8fc9cb79a..d7d8a5b54da3e24bd7f83ea3f1d1f1306d63d77c 100644
--- a/core/src/main/java/hudson/org/apache/tools/tar/TarInputStream.java
+++ b/core/src/main/java/hudson/org/apache/tools/tar/TarInputStream.java
@@ -37,8 +37,9 @@ import java.io.ByteArrayOutputStream;
  * methods are provided to position at each successive entry in
  * the archive, and the read each entry as a normal input stream
  * using read().
- *
+ * @deprecated Use {@link org.apache.commons.compress.archivers.tar.TarArchiveInputStream} instead
  */
+@Deprecated
 public class TarInputStream extends FilterInputStream {
 
     // CheckStyle:VisibilityModifier OFF - bc
diff --git a/core/src/main/java/hudson/org/apache/tools/tar/TarOutputStream.java b/core/src/main/java/hudson/org/apache/tools/tar/TarOutputStream.java
index f23e44262847de611c21231223966e8f78d1c5b7..48f4876bd45c3cfee3fb0bd9a7d102df85162d19 100644
--- a/core/src/main/java/hudson/org/apache/tools/tar/TarOutputStream.java
+++ b/core/src/main/java/hudson/org/apache/tools/tar/TarOutputStream.java
@@ -35,8 +35,11 @@ import java.io.IOException;
  * The TarOutputStream writes a UNIX tar archive as an OutputStream.
  * Methods are provided to put entries, and then write their contents
  * by writing to this stream using write().
+ *
+ * @deprecated Use {@link org.apache.commons.compress.archivers.tar.TarArchiveOutputStream} instead
  *
  */
+@Deprecated
 public class TarOutputStream extends FilterOutputStream {
     /** Fail if a long file name is required in the archive. */
     public static final int LONGFILE_ERROR = 0;
diff --git a/core/src/main/java/hudson/util/io/TarArchiver.java b/core/src/main/java/hudson/util/io/TarArchiver.java
index 7e7a3dd4f0ec1a14981f98fc64a2cfdfbb40a87d..600a16cba67faf106568089f383d269cde9423ac 100644
--- a/core/src/main/java/hudson/util/io/TarArchiver.java
+++ b/core/src/main/java/hudson/util/io/TarArchiver.java
@@ -37,6 +37,8 @@ import java.io.FileInputStream;
 import java.io.IOException;
 import java.io.OutputStream;
 import java.lang.reflect.Field;
+import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
+import org.apache.commons.compress.archivers.tar.TarArchiveOutputStream;
 
 import static org.apache.tools.tar.TarConstants.LF_SYMLINK;
 
@@ -47,24 +49,17 @@ import static org.apache.tools.tar.TarConstants.LF_SYMLINK;
  */
 final class TarArchiver extends Archiver {
     private final byte[] buf = new byte[8192];
-    private final TarOutputStream tar;
+    private final TarArchiveOutputStream tar;
 
     TarArchiver(OutputStream out) {
-        tar = new TarOutputStream(new BufferedOutputStream(out) {
-            // TarOutputStream uses TarBuffer internally,
-            // which flushes the stream for each block. this creates unnecessary
-            // data stream fragmentation, and flush request to a remote, which slows things down.
-            @Override
-            public void flush() throws IOException {
-                // so don't do anything in flush
-            }
-        });
-        tar.setLongFileMode(TarOutputStream.LONGFILE_GNU);
+        tar = new TarArchiveOutputStream(out);
+        tar.setBigNumberMode(TarArchiveOutputStream.BIGNUMBER_STAR);
+        tar.setLongFileMode(TarArchiveOutputStream.LONGFILE_GNU);
     }
 
     @Override
     public void visitSymlink(File link, String target, String relativePath) throws IOException {
-        TarEntry e = new TarEntry(relativePath, LF_SYMLINK);
+        TarArchiveEntry e = new TarArchiveEntry(relativePath, LF_SYMLINK);
         try {
             int mode = IOUtils.mode(link);
             if (mode != -1) {
@@ -73,16 +68,11 @@ final class TarArchiver extends Archiver {
         } catch (PosixException x) {
             // ignore
         }
+
+        e.setLinkName(target);
 
-        try {
-            StringBuffer linkName = (StringBuffer) LINKNAME_FIELD.get(e);
-            linkName.setLength(0);
-            linkName.append(target);
-        } catch (IllegalAccessException x) {
-            throw new IOException("Failed to set linkName", x);
-        }
-
-        tar.putNextEntry(e);
+        tar.putArchiveEntry(e);
+        tar.closeArchiveEntry();
         entriesWritten++;
     }
 
@@ -97,14 +87,14 @@ final class TarArchiver extends Archiver {
         if(file.isDirectory())
             relativePath+='/';
 
-        TarEntry te = new TarEntry(relativePath);
+        TarArchiveEntry te = new TarArchiveEntry(relativePath);
         int mode = IOUtils.mode(file);
         if (mode!=-1) te.setMode(mode);
         te.setModTime(file.lastModified());
         if(!file.isDirectory())
             te.setSize(file.length());
 
-        tar.putNextEntry(te);
+        tar.putArchiveEntry(te);
 
         if (!file.isDirectory()) {
             FileInputStream in = new FileInputStream(file);
@@ -117,25 +107,11 @@ final class TarArchiver extends Archiver {
             }
         }
 
-        tar.closeEntry();
+        tar.closeArchiveEntry();
         entriesWritten++;
     }
 
     public void close() throws IOException {
         tar.close();
     }
-
-    private static final Field LINKNAME_FIELD = getTarEntryLinkNameField();
-
-    private static Field getTarEntryLinkNameField() {
-        try {
-            Field f = TarEntry.class.getDeclaredField("linkName");
-            f.setAccessible(true);
-            return f;
-        } catch (SecurityException e) {
-            throw new AssertionError(e);
-        } catch (NoSuchFieldException e) {
-            throw new AssertionError(e);
-        }
-    }
 }
diff --git a/core/src/test/java/hudson/FilePathTest.java b/core/src/test/java/hudson/FilePathTest.java
index 801e184906a8d7ee1582cd0551132c0f7fc363f5..daae3fcf6fc6b1253f28d475852684db2f183312 100644
--- a/core/src/test/java/hudson/FilePathTest.java
+++ b/core/src/test/java/hudson/FilePathTest.java
@@ -371,7 +371,7 @@ public class FilePathTest {
         // Decompress
         FilePath outDir = new FilePath(temp.newFolder(filePrefix + "_out"));
         final FilePath outFile = outDir.child(tempFile.getName());
-        tmpDirPath.child( filePrefix + ".tar").untar(outDir, TarCompression.NONE);
+        tmpDirPath.child(tarFile.getName()).untar(outDir, TarCompression.NONE);
         assertEquals("Result file after the roundtrip differs from the initial file",
                 new FilePath(tempFile).digest(), outFile.digest());
     }
@@ -659,4 +659,35 @@ public class FilePathTest {
         // test conflict subdir
         src.moveAllChildrenTo(dst);
     }
+
+    @Issue("JENKINS-10629")
+    @Test
+    public void testEOFbrokenFlush() throws IOException, InterruptedException {
+        final File srcFolder = temp.newFolder("src");
+        // simulate magic structure with magic sizes:
+        // |- dir/pom.xml (2049)
+        // |- pom.xml (2049)
+        // \- small.tar (1537)
+        final File smallTar = new File(srcFolder, "small.tar");
+        givenSomeContentInFile(smallTar, 1537);
+        final File dir = new File(srcFolder, "dir");
+        dir.mkdirs();
+        final File pomFile = new File(dir, "pom.xml");
+        givenSomeContentInFile(pomFile, 2049);
+        FileUtils.copyFileToDirectory(pomFile, srcFolder);
+
+        final File archive = temp.newFile("archive.tar");
+
+        // Compress archive
+        final FilePath tmpDirPath = new FilePath(srcFolder);
+        int tarred = tmpDirPath.tar(new FileOutputStream(archive), "**");
+        assertEquals("Three files should have been compressed", 3, tarred);
+
+        // Decompress
+        final File dstFolder = temp.newFolder("dst");
+        dstFolder.mkdirs();
+        FilePath outDir = new FilePath(dstFolder);
+        // and now fail when flush is bad!
+        tmpDirPath.child("../" + archive.getName()).untar(outDir, TarCompression.NONE);
+    }
 }
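A note on why the library swap lifts the size ceiling: the classic ustar header stores an entry's size in a 12-byte octal field, which tops out just below 8 GiB, and the Ant-derived TarOutputStream deprecated above only ever wrote that form. Commons Compress can switch to the star/GNU binary ("big number") encoding for oversized numeric fields, which is what the two mode calls in the new TarArchiver constructor enable. The sketch below is not part of the patch; it is a minimal standalone illustration of those settings, assuming commons-compress 1.10 on the classpath, with an invented file name and a 9 GiB entry streamed as zeros.

    import java.io.BufferedOutputStream;
    import java.io.FileOutputStream;
    import java.io.IOException;
    import java.io.OutputStream;

    import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
    import org.apache.commons.compress.archivers.tar.TarArchiveOutputStream;

    public class BigTarSketch {
        public static void main(String[] args) throws IOException {
            // 9 GiB: larger than a plain octal ustar size field can express.
            final long size = 9L * 1024 * 1024 * 1024;

            try (OutputStream out = new BufferedOutputStream(new FileOutputStream("big.tar"));
                 TarArchiveOutputStream tar = new TarArchiveOutputStream(out)) {
                // The same two modes the patched TarArchiver sets:
                // star/GNU binary encoding for oversized numeric fields,
                // GNU long-name entries for paths over 100 characters.
                tar.setBigNumberMode(TarArchiveOutputStream.BIGNUMBER_STAR);
                tar.setLongFileMode(TarArchiveOutputStream.LONGFILE_GNU);

                TarArchiveEntry entry = new TarArchiveEntry("big.bin");
                entry.setSize(size);
                // Without a big-number mode, putArchiveEntry rejects sizes over 8 GiB.
                tar.putArchiveEntry(entry);

                // Stream the declared number of bytes (zeros here) in fixed-size blocks.
                byte[] block = new byte[64 * 1024];
                for (long written = 0; written < size; written += block.length) {
                    tar.write(block, 0, (int) Math.min(block.length, size - written));
                }
                tar.closeArchiveEntry();
            }
        }
    }

Reading such an archive back needs no extra configuration, because TarArchiveInputStream recognizes the star/GNU and PAX size encodings on its own; that is why readFromTar above only swaps the stream and entry classes.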