From c4de710a3b01fe6a21b981c9396ff8fd85344974 Mon Sep 17 00:00:00 2001
From: "Gary D. Gregory" This method is aware of the boundaries of the current entry in the archive and will deal with them as if they were this stream's start and EOF.
* This implementation may return 0 if the underlying {@link SeekableByteChannel} is non-blocking and currently hasn't got any bytes available. *
+ * + * @param b the buffer into which the data is read. + * @param off the start offset in array b at which the data is written. + * @param len the maximum number of bytes to read. + * @return the total number of bytes read into the buffer, or -1 if EOF is reached. + * @throws NullPointerException if b is null. + * @throws IndexOutOfBoundsException if {@code off} or {@code len} are negative, or if {@code off + len} is greater than {@code b.length}. + * @throws IOException if an I/O error occurs. */ @Override public int read(final byte[] b, final int off, final int len) throws IOException { + IOUtils.checkFromIndexSize(b, off, len); if (len == 0) { return 0; } diff --git a/src/main/java/org/apache/commons/compress/archivers/sevenz/SevenZOutputFile.java b/src/main/java/org/apache/commons/compress/archivers/sevenz/SevenZOutputFile.java index 5a35824c151..5db75811c10 100644 --- a/src/main/java/org/apache/commons/compress/archivers/sevenz/SevenZOutputFile.java +++ b/src/main/java/org/apache/commons/compress/archivers/sevenz/SevenZOutputFile.java @@ -55,6 +55,7 @@ import org.apache.commons.compress.archivers.ArchiveEntry; import org.apache.commons.compress.archivers.ArchiveException; +import org.apache.commons.io.IOUtils; import org.apache.commons.io.file.attribute.FileTimes; import org.apache.commons.io.output.CountingOutputStream; @@ -87,6 +88,7 @@ public void write(final byte[] b) throws IOException { @Override public void write(final byte[] b, final int off, final int len) throws IOException { + IOUtils.checkFromIndexSize(b, off, len); if (len > BUF_SIZE) { channel.write(ByteBuffer.wrap(b, off, len)); } else { diff --git a/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveInputStream.java b/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveInputStream.java index 80dd6859fb1..55d5705b0cb 100644 --- a/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveInputStream.java +++ 
b/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveInputStream.java @@ -634,10 +634,14 @@ public boolean markSupported() { * @param offset The offset at which to place bytes read. * @param numToRead The number of bytes to read. * @return The number of bytes read, or -1 at EOF. + * @throws NullPointerException if {@code buf} is null + * @throws IndexOutOfBoundsException if {@code offset} or {@code numToRead} are negative, + * or if {@code offset + numToRead} is greater than {@code buf.length}. * @throws IOException on error */ @Override public int read(final byte[] buf, final int offset, int numToRead) throws IOException { + org.apache.commons.io.IOUtils.checkFromIndexSize(buf, offset, numToRead); if (numToRead == 0) { return 0; } diff --git a/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveOutputStream.java b/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveOutputStream.java index 4ab04832a72..7024810fce8 100644 --- a/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveOutputStream.java +++ b/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveOutputStream.java @@ -39,6 +39,7 @@ import org.apache.commons.compress.archivers.zip.ZipEncodingHelper; import org.apache.commons.compress.utils.FixedLengthBlockOutputStream; import org.apache.commons.io.Charsets; +import org.apache.commons.io.IOUtils; import org.apache.commons.io.file.attribute.FileTimes; import org.apache.commons.io.output.CountingOutputStream; import org.apache.commons.lang3.ArrayFill; @@ -627,10 +628,14 @@ private void transferModTime(final TarArchiveEntry from, final TarArchiveEntry t * @param wBuf The buffer to write to the archive. * @param wOffset The offset in the buffer from which to get bytes. * @param numToWrite The number of bytes to write. 
+ * @throws NullPointerException if {@code wBuf} is null + * @throws IndexOutOfBoundsException if {@code wOffset} or {@code numToWrite} are negative, + * or if {@code wOffset + numToWrite} is greater than {@code wBuf.length}. * @throws IOException on error */ @Override public void write(final byte[] wBuf, final int wOffset, final int numToWrite) throws IOException { + IOUtils.checkFromIndexSize(wBuf, wOffset, numToWrite); if (!haveUnclosedEntry) { throw new IllegalStateException("No current tar entry"); } diff --git a/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveSparseZeroInputStream.java b/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveSparseZeroInputStream.java index 513ed072222..263619a2a27 100644 --- a/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveSparseZeroInputStream.java +++ b/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveSparseZeroInputStream.java @@ -18,7 +18,11 @@ */ package org.apache.commons.compress.archivers.tar; +import java.io.IOException; import java.io.InputStream; +import java.util.Arrays; + +import org.apache.commons.io.IOUtils; /** * This is an InputStream that always return 0, this is used when reading the "holes" of a sparse file @@ -35,6 +39,16 @@ public int read() { return 0; } + @Override + public int read(byte[] b, int off, int len) throws IOException { + IOUtils.checkFromIndexSize(b, off, len); + if (len == 0) { + return 0; + } + Arrays.fill(b, off, off + len, (byte) 0); + return len; + } + /** * Returns the input. 
* diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveInputStream.java b/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveInputStream.java index d02e625688a..1ea753abf1f 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveInputStream.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveInputStream.java @@ -1033,6 +1033,7 @@ private void pushback(final byte[] buf, final int offset, final int length) thro @Override public int read(final byte[] buffer, final int offset, final int length) throws IOException { + org.apache.commons.io.IOUtils.checkFromIndexSize(buffer, offset, length); if (length == 0) { return 0; } diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream.java b/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream.java index 07d3492c5aa..4cbdf4f1e47 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream.java @@ -41,6 +41,7 @@ import org.apache.commons.compress.archivers.ArchiveException; import org.apache.commons.compress.archivers.ArchiveOutputStream; import org.apache.commons.io.Charsets; +import org.apache.commons.io.IOUtils; import org.apache.commons.lang3.ArrayUtils; /** @@ -1568,10 +1569,14 @@ private int versionNeededToExtractMethod(final int zipMethod) { * @param b the byte array to write. * @param offset the start position to write from. * @param length the number of bytes to write. + * @throws NullPointerException if {@code b} is null + * @throws IndexOutOfBoundsException if {@code offset} or {@code length} are negative, + * or if {@code offset + length} is greater than {@code b.length}. * @throws IOException on error. 
*/ @Override public void write(final byte[] b, final int offset, final int length) throws IOException { + IOUtils.checkFromIndexSize(b, offset, length); if (entry == null) { throw new IllegalStateException("No current entry"); } diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/ZipSplitOutputStream.java b/src/main/java/org/apache/commons/compress/archivers/zip/ZipSplitOutputStream.java index accc0b8a041..834859bc4ed 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/ZipSplitOutputStream.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/ZipSplitOutputStream.java @@ -33,6 +33,7 @@ import java.util.TreeMap; import org.apache.commons.compress.archivers.ArchiveException; +import org.apache.commons.io.IOUtils; import org.apache.commons.io.file.PathUtils; /** @@ -252,10 +253,14 @@ public void write(final byte[] b) throws IOException { * @param b data to write * @param off offset of the start of data in param b * @param len the length of data to write + * @throws NullPointerException if {@code b} is null + * @throws IndexOutOfBoundsException if {@code off} or {@code len} are negative, + * or if {@code off + len} is greater than {@code b.length}. * @throws IOException if an I/O error occurs. 
*/ @Override public void write(final byte[] b, final int off, final int len) throws IOException { + IOUtils.checkFromIndexSize(b, off, len); if (len <= 0) { return; } diff --git a/src/main/java/org/apache/commons/compress/compressors/bzip2/BZip2CompressorInputStream.java b/src/main/java/org/apache/commons/compress/compressors/bzip2/BZip2CompressorInputStream.java index 40110dbdbe0..2f3784bc539 100644 --- a/src/main/java/org/apache/commons/compress/compressors/bzip2/BZip2CompressorInputStream.java +++ b/src/main/java/org/apache/commons/compress/compressors/bzip2/BZip2CompressorInputStream.java @@ -33,6 +33,7 @@ import org.apache.commons.compress.compressors.CompressorInputStream; import org.apache.commons.compress.utils.BitInputStream; import org.apache.commons.compress.utils.InputStreamStatistics; +import org.apache.commons.io.IOUtils; import org.apache.commons.io.input.CloseShieldInputStream; /** @@ -712,28 +713,15 @@ public int read() throws IOException { throw new CompressorException("Stream closed"); } - /* - * (non-Javadoc) - * - * @see InputStream#read(byte[], int, int) - */ @Override public int read(final byte[] dest, final int offs, final int len) throws IOException { - if (offs < 0) { - throw new IndexOutOfBoundsException("offs(" + offs + ") < 0."); - } - if (len < 0) { - throw new IndexOutOfBoundsException("len(" + len + ") < 0."); - } - if (offs + len > dest.length) { - throw new IndexOutOfBoundsException("offs(" + offs + ") + len(" + len + ") > dest.length(" + dest.length + ")."); + IOUtils.checkFromIndexSize(dest, offs, len); + if (len == 0) { + return 0; } if (this.bin == null) { throw new CompressorException("Stream closed"); } - if (len == 0) { - return 0; - } final int hi = offs + len; int destOffs = offs; diff --git a/src/main/java/org/apache/commons/compress/compressors/bzip2/BZip2CompressorOutputStream.java b/src/main/java/org/apache/commons/compress/compressors/bzip2/BZip2CompressorOutputStream.java index d3a9fd9089e..fb48afbb59f 100644 --- 
a/src/main/java/org/apache/commons/compress/compressors/bzip2/BZip2CompressorOutputStream.java +++ b/src/main/java/org/apache/commons/compress/compressors/bzip2/BZip2CompressorOutputStream.java @@ -23,6 +23,7 @@ import java.util.Arrays; import org.apache.commons.compress.compressors.CompressorOutputStream; +import org.apache.commons.io.IOUtils; /** * An output stream that compresses into the BZip2 format into another stream. @@ -1149,15 +1150,7 @@ private void sendMTFValues7() throws IOException { @Override public void write(final byte[] buf, int offs, final int len) throws IOException { - if (offs < 0) { - throw new IndexOutOfBoundsException("offs(" + offs + ") < 0."); - } - if (len < 0) { - throw new IndexOutOfBoundsException("len(" + len + ") < 0."); - } - if (offs + len > buf.length) { - throw new IndexOutOfBoundsException("offs(" + offs + ") + len(" + len + ") > buf.length(" + buf.length + ")."); - } + IOUtils.checkFromIndexSize(buf, offs, len); checkOpen(); for (final int hi = offs + len; offs < hi;) { write0(buf[offs++]); diff --git a/src/main/java/org/apache/commons/compress/compressors/deflate/DeflateCompressorInputStream.java b/src/main/java/org/apache/commons/compress/compressors/deflate/DeflateCompressorInputStream.java index 9a0974da97e..274c8ef4437 100644 --- a/src/main/java/org/apache/commons/compress/compressors/deflate/DeflateCompressorInputStream.java +++ b/src/main/java/org/apache/commons/compress/compressors/deflate/DeflateCompressorInputStream.java @@ -110,12 +110,8 @@ public int read() throws IOException { return ret; } - /** {@inheritDoc} */ @Override public int read(final byte[] buf, final int off, final int len) throws IOException { - if (len == 0) { - return 0; - } final int ret = in.read(buf, off, len); count(ret); return ret; diff --git a/src/main/java/org/apache/commons/compress/compressors/deflate64/Deflate64CompressorInputStream.java b/src/main/java/org/apache/commons/compress/compressors/deflate64/Deflate64CompressorInputStream.java 
index 0b5623b79d6..d0f35c51586 100644 --- a/src/main/java/org/apache/commons/compress/compressors/deflate64/Deflate64CompressorInputStream.java +++ b/src/main/java/org/apache/commons/compress/compressors/deflate64/Deflate64CompressorInputStream.java @@ -102,11 +102,9 @@ public int read() throws IOException { } } - /** - * @throws java.io.EOFException if the underlying stream is exhausted before the end of deflated data was reached. - */ @Override public int read(final byte[] b, final int off, final int len) throws IOException { + IOUtils.checkFromIndexSize(b, off, len); if (len == 0) { return 0; } diff --git a/src/main/java/org/apache/commons/compress/compressors/gzip/GzipCompressorInputStream.java b/src/main/java/org/apache/commons/compress/compressors/gzip/GzipCompressorInputStream.java index 916f7672098..681d7961608 100644 --- a/src/main/java/org/apache/commons/compress/compressors/gzip/GzipCompressorInputStream.java +++ b/src/main/java/org/apache/commons/compress/compressors/gzip/GzipCompressorInputStream.java @@ -445,6 +445,7 @@ public int read() throws IOException { */ @Override public int read(final byte[] b, int off, int len) throws IOException { + IOUtils.checkFromIndexSize(b, off, len); if (len == 0) { return 0; } diff --git a/src/main/java/org/apache/commons/compress/compressors/gzip/GzipCompressorOutputStream.java b/src/main/java/org/apache/commons/compress/compressors/gzip/GzipCompressorOutputStream.java index a62cee24426..e36b4cb7021 100644 --- a/src/main/java/org/apache/commons/compress/compressors/gzip/GzipCompressorOutputStream.java +++ b/src/main/java/org/apache/commons/compress/compressors/gzip/GzipCompressorOutputStream.java @@ -29,6 +29,7 @@ import org.apache.commons.compress.compressors.CompressorException; import org.apache.commons.compress.compressors.CompressorOutputStream; +import org.apache.commons.io.IOUtils; /** * Compressed output stream using the gzip format. 
This implementation improves over the standard {@link GZIPOutputStream} class by allowing the configuration @@ -127,6 +128,7 @@ public void write(final byte[] buffer) throws IOException { */ @Override public void write(final byte[] buffer, final int offset, final int length) throws IOException { + IOUtils.checkFromIndexSize(buffer, offset, length); checkOpen(); if (deflater.finished()) { throw new CompressorException("Cannot write more data, the end of the compressed data stream has been reached."); diff --git a/src/main/java/org/apache/commons/compress/compressors/lz4/BlockLZ4CompressorInputStream.java b/src/main/java/org/apache/commons/compress/compressors/lz4/BlockLZ4CompressorInputStream.java index cb8d13364a0..5dffa27cc68 100644 --- a/src/main/java/org/apache/commons/compress/compressors/lz4/BlockLZ4CompressorInputStream.java +++ b/src/main/java/org/apache/commons/compress/compressors/lz4/BlockLZ4CompressorInputStream.java @@ -24,6 +24,7 @@ import org.apache.commons.compress.compressors.CompressorException; import org.apache.commons.compress.compressors.lz77support.AbstractLZ77CompressorInputStream; import org.apache.commons.compress.utils.ByteUtils; +import org.apache.commons.io.IOUtils; /** * CompressorInputStream for the LZ4 block format. 
@@ -89,11 +90,9 @@ private boolean initializeBackReference() throws IOException { return true; } - /** - * {@inheritDoc} - */ @Override public int read(final byte[] b, final int off, final int len) throws IOException { + IOUtils.checkFromIndexSize(b, off, len); if (len == 0) { return 0; } diff --git a/src/main/java/org/apache/commons/compress/compressors/lz4/FramedLZ4CompressorInputStream.java b/src/main/java/org/apache/commons/compress/compressors/lz4/FramedLZ4CompressorInputStream.java index 9128e534e9d..243fc5d2e4c 100644 --- a/src/main/java/org/apache/commons/compress/compressors/lz4/FramedLZ4CompressorInputStream.java +++ b/src/main/java/org/apache/commons/compress/compressors/lz4/FramedLZ4CompressorInputStream.java @@ -235,9 +235,9 @@ public int read() throws IOException { return read(oneByte, 0, 1) == -1 ? -1 : oneByte[0] & 0xFF; } - /** {@inheritDoc} */ @Override public int read(final byte[] b, final int off, final int len) throws IOException { + org.apache.commons.io.IOUtils.checkFromIndexSize(b, off, len); if (len == 0) { return 0; } diff --git a/src/main/java/org/apache/commons/compress/compressors/lz4/FramedLZ4CompressorOutputStream.java b/src/main/java/org/apache/commons/compress/compressors/lz4/FramedLZ4CompressorOutputStream.java index d53088dde81..f9f2c5b7acc 100644 --- a/src/main/java/org/apache/commons/compress/compressors/lz4/FramedLZ4CompressorOutputStream.java +++ b/src/main/java/org/apache/commons/compress/compressors/lz4/FramedLZ4CompressorOutputStream.java @@ -24,6 +24,7 @@ import org.apache.commons.compress.compressors.CompressorOutputStream; import org.apache.commons.compress.utils.ByteUtils; +import org.apache.commons.io.IOUtils; /** * CompressorOutputStream for the LZ4 frame format. 
@@ -271,6 +272,7 @@ private void flushBlock() throws IOException { @Override public void write(final byte[] data, int off, int len) throws IOException { + IOUtils.checkFromIndexSize(data, off, len); if (params.withContentChecksum) { contentHash.update(data, off, len); } diff --git a/src/main/java/org/apache/commons/compress/compressors/lz77support/LZ77Compressor.java b/src/main/java/org/apache/commons/compress/compressors/lz77support/LZ77Compressor.java index 9105187ac94..07e19766416 100644 --- a/src/main/java/org/apache/commons/compress/compressors/lz77support/LZ77Compressor.java +++ b/src/main/java/org/apache/commons/compress/compressors/lz77support/LZ77Compressor.java @@ -21,6 +21,7 @@ import java.io.IOException; import java.util.Objects; +import org.apache.commons.io.IOUtils; import org.apache.commons.lang3.ArrayFill; /** @@ -414,9 +415,12 @@ public void compress(final byte[] data) throws IOException { * @param data the data to compress - must not be null * @param off the start offset of the data * @param len the number of bytes to compress + * @throws NullPointerException if data is {@code null} + * @throws IndexOutOfBoundsException if {@code off} or {@code len} are negative, or if {@code off + len} is bigger than {@code data.length}. 
* @throws IOException if the callback throws an exception */ public void compress(final byte[] data, int off, int len) throws IOException { + IOUtils.checkFromIndexSize(data, off, len); final int wSize = params.getWindowSize(); while (len > wSize) { // chop into windowSize sized chunks doCompress(data, off, wSize); diff --git a/src/main/java/org/apache/commons/compress/compressors/lzw/LZWInputStream.java b/src/main/java/org/apache/commons/compress/compressors/lzw/LZWInputStream.java index 2015b96de87..54bd8841130 100644 --- a/src/main/java/org/apache/commons/compress/compressors/lzw/LZWInputStream.java +++ b/src/main/java/org/apache/commons/compress/compressors/lzw/LZWInputStream.java @@ -27,6 +27,7 @@ import org.apache.commons.compress.compressors.CompressorInputStream; import org.apache.commons.compress.utils.BitInputStream; import org.apache.commons.compress.utils.InputStreamStatistics; +import org.apache.commons.io.IOUtils; /** *
@@ -276,6 +277,7 @@ public int read() throws IOException {
@Override
public int read(final byte[] b, final int off, final int len) throws IOException {
+ IOUtils.checkFromIndexSize(b, off, len);
if (len == 0) {
return 0;
}
diff --git a/src/main/java/org/apache/commons/compress/compressors/snappy/FramedSnappyCompressorInputStream.java b/src/main/java/org/apache/commons/compress/compressors/snappy/FramedSnappyCompressorInputStream.java
index f83f5251ea9..733be9582c2 100644
--- a/src/main/java/org/apache/commons/compress/compressors/snappy/FramedSnappyCompressorInputStream.java
+++ b/src/main/java/org/apache/commons/compress/compressors/snappy/FramedSnappyCompressorInputStream.java
@@ -198,9 +198,9 @@ public int read() throws IOException {
return read(oneByte, 0, 1) == -1 ? -1 : oneByte[0] & 0xFF;
}
- /** {@inheritDoc} */
@Override
public int read(final byte[] b, final int off, final int len) throws IOException {
+ org.apache.commons.io.IOUtils.checkFromIndexSize(b, off, len);
if (len == 0) {
return 0;
}
diff --git a/src/main/java/org/apache/commons/compress/compressors/snappy/FramedSnappyCompressorOutputStream.java b/src/main/java/org/apache/commons/compress/compressors/snappy/FramedSnappyCompressorOutputStream.java
index ce3be3c3c4b..b5e733b7e2f 100644
--- a/src/main/java/org/apache/commons/compress/compressors/snappy/FramedSnappyCompressorOutputStream.java
+++ b/src/main/java/org/apache/commons/compress/compressors/snappy/FramedSnappyCompressorOutputStream.java
@@ -26,6 +26,7 @@
import org.apache.commons.compress.compressors.CompressorOutputStream;
import org.apache.commons.compress.compressors.lz77support.Parameters;
import org.apache.commons.compress.utils.ByteUtils;
+import org.apache.commons.io.IOUtils;
/**
* CompressorOutputStream for the framing Snappy format.
@@ -125,6 +126,7 @@ private void flushBuffer() throws IOException {
@Override
public void write(final byte[] data, int off, int len) throws IOException {
+ IOUtils.checkFromIndexSize(data, off, len);
int blockDataRemaining = buffer.length - currentIndex;
while (len > 0) {
final int copyLen = Math.min(len, blockDataRemaining);
diff --git a/src/main/java/org/apache/commons/compress/compressors/snappy/SnappyCompressorInputStream.java b/src/main/java/org/apache/commons/compress/compressors/snappy/SnappyCompressorInputStream.java
index 03790331499..ec9b31fbcd7 100644
--- a/src/main/java/org/apache/commons/compress/compressors/snappy/SnappyCompressorInputStream.java
+++ b/src/main/java/org/apache/commons/compress/compressors/snappy/SnappyCompressorInputStream.java
@@ -24,6 +24,7 @@
import org.apache.commons.compress.compressors.CompressorException;
import org.apache.commons.compress.compressors.lz77support.AbstractLZ77CompressorInputStream;
import org.apache.commons.compress.utils.ByteUtils;
+import org.apache.commons.io.IOUtils;
/**
* CompressorInputStream for the raw Snappy format.
@@ -192,11 +193,9 @@ public long getUncompressedSize() {
return uncompressedSize;
}
- /**
- * {@inheritDoc}
- */
@Override
public int read(final byte[] b, final int off, final int len) throws IOException {
+ IOUtils.checkFromIndexSize(b, off, len);
if (len == 0) {
return 0;
}
diff --git a/src/main/java/org/apache/commons/compress/compressors/xz/XZCompressorInputStream.java b/src/main/java/org/apache/commons/compress/compressors/xz/XZCompressorInputStream.java
index 9f8e2356efc..bc97d0f2d7a 100644
--- a/src/main/java/org/apache/commons/compress/compressors/xz/XZCompressorInputStream.java
+++ b/src/main/java/org/apache/commons/compress/compressors/xz/XZCompressorInputStream.java
@@ -240,9 +240,6 @@ public int read() throws IOException {
@Override
public int read(final byte[] buf, final int off, final int len) throws IOException {
- if (len == 0) {
- return 0;
- }
try {
final int ret = in.read(buf, off, len);
count(ret);
diff --git a/src/main/java/org/apache/commons/compress/compressors/zstandard/ZstdCompressorInputStream.java b/src/main/java/org/apache/commons/compress/compressors/zstandard/ZstdCompressorInputStream.java
index 8b218738997..32b934568f4 100644
--- a/src/main/java/org/apache/commons/compress/compressors/zstandard/ZstdCompressorInputStream.java
+++ b/src/main/java/org/apache/commons/compress/compressors/zstandard/ZstdCompressorInputStream.java
@@ -113,9 +113,6 @@ public int read(final byte[] b) throws IOException {
@Override
public int read(final byte[] buf, final int off, final int len) throws IOException {
- if (len == 0) {
- return 0;
- }
final int ret = decIS.read(buf, off, len);
count(ret);
return ret;
diff --git a/src/main/java/org/apache/commons/compress/utils/BoundedArchiveInputStream.java b/src/main/java/org/apache/commons/compress/utils/BoundedArchiveInputStream.java
index 2808964d938..c66b24a1cdf 100644
--- a/src/main/java/org/apache/commons/compress/utils/BoundedArchiveInputStream.java
+++ b/src/main/java/org/apache/commons/compress/utils/BoundedArchiveInputStream.java
@@ -22,6 +22,8 @@
import java.io.InputStream;
import java.nio.ByteBuffer;
+import org.apache.commons.io.IOUtils;
+
/**
* NIO backed bounded input stream for reading a predefined amount of data.
*
@@ -69,6 +71,10 @@ public synchronized int read() throws IOException {
@Override
public synchronized int read(final byte[] b, final int off, final int len) throws IOException {
+ IOUtils.checkFromIndexSize(b, off, len);
+ if (len == 0) {
+ return 0;
+ }
if (loc >= end) {
return -1;
}
diff --git a/src/main/java/org/apache/commons/compress/utils/FixedLengthBlockOutputStream.java b/src/main/java/org/apache/commons/compress/utils/FixedLengthBlockOutputStream.java
index 805b61413c9..ce7d341d1bd 100644
--- a/src/main/java/org/apache/commons/compress/utils/FixedLengthBlockOutputStream.java
+++ b/src/main/java/org/apache/commons/compress/utils/FixedLengthBlockOutputStream.java
@@ -201,6 +201,7 @@ private void padBlock() {
@Override
public void write(final byte[] b, final int offset, final int length) throws IOException {
+ IOUtils.checkFromIndexSize(b, offset, length);
if (!isOpen()) {
throw new ClosedChannelException();
}
From 54e7aa46269463a694e9b06d44e2f45b863ca43c Mon Sep 17 00:00:00 2001
From: "Gary D. Gregory"
- * GNU.sparse.map: Map of non-null data chunks. It is a string consisting of comma-separated values "offset,size[,offset-1,size-1...]"
- *
- * Will internally invoke {@link #parseFromPAX01SparseHeaders} and map IOExceptions to a RzuntimeException, You should use
- * {@link #parseFromPAX01SparseHeaders} directly instead.
- *
- * For PAX Format 0.1, the sparse headers are stored in a single variable: GNU.sparse.map
- *
- * GNU.sparse.map: Map of non-null data chunks. It is a string consisting of comma-separated values "offset,size[,offset-1,size-1...]"
- * Ensures that all {@code ArchiveInputStream} implementations and other
+ * archive handlers expose a consistent set of configuration options. Note: This overload exists to support legacy constructors that did not declare
+ * {@link IOException}. For new constructors, prefer
+ * {@link #ArchiveInputStream(AbstractArchiveBuilder)} and propagate I/O errors to callers.
@@ -263,6 +251,14 @@ public Builder setMaxMemoryLimitKiB(final int maxMemoryLimitKiB) {
return this;
}
+ Builder setOptions(final SevenZFileOptions options) {
+ Objects.requireNonNull(options, "options");
+ this.maxMemoryLimitKiB = options.getMaxMemoryLimitInKb();
+ this.useDefaultNameForUnnamedEntries = options.getUseDefaultNameForUnnamedEntries();
+ this.tryToRecoverBrokenArchives = options.getTryToRecoverBrokenArchives();
+ return this;
+ }
+
/**
* Sets the password.
*
@@ -301,7 +297,9 @@ public Builder setPassword(final String password) {
*
* @param seekableByteChannel the input channel.
* @return {@code this} instance.
+ * @deprecated Since 1.29.0, use {@link #setChannel} instead.
*/
+ @Deprecated
public Builder setSeekableByteChannel(final SeekableByteChannel seekableByteChannel) {
return setChannel(seekableByteChannel);
}
@@ -421,10 +419,6 @@ public static boolean matches(final byte[] buffer, final int ignored) {
return ArrayUtils.startsWith(buffer, SIGNATURE);
}
- private static SeekableByteChannel newByteChannel(final File file) throws IOException {
- return Files.newByteChannel(file.toPath(), EnumSet.of(StandardOpenOption.READ));
- }
-
private static long readUint64(final ByteBuffer in) throws IOException {
// long rather than int as it might get shifted beyond the range of an int
final long firstByte = getUnsignedByte(in);
@@ -496,7 +490,7 @@ public static int toNonNegativeInt(final String description, final long value) t
*/
@Deprecated
public SevenZFile(final File fileName) throws IOException {
- this(fileName, SevenZFileOptions.DEFAULT);
+ this(builder().setFile(fileName));
}
/**
@@ -510,7 +504,7 @@ public SevenZFile(final File fileName) throws IOException {
@SuppressWarnings("resource") // caller closes
@Deprecated
public SevenZFile(final File file, final byte[] password) throws IOException {
- this(newByteChannel(file), file.getAbsolutePath(), password, true, SevenZFileOptions.DEFAULT);
+ this(builder().setFile(file).setPassword(password));
}
/**
@@ -524,7 +518,7 @@ public SevenZFile(final File file, final byte[] password) throws IOException {
*/
@Deprecated
public SevenZFile(final File file, final char[] password) throws IOException {
- this(file, password, SevenZFileOptions.DEFAULT);
+ this(builder().setFile(file).setPassword(password));
}
/**
@@ -540,8 +534,7 @@ public SevenZFile(final File file, final char[] password) throws IOException {
@SuppressWarnings("resource") // caller closes
@Deprecated
public SevenZFile(final File file, final char[] password, final SevenZFileOptions options) throws IOException {
- this(newByteChannel(file), // NOSONAR
- file.getAbsolutePath(), AES256SHA256Decoder.utf16Decode(password), true, options);
+ this(builder().setFile(file).setPassword(password).setOptions(options));
}
/**
@@ -555,7 +548,7 @@ public SevenZFile(final File file, final char[] password, final SevenZFileOption
*/
@Deprecated
public SevenZFile(final File file, final SevenZFileOptions options) throws IOException {
- this(file, null, options);
+ this(builder().setFile(file).setOptions(options));
}
/**
@@ -571,7 +564,7 @@ public SevenZFile(final File file, final SevenZFileOptions options) throws IOExc
*/
@Deprecated
public SevenZFile(final SeekableByteChannel channel) throws IOException {
- this(channel, SevenZFileOptions.DEFAULT);
+ this(builder().setChannel(channel));
}
/**
@@ -588,7 +581,7 @@ public SevenZFile(final SeekableByteChannel channel) throws IOException {
*/
@Deprecated
public SevenZFile(final SeekableByteChannel channel, final byte[] password) throws IOException {
- this(channel, DEFAULT_FILE_NAME, password);
+ this(builder().setChannel(channel).setPassword(password));
}
/**
@@ -605,7 +598,7 @@ public SevenZFile(final SeekableByteChannel channel, final byte[] password) thro
*/
@Deprecated
public SevenZFile(final SeekableByteChannel channel, final char[] password) throws IOException {
- this(channel, password, SevenZFileOptions.DEFAULT);
+ this(builder().setChannel(channel).setPassword(password));
}
/**
@@ -623,7 +616,7 @@ public SevenZFile(final SeekableByteChannel channel, final char[] password) thro
*/
@Deprecated
public SevenZFile(final SeekableByteChannel channel, final char[] password, final SevenZFileOptions options) throws IOException {
- this(channel, DEFAULT_FILE_NAME, password, options);
+ this(builder().setChannel(channel).setPassword(password).setOptions(options));
}
/**
@@ -640,7 +633,7 @@ public SevenZFile(final SeekableByteChannel channel, final char[] password, fina
*/
@Deprecated
public SevenZFile(final SeekableByteChannel channel, final SevenZFileOptions options) throws IOException {
- this(channel, DEFAULT_FILE_NAME, null, options);
+ this(builder().setChannel(channel).setOptions(options));
}
/**
@@ -657,7 +650,7 @@ public SevenZFile(final SeekableByteChannel channel, final SevenZFileOptions opt
*/
@Deprecated
public SevenZFile(final SeekableByteChannel channel, final String fileName) throws IOException {
- this(channel, fileName, SevenZFileOptions.DEFAULT);
+ this(builder().setChannel(channel).setName(fileName));
}
/**
@@ -675,52 +668,30 @@ public SevenZFile(final SeekableByteChannel channel, final String fileName) thro
*/
@Deprecated
public SevenZFile(final SeekableByteChannel channel, final String fileName, final byte[] password) throws IOException {
- this(channel, fileName, password, false, SevenZFileOptions.DEFAULT);
+ this(builder().setChannel(channel).setName(fileName).setPassword(password));
}
- private SevenZFile(final SeekableByteChannel channel, final String fileName, final byte[] password, final boolean closeOnError, final int maxMemoryLimitKiB,
- final boolean useDefaultNameForUnnamedEntries, final boolean tryToRecoverBrokenArchives) throws IOException {
- boolean succeeded = false;
- this.channel = channel;
- this.fileName = fileName;
- this.maxMemoryLimitKiB = maxMemoryLimitKiB;
- this.useDefaultNameForUnnamedEntries = useDefaultNameForUnnamedEntries;
- this.tryToRecoverBrokenArchives = tryToRecoverBrokenArchives;
+ private SevenZFile(Builder builder) throws IOException {
+ this.channel = builder.getChannel(SeekableByteChannel.class);
try {
+ this.fileName = builder.getName();
+ this.maxMemoryLimitKiB = builder.maxMemoryLimitKiB;
+ this.useDefaultNameForUnnamedEntries = builder.useDefaultNameForUnnamedEntries;
+ this.tryToRecoverBrokenArchives = builder.tryToRecoverBrokenArchives;
+ final byte[] password = builder.password;
archive = readHeaders(password);
- if (password != null) {
- this.password = Arrays.copyOf(password, password.length);
- } else {
- this.password = null;
- }
- succeeded = true;
+ this.password = password != null ? Arrays.copyOf(password, password.length) : null;
} catch (final ArithmeticException | IllegalArgumentException e) {
- throw new ArchiveException(e);
- } finally {
- if (!succeeded && closeOnError) {
- this.channel.close();
+ final ArchiveException archiveException = new ArchiveException(e);
+ try {
+ channel.close();
+ } catch (final IOException suppressed) {
+ archiveException.addSuppressed(suppressed);
}
+ throw archiveException;
}
}
- /**
- * Constructs a new instance.
- *
- * @param channel the channel to read.
- * @param fileName name of the archive - only used for error reporting.
- * @param password optional password if the archive is encrypted.
- * @param closeOnError closes the channel on error.
- * @param options options.
- * @throws IOException if reading the archive fails
- * @deprecated Use {@link Builder#get()}.
- */
- @Deprecated
- private SevenZFile(final SeekableByteChannel channel, final String fileName, final byte[] password, final boolean closeOnError,
- final SevenZFileOptions options) throws IOException {
- this(channel, fileName, password, closeOnError, options.getMaxMemoryLimitInKb(), options.getUseDefaultNameForUnnamedEntries(),
- options.getTryToRecoverBrokenArchives());
- }
-
/**
* Reads a SeekableByteChannel as 7z archive
*
@@ -736,7 +707,7 @@ private SevenZFile(final SeekableByteChannel channel, final String fileName, fin
*/
@Deprecated
public SevenZFile(final SeekableByteChannel channel, final String fileName, final char[] password) throws IOException {
- this(channel, fileName, password, SevenZFileOptions.DEFAULT);
+ this(builder().setChannel(channel).setName(fileName).setPassword(password));
}
/**
@@ -755,7 +726,7 @@ public SevenZFile(final SeekableByteChannel channel, final String fileName, fina
*/
@Deprecated
public SevenZFile(final SeekableByteChannel channel, final String fileName, final char[] password, final SevenZFileOptions options) throws IOException {
- this(channel, fileName, AES256SHA256Decoder.utf16Decode(password), false, options);
+ this(builder().setChannel(channel).setName(fileName).setPassword(password).setOptions(options));
}
/**
@@ -773,7 +744,7 @@ public SevenZFile(final SeekableByteChannel channel, final String fileName, fina
*/
@Deprecated
public SevenZFile(final SeekableByteChannel channel, final String fileName, final SevenZFileOptions options) throws IOException {
- this(channel, fileName, null, false, options);
+ this(builder().setChannel(channel).setName(fileName).setOptions(options));
}
private InputStream buildDecoderStack(final Folder folder, final long folderOffset, final int firstPackStreamIndex, final SevenZArchiveEntry entry)
diff --git a/src/main/java/org/apache/commons/compress/archivers/tar/AbstractTarBuilder.java b/src/main/java/org/apache/commons/compress/archivers/tar/AbstractTarBuilder.java
index 5555f5ccc2b..0f471a1ca1f 100644
--- a/src/main/java/org/apache/commons/compress/archivers/tar/AbstractTarBuilder.java
+++ b/src/main/java/org/apache/commons/compress/archivers/tar/AbstractTarBuilder.java
@@ -19,7 +19,7 @@
package org.apache.commons.compress.archivers.tar;
-import org.apache.commons.io.build.AbstractStreamBuilder;
+import org.apache.commons.compress.archivers.AbstractArchiveBuilder;
/**
* Abstracts TAR builder operations.
@@ -28,7 +28,7 @@
* @param the type of builder subclass.
* @since 1.29.0
*/
-public abstract class AbstractTarBuilder This interface provides a higher-level abstraction over archive files, similar to
+ * {@link ZipFile}, but generalized for a variety of archive formats. Implementations are {@link Closeable} and should be closed once they are no longer
+ * needed in order to release any underlying system resources. The order of entries is format-dependent but guaranteed to be consistent
+ * across multiple invocations on the same archive. The order of entries is format-dependent but stable for a given archive. The returned stream must be closed after use to free
+ * associated resources. The caller is responsible for closing the returned stream after use.
@@ -1045,6 +1057,7 @@ public Iterable The order of entries is format-dependent but stable for a given archive. The returned stream must be closed after use to free
- * associated resources. The order of entries is format-dependent but stable for a given archive. The returned stream must be closed after use to free
+ * associated resources.
@@ -252,6 +247,11 @@ public Builder setMaxMemoryLimitKiB(final int maxMemoryLimitKiB) {
return this;
}
+ Builder setName(final String name) {
+ this.name = name;
+ return this;
+ }
+
Builder setOptions(final SevenZFileOptions options) {
Objects.requireNonNull(options, "options");
this.maxMemoryLimitKiB = options.getMaxMemoryLimitInKb();
@@ -482,6 +482,27 @@ public static int toNonNegativeInt(final String description, final long value) t
private final boolean tryToRecoverBrokenArchives;
+ private SevenZFile(Builder builder) throws IOException {
+ this.channel = builder.getChannel(SeekableByteChannel.class);
+ try {
+ this.fileName = builder.getName();
+ this.maxMemoryLimitKiB = builder.maxMemoryLimitKiB;
+ this.useDefaultNameForUnnamedEntries = builder.useDefaultNameForUnnamedEntries;
+ this.tryToRecoverBrokenArchives = builder.tryToRecoverBrokenArchives;
+ final byte[] password = builder.password;
+ archive = readHeaders(password);
+ this.password = password != null ? Arrays.copyOf(password, password.length) : null;
+ } catch (final ArithmeticException | IllegalArgumentException e) {
+ final ArchiveException archiveException = new ArchiveException(e);
+ try {
+ channel.close();
+ } catch (final IOException suppressed) {
+ archiveException.addSuppressed(suppressed);
+ }
+ throw archiveException;
+ }
+ }
+
/**
* Reads a file as unencrypted 7z archive.
*
@@ -672,27 +693,6 @@ public SevenZFile(final SeekableByteChannel channel, final String fileName, fina
this(builder().setChannel(channel).setName(fileName).setPassword(password));
}
- private SevenZFile(Builder builder) throws IOException {
- this.channel = builder.getChannel(SeekableByteChannel.class);
- try {
- this.fileName = builder.getName();
- this.maxMemoryLimitKiB = builder.maxMemoryLimitKiB;
- this.useDefaultNameForUnnamedEntries = builder.useDefaultNameForUnnamedEntries;
- this.tryToRecoverBrokenArchives = builder.tryToRecoverBrokenArchives;
- final byte[] password = builder.password;
- archive = readHeaders(password);
- this.password = password != null ? Arrays.copyOf(password, password.length) : null;
- } catch (final ArithmeticException | IllegalArgumentException e) {
- final ArchiveException archiveException = new ArchiveException(e);
- try {
- channel.close();
- } catch (final IOException suppressed) {
- archiveException.addSuppressed(suppressed);
- }
- throw archiveException;
- }
- }
-
/**
* Reads a SeekableByteChannel as 7z archive
*
@@ -1037,15 +1037,6 @@ public Iterable
@@ -2216,6 +2207,15 @@ private boolean skipEntriesWhenNeeded(final int entryIndex, final boolean isInSa
return true;
}
+ /**
+ * {@inheritDoc}
+ * @since 1.29.0
+ */
+ @Override
+ public IOStream extends SevenZArchiveEntry> stream() {
+ return IOStream.of(archive.files);
+ }
+
@Override
public String toString() {
return archive.toString();
diff --git a/src/main/java/org/apache/commons/compress/archivers/tar/TarFile.java b/src/main/java/org/apache/commons/compress/archivers/tar/TarFile.java
index ddd76fc1b08..2a0734727e8 100644
--- a/src/main/java/org/apache/commons/compress/archivers/tar/TarFile.java
+++ b/src/main/java/org/apache/commons/compress/archivers/tar/TarFile.java
@@ -453,15 +453,6 @@ public List
diff --git a/src/test/java/org/apache/commons/compress/archivers/AbstractArchiveFileTest.java b/src/test/java/org/apache/commons/compress/archivers/AbstractArchiveFileTest.java
index 36cc19f95e5..4443c4696c3 100644
--- a/src/test/java/org/apache/commons/compress/archivers/AbstractArchiveFileTest.java
+++ b/src/test/java/org/apache/commons/compress/archivers/AbstractArchiveFileTest.java
@@ -42,13 +42,14 @@
*/
public abstract class AbstractArchiveFileTest This interface provides a higher-level abstraction over archive files, similar to
- * {@link ZipFile}, but generalized for a variety of archive formats.
+ * This interface provides an abstraction over archive files, similar to {@link ZipFile}, but generalized for a variety of archive formats.
+ * Implementations are {@link Closeable} and should be closed once they are no longer
- * needed in order to release any underlying system resources.
+ * Implementations are {@link Closeable} and should be closed once they are no longer needed in order to release any underlying system resources.
+ * The order of entries is format-dependent but guaranteed to be consistent
- * across multiple invocations on the same archive.
+ * The order of entries is format-dependent but guaranteed to be consistent across multiple invocations on the same archive.
+ * The caller is responsible for closing the returned stream after use.
+ * The caller is responsible for closing the returned stream after use.
+ * The order of entries is format-dependent but stable for a given archive. The returned stream must be closed after use to free
- * associated resources.
+ * The order of entries is format-dependent but stable for a given archive.
+ *
+ * The returned stream must be closed after use to free associated resources.
+ *
- * GNU.sparse.size=size
- * GNU.sparse.numblocks=numblocks
- * repeat numblocks times
- * GNU.sparse.offset=offset
- * GNU.sparse.numbytes=numbytes
- * end repeat
- *
- *
Most operating systems and file systems impose relatively small limits on + * file name or path length, which are sufficient for everyday use. By contrast, + * many archive formats permit much longer names: for example, TAR can encode + * names of several gigabytes, while ZIP allows up to 64 KiB.
+ * + *This setting applies an upper bound on entry name length after encoding + * with the {@link #setCharset configured charset}. If an entry name exceeds this + * limit, an {@link ArchiveException} will be thrown during reading.
+ * + *The default is {@link Short#MAX_VALUE}, which already exceeds the limits + * of most operating systems.
+ * + * @param maxEntryNameLength The maximum entry name length in bytes; must be positive + * @return {@code this} instance. + * @throws IllegalArgumentException If {@code maxEntryNameLength} is not positive. + */ + public B setMaxEntryNameLength(final int maxEntryNameLength) { + if (maxEntryNameLength <= 0) { + throw new IllegalArgumentException("maxEntryNameLength must be positive"); + } + this.maxEntryNameLength = maxEntryNameLength; + return asThis(); + } + + /** + * Gets the maximum length of an archive entry name. + * + * @return The maximum length of an archive entry name. + */ + public int getMaxEntryNameLength() { + return maxEntryNameLength; + } } diff --git a/src/main/java/org/apache/commons/compress/archivers/ArchiveInputStream.java b/src/main/java/org/apache/commons/compress/archivers/ArchiveInputStream.java index 15d907aa105..da5475f7acd 100644 --- a/src/main/java/org/apache/commons/compress/archivers/ArchiveInputStream.java +++ b/src/main/java/org/apache/commons/compress/archivers/ArchiveInputStream.java @@ -95,7 +95,9 @@ public Iterator- * GNU.sparse.size=size - * GNU.sparse.numblocks=numblocks - * repeat numblocks times - * GNU.sparse.offset=offset - * GNU.sparse.numbytes=numbytes - * end repeat - *- *
- * For PAX Format 0.1, the sparse headers are stored in a single variable : GNU.sparse.map - *
- *- * GNU.sparse.map: Map of non-null data chunks. It is a string consisting of comma-separated values "offset,size[,offset-1,size-1...]" - *
- * - * @param inputStream input stream to read keys and values - * @param sparseHeaders used in PAX Format 0.0 & 0.1, as it may appear multiple times, the sparse headers need to be stored in an array, not a map - * @param globalPaxHeaders global PAX headers of the tar archive - * @param headerSize total size of the PAX header - * @return map of PAX headers values found inside the current (local or global) PAX headers tar entry. - * @throws IOException if an I/O error occurs. - * @since 1.21 + * Parses and processes the contents of a PAX header block. + * + *This method reads key–value pairs from the given input stream, applies + * them to the provided global PAX headers, and performs additional handling:
+ * + *n^~5{P&s$zqFJ8+5__c z_xSJkAV_T0K$)NZzcrG#|GEqCAOEd}!7%LPzxIIq{{#LTz6c^aHBjcK|NBMo_Fs1a z{^P&XEI4MJ{MR0k|9`}P$0tE%qXx?S^#3i9yZzT)fdBY!GzykcC;znv Xg3-BNReI~&(>Eyrmfc$?6|2-cCscjl4^V9z~N9y)pcLDz6zs(?+ z2A%xZ9+3Zk#(&dSL2Q=>%KY^IO%b~N*Ij`B`0p|Yt~n?FwFl(?U+~}cS&-YLfige+ z-zPG+|GEqCAOB6pz&7UOzxIIq|1196z6*kTG*ITJ|JnuZzwQG3$A6D0@J%`Sub-3u zf5U&@he2|S2Fm>Z@n3fV{^P&J5EzHl|IgQ78~U60tNj1R%dhNyY1jInUH1$4mIi3x I4d1|j00iJfd;kCd literal 0 HcmV?d00001 diff --git a/src/test/resources/synthetic/long-name/long-name.7z b/src/test/resources/synthetic/long-name/long-name.7z new file mode 100644 index 0000000000000000000000000000000000000000..8a88f1f4d91e5defeb772deb79dbb4dc8a69fff4 GIT binary patch literal 65587 zcmeIuu?d7w6a>KkKvNGf6$~xm>ah(2`DIvzHehB4hE`z%!l7Ilvzsg%&Uf6_zD>ok zoKyOzN%5|E@9rq0iu{ROU-8_75+Fc;009C72oNAZfB*pk1PBlyK!5-N0t5&UAV7cs z0RjXF5FkK+009C72oNAZfB*pk1PBlyK!5-N0t5&UAV7cs0RjXF5FkK+009C72oNAZ zfB*pk1PBlyK!5-N0t5&UAV7cs0RjXF5FkK+009C72oNAZfB*pk1PBlyK!5-N0t5&U zAV7cs0RjXF5FkK+009C72oNAZfB*pk1PBlyK!5-N0t5&UAV7cs0RjXF5FkK+009C7 z2oNAZfB*pk1PBlyK!5-N0t5&UAV7cs0RjXF5FkK+009C72oNAZfB*pk1PBlyK!5-N z0t5&UAV7cs0RjXF5FkK+009C72oNAZfB*pk1PBlyK!5-N0t5&UAV7csf%N7Ft_X}@ literal 0 HcmV?d00001 diff --git a/src/test/resources/synthetic/long-name/long-name.arj b/src/test/resources/synthetic/long-name/long-name.arj new file mode 100644 index 0000000000000000000000000000000000000000..27d3cec2c89d44f2a4f2efcdef218db751d9eda4 GIT binary patch literal 2669 zcmYdzrOP14$jHFJ#KeFd *giqrs^dYWic>>_~qPUU`TkS!3EaF0Mv>VB#wg7 l5Eu=C(GVC7fzc2c4S~@R7!83D83GIpY#KSTpcW$o0{}RO*+2jQ literal 0 HcmV?d00001 diff --git a/src/test/resources/synthetic/long-name/long-name.dump b/src/test/resources/synthetic/long-name/long-name.dump new file mode 100644 index 0000000000000000000000000000000000000000..561a47fae50484a03c25ef28bbd083f468502a43 GIT binary patch literal 267264 zcmeI&ce9vf@y78J6vf^ <_xocIOClA8K3K$e}0D*&z;?8ulcMqzm1Ll&z^s6Y#jB-wtx5kDODygZvuOE z-Mo9x3!b~}&ON*LKDPG-)&Kq 
xTzZvML{|9^k(`GW`cZ?10om-&$iFoFO33A|`i2b+_w{Qr~uZ<-E2GJ*9?;IK_C zRsK~|{{MF5!Gj0RJyhLZ|KX;8f5wjsJv*Z{-e3ZYPe56J<;KRg9r1t1&xh@Zf6!fU z#I}utyS)D_{(blhFoCU{K<|{|m1t&u{SrI{%e@`Tr;U-{R@>>zTl^6HwO6|8WbX|3^d}|M7p>?d0{Xo Mti2qwXettg_Sabr)dig(Yf%N}nQOAG$UvxQne9I@$`LFED|3BdWmd~FDn81<~ zP}a-;aSNpXFONF@ 8mw{NIKD%WWnv&zL~xzp^j?e~bS! zJ^^nqfyE}Ete5}e7D)eJ6?OjiKmISam^?gX0-gWLzWo0U{!jS^Ji-K)nt-xi{*PNA z{eN}T@gM(}+DqP@Gl9;3WncdP8vo~f1YThR3r#>-FaO6akp90W>iCcU3#}#3PMSdH zzp^j?e}(^(z5>rMfn_G3te5}e7D)eJ8+H80|7Et4S7%M2^IzGQ|G&ilS)YM-n7|?v zP}a-;aSNpXuZueV Hiy|j{o?-z)JGu#0hl%EBo^QXZSzy zOYjsEm_Gq!z5E}yK>Gj2sN+BW&)-O1oH>Ece`R0({}lgcehS`V0@Ej;te5}e7D)f! z6m|T^|LF_KgHtEa`LFED|DWLh)NjFKOknl|l=bp|+yd$Uo1>2Z_&<9ed2j9nI{%e@ z`Tt}5pZhU*jR{PifU;ixk6R%9e@oQyAO9z>BhO8qKt{^S4LZREAt6X^U`_T~Q%@qhN`;5{ZVbppzI`9E%f^#5&9$AA2vx{N$F zeFB~T%D(*n0sc?_9z4hdW==p^FaO6akp90t>iCcUGk1}<=1-vWU)h)c-^c&?AA}c~ zz{CkC>*fEr1=9a_L>>R}f8r|g)B+Re{8#qn|M&2JfiJ?7Okmywl=bp|+yd$Ukx|Eg z{GYdpytKpwI{%e@`Tt$~U*ePSCKH%80cE}XAGbjI|IVo6KmJc!L>^jX0-gWLzWo0V z{x9-Pc$5junt-xi{*PNA{eM@~@gM(Z?IG_hGl9;3Wncb(8~>O2D7?x9CQU$DFaO6a zkp90r>iCcUlh%-D7Meikzp^j?zlHw`eHET%0&^yyte5}e7D)fMM;-t1f6f;2%2E^P z{NEmb8TtQB{9o#`@GcXWGJ);!O8Gx-f%N}9QOAG$pR$BJve*PV|CN3D{|)?K?7Q$V z6PPgpWxf30e(2})|Gn`u{^NhF0e@CJfzE$rU;cj`|5yCwX^jbNY$)sH|F{Lx|Mx{5 z|M8!Uta}2T|H{7n{~G?U`{UD{{REWt@_*a{>Hqtqj{o>?ACQ#~pz~kZm;YbI|CN7z z+H;?PvR?j=TOj>ED(d);|L*x%d;gvP%D(*n3jVMC^V6UC1eEpif7}A;{|BOu|M+hn zkJY!|`LFED|1abJ>c2k?dQU)EFaO6akp6!#>iCcU-sxC>_nrUBzWo0Z{;z)l=+Jrs z%6j=fZh`dwLs7?n{I?E=LGzve%D()67XJ-4fEJx6psbhw;}%H&KOA-Z$A9N+IP~86 zuk6eJ{rK;&0`zD+0cE}XAGbjIe{|IGAODS`VbOZ$zp^j?_u;?A4$!3U1eEpif7}A; z|1nX=fBg4NhDYa}|H{7ne+K_OmVhp8C!nmC|Kk=&|BsD2{^P%GFiaZn{8#qn|I_$y zvIVs1Iss+9{2#YK`u~xr<3Ikp=E9}#&VOZJ{y&BPE^9!arV~)s%l~l;r2jjjj{o>? 
z8Vj4YJO7n^`Tr#T+w1|2dQL!DFaO6akp6!(>iCcUo~iKZy7OPzm;X=Tzt1AjspSNe z_40q*0_p#8QOAG$w+w|*)1CjyzWje2|BW_*Rvjmxte5}e7D)e(k2?P2zhfqxdhYyJ z_T~R$`0um|^lCT(Wxf0#w?O)TLe%je{|zHy)pF;*vM>K1#eb_^pjp2ODC_0_xCPSx zk3}8-@!u~IULAM-EBo^Q5&ZXB2D-JIfU;ixk6R%9KQZd~kN Hn!w$AA2{3WH&@o&U <^TQo@3<25Y%~F7z5E}yK>B}r)bStxjiO-LYUjVQFaPhuf6JYqX`cxw>*fEr1=9aB zqK^Og?~?@2PCNgVeffVc{(CM3UE54RSug*`Es*}78Fl=}f14nfHrn~G?92aq@ZWSR zXxn81%6j=fZh`dwtf=EZ{=4MBwa?CfWncc^jsLD|LEk15P}a-;aSNpXXGb0X@!uo{ zwrzI)EBo?)5B}Tk1&w=5Kv^&U$1RZlpA&Wb$A6C$_;%U(uk6eJyYSz4G3eZ40?K;% zKW>5ae`nP3AO9^vVBBQqzp^j?zli_Fn?dUi6WAF|mjB}xNdM1`I{xFoLk66C?EF{u z<^P@d@4OoHZZH95z5L&K=;!qRy!aXa@juppKP#R<=fAQq|L?&66@PhJV*(o+%6j=f zZh`dw<59 K{$NzPIe7durfU;ixk6R%9|3uXBAOGzGvho3R{ww?P z|2F(z`PZjC_X#NL<^Q+^(*N_Lj{o@Yo{zQn-}$fX%m3Z@zxK~hf94ZV*318K3#9)S zL>>R}-#i|xZ@=?j*_Z#f;{WQuKMi_MKv^&U$1RZlUl?`#$A9m1tiSute`R0(--7?^ zUjRC^o`AAm{*PNA{l6&c_>ceA;V@{v^IzGQ|2N~m!3NNx^8}Rj@_*a{>Ho!1$AA2H z&W1zpo&U <^N6iZ?OY3={o^s zz5E}yK>GiwsN+BW`zFJq^Ui-|U;f{S{~k*~m$nm7*318K3#9*-L>>R}-!>Q~jd%Vl z`||$__;0cWwCOqlWxf0#w?O)TY1Hu_|6Oz8(s$>-vM>KXkN+-fK%b@)P}a-;aSNpX zmqi`_@!vESHf?wQEBo^QbNFww2Q=zA0cE}XAGbjIe|gmLAOAg5;nQ{Jzp^j?Z@_<_ zMW9p52`KC3|F{Lx|0|-7|M+hi3Ztew|CN3De?9&iZ33-2PC!{N|Hmzm{$Ck&{KtRC zOgQ!2`LFED|LgGIX%* <3IizM#8G)&VOZJ{$Gp#R=Ys6eiKmE z%l~l;r2n6eI{xFoUn0CZ?)+Ew<^N~#-)kA@)@}mIdig(Yf%O0CsN+BW+Xcd`;m&_$ zU;cjv|IN06cHJhRte5}e7D)fEi8}t{zgr&M`tAHz_T~RI`0ut3^lLT&Wxf0#w?O*; znW*DG{+q?YuHDXmWncbZjsJH0K*L@WP}a-;aSNpXpN%^H 5a|JtbIKmJ>V!LZrRe`R0(UxojM8$ruX6HwO6|8WbX|JOww|MA}` z3y!^Z{ww?P|4RIKTnTzMnt-xi{*PNA{l7ly_>ccaQLt>a^IzGQ|5xC@ HiH;$AA3yNrGpmo&U <^Q+^(tpW^|M>5a z0p}h&|F!k<|6=@iUJZITn1Hfg{@--y=k)*P_! 
y|GGau-Pun-Sug*`Es*}-8g=}~fBS%} zd;p#Q%D()+0RLD1^=Z$20?K;%KW>5ae|OaJAOGF+vG)Eu|CN3De?I=N{qxhG`2>{p z@_*a{>Hlp}$AA1ckH_lU@BCNx<^Lz}fA!y=2E8Yste5}e7D)eZk2?P2zjr#;-+kx5 zvM>KXj{oal06MgufU;ixk6R%9za#4SkN?);FlfH>U)h)c=i$G>2GFAO1eEpif7}A; z|D93CfBbjOhC}b2|H{7nKNtTUR)8LjC!nmC|Kk=&|GyY@{KtReXjrt~`LFED|DE`6 zu>&;eI{{_A{2#YK`hQo{@gM(vli|^M=fAQq|IfjHk0qc>+X*P^<^Q+^(*HeC$AA2{ z4TeeMo&U Qdm;d7yNdNDSI{xFoYc5>+?)+Ew<^Ngu@3IE;X*vOA zz5E}yK>B}A)bStxO=DrxcIUscFaOWPf15p^QO^k|>*fEr1=9a}qmKXh@0kjpt~>vg zeffU|{`)Kfomx&nSug*`Es*}-7j^u{f6Gu9HQo8I?92bt@!x0@Xw`87%6j=fZh`dw z{;1 cy-+Quk6eJQ}Eww8R*t-0?K;%KW>5a|G}u^KmOYV z!mQ!We`R0(pN#)z+d#W+6HwO6|8WbX{|`kS|MA}~4{rT-{ww?P|0Mi(TL=0zn}D)j z{*PNA{eL*>_>cc)aj Hi~9$AA3yN`qgwo&U ^BI@{$|2|3Z?6mV=*_Zzx#edJGplh26DC_0_xCPSxC!>!4_-_*g z(?&c0m3{fY1OH98g0@{Ipsbhw;}%H&pNcyE I?&SN7%qNATZuE$G{10?K;% zKW>5a|LLgXKmMD |D9KZ-VG+8te5}$4*i_|KN~;eKmNxW@Mpyn==@jq<^RRc mJ%8}P{%tGnp4OOv_XG~#)c)qAEC2r>|C^@6k4#{F6Zj8DJVbl| literal 0 HcmV?d00001 diff --git a/src/test/resources/synthetic/long-name/long-name.zip b/src/test/resources/synthetic/long-name/long-name.zip new file mode 100644 index 0000000000000000000000000000000000000000..64ddc64c59e67afb083b3fe0a23ab896cd190903 GIT binary patch literal 65632 zcmeIuI}v~|5Jl0)S3z1Npd?K!pah(U)C>~N-Pz`qr`2UG`K#Z_B?1Hp5FkK+009C7 z2oNAZfB*pk1PBlyK!5-N0t5&UAV7cs0RjXF5FkK+009C72oNAZfB*pk1PBlyK!5-N z0t5&UAV7cs0RjXF5FkK+009C72oNAZfB*pk1PBlyK!5-N0t5&UAV7cs0RjXF5FkK+ z009C72oNAZfB*pk1PBlyK!5-N0t5&UAV8qiX)a62U;X~Bya*5=K!5-N0t5&UAV7cs z0RjXF5FkK+009C72oNAZfB*pk1PBlyK!5-N0t5&UAV7cs0RjXF5FkK+009C72oNAZ zfB*pk1PBlyK!5-N0t5&UAV7cs0RjXF5FkK+009C72oNAZfB*pk1PBlyK!5-N0t5&U nAV7cs0RjXF5FkK+009C72oNAZfB=E3)ossc8h1X)F+O<#;@t~d literal 0 HcmV?d00001 diff --git a/src/test/resources/synthetic/long-name/newc-fail.cpio b/src/test/resources/synthetic/long-name/newc-fail.cpio new file mode 100644 index 0000000000000000000000000000000000000000..feedcbd78686fee9a4469e87b661c6e0c692520d GIT binary patch literal 113 
ncmXpoH!wFaG(Z9thKVLfTx2GSI%El4Z1XfQuz<-kFfafB=@k}2 literal 0 HcmV?d00001 diff --git a/src/test/resources/synthetic/long-name/newc.cpio b/src/test/resources/synthetic/long-name/newc.cpio new file mode 100644 index 00000000000..e7b0f8cb47f --- /dev/null +++ b/src/test/resources/synthetic/long-name/newc.cpio @@ -0,0 +1 @@ +07070100000000000081a40000000000000000000000010000000000000000000000000000000000000000000000000000800000000000aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa \ No newline at end of file diff --git a/src/test/resources/synthetic/long-name/odc-fail.cpio b/src/test/resources/synthetic/long-name/odc-fail.cpio new file mode 100644 index 0000000000000000000000000000000000000000..4038c1e9045c316ae2d49c11ac85e4cb63fb3d38 GIT binary patch literal 78 hcmXpoHvmB-U}#`qW@3UQ0AT`!(SSJ^APF)s002$f4v+u< literal 0 HcmV?d00001 diff --git a/src/test/resources/synthetic/long-name/odc.cpio b/src/test/resources/synthetic/long-name/odc.cpio new file mode 100644 index 00000000000..d2a72984f58 --- /dev/null +++ b/src/test/resources/synthetic/long-name/odc.cpio @@ -0,0 +1 @@ 
+0707070000000000001006440000000000000000010000000000000000010000000000000000aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa \ No newline at end of file diff --git a/src/test/resources/synthetic/long-name/pax-fail.tar b/src/test/resources/synthetic/long-name/pax-fail.tar new file mode 100644 index 0000000000000000000000000000000000000000..296c37056df6a429eaafacd17bdfc7aa41592d2c GIT binary patch literal 528 zcmWGYtnf%pOi3-$&&khAXP^l%G%zqTF=1dZfSL}Y4b9;IE(cO#XlP(!2Ba&X>S;=q z7MCOzF&G%+m*=Gx;jt;bD8IA-k37s%FdC22!6ap5Xku<+VQgk%u27Izl3@z~x_lNp literal 0 HcmV?d00001 diff --git a/src/test/resources/synthetic/long-name/pax.tar b/src/test/resources/synthetic/long-name/pax.tar new file mode 100644 index 0000000000000000000000000000000000000000..2af1df4cac5d115cde6c1826e00e7fcfd1e864d1 GIT binary patch literal 35328 zcmeIuv2MaJ5CBlm{fhoUFHH*S(7j`S!BV4is0cDd@c9sd7lv$9NPRct;=7YieqZ_c z+2`-Ry!?)vA>AdKh*j666xZ8pKG&mZFU4A$PffI|^LRR5TKD} z <=dFN|stAVA;|1Wwy#p%?%F literal 0 HcmV?d00001 From f14dd4dbc6d98c6aa1ffbea645d7bfc6b294f96a Mon Sep 17 00:00:00 2001 From: "Piotr P. 
Karwasz" Date: Thu, 16 Oct 2025 00:52:29 +0200 Subject: [PATCH 19/40] Restrict visibility of unintentionally exposed APIs (#730) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR removes or hides several members that leaked into the public surface, were never meant for external use and were **never released**. There is **no functional behavior change** to the library; this only corrects API visibility and duplication. ## What changed * **`SevenZFile.SOFT_MAX_ARRAY_LENGTH`** * **Change:** Removed. * **Rationale:** Duplicates the constant already available in Commons IO’s `IOUtils`. * **`SevenZFile.toNonNegativeInt(...)`** * **Change:** Visibility reduced (internal helper). * **Rationale:** Not part of the supported API; only used internally. * **`SeekableInMemoryByteChannel.getSize()`** * **Change:** Removed public alias. * **Rationale:** Only used in tests; behavior diverges from `size()` after channel closure and shouldn’t be exposed. * **`ElementValue.BYTES`** * **Change:** Migrated to caller class. * **Rationale:** Had a single call site in another package; not a public contract. 
--- .../compress/archivers/sevenz/SevenZFile.java | 13 +++---------- .../harmony/unpack200/MetadataBandGroup.java | 5 ++++- .../unpack200/bytecode/AnnotationsAttribute.java | 3 --- .../org/apache/commons/compress/utils/IOUtils.java | 7 +------ .../compress/utils/SeekableInMemoryByteChannel.java | 9 --------- .../commons/compress/archivers/zip/ZipFileTest.java | 4 +++- 6 files changed, 11 insertions(+), 30 deletions(-) diff --git a/src/main/java/org/apache/commons/compress/archivers/sevenz/SevenZFile.java b/src/main/java/org/apache/commons/compress/archivers/sevenz/SevenZFile.java index 75fe65a1cd4..a8f75d2a96d 100644 --- a/src/main/java/org/apache/commons/compress/archivers/sevenz/SevenZFile.java +++ b/src/main/java/org/apache/commons/compress/archivers/sevenz/SevenZFile.java @@ -20,7 +20,6 @@ import java.io.BufferedInputStream; import java.io.ByteArrayInputStream; -import java.io.ByteArrayOutputStream; import java.io.DataInputStream; import java.io.EOFException; import java.io.File; @@ -340,13 +339,6 @@ public Builder setUseDefaultNameForUnnamedEntries(final boolean useDefaultNameFo /** Shared with SevenZOutputFile and tests, neither mutates it. */ static final byte[] SIGNATURE = { (byte) '7', (byte) 'z', (byte) 0xBC, (byte) 0xAF, (byte) 0x27, (byte) 0x1C }; - /** - * The maximum array size defined privately in {@link ByteArrayOutputStream}. - * - * @since 1.29.0 - */ - public static int SOFT_MAX_ARRAY_LENGTH = Integer.MAX_VALUE - 8; - /** * Creates a new Builder. * @@ -456,7 +448,7 @@ private static long skipBytesFully(final ByteBuffer input, long bytesToSkip) { * @return The given value as an int. * @throws IOException Thrown if the given value is not in {@code [0, Integer.MAX_VALUE]}. 
*/ - public static int toNonNegativeInt(final String description, final long value) throws IOException { + private static int toNonNegativeInt(final String description, final long value) throws IOException { if (value > Integer.MAX_VALUE || value < 0) { throw new ArchiveException("Cannot handle %s %,d", description, value); } @@ -1128,7 +1120,8 @@ private boolean hasCurrentEntryBeenRead() { private Archive initializeArchive(final StartHeader startHeader, final byte[] password, final boolean verifyCrc) throws IOException { final int nextHeaderSizeInt = toNonNegativeInt("startHeader.nextHeaderSize", startHeader.nextHeaderSize); - MemoryLimitException.checkKiB(bytesToKiB(nextHeaderSizeInt), Math.min(bytesToKiB(SOFT_MAX_ARRAY_LENGTH), maxMemoryLimitKiB)); + MemoryLimitException.checkKiB(bytesToKiB(nextHeaderSizeInt), Math.min(bytesToKiB(org.apache.commons.io.IOUtils.SOFT_MAX_ARRAY_LENGTH), + maxMemoryLimitKiB)); channel.position(SIGNATURE_HEADER_SIZE + startHeader.nextHeaderOffset); if (verifyCrc) { final long position = channel.position(); diff --git a/src/main/java/org/apache/commons/compress/harmony/unpack200/MetadataBandGroup.java b/src/main/java/org/apache/commons/compress/harmony/unpack200/MetadataBandGroup.java index 43292679ffb..1811efdeea2 100644 --- a/src/main/java/org/apache/commons/compress/harmony/unpack200/MetadataBandGroup.java +++ b/src/main/java/org/apache/commons/compress/harmony/unpack200/MetadataBandGroup.java @@ -44,6 +44,9 @@ */ public class MetadataBandGroup { + /** Size in bytes of an {@link ElementValue} instance: header, Object, int, int. 
*/ + private static final int ELEMENT_VALUE_BYTES = 8 + 8 + Integer.BYTES + Integer.BYTES; + private static CPUTF8 rvaUTF8; private static CPUTF8 riaUTF8; @@ -220,7 +223,7 @@ private Object getNextValue(final int t) throws Pack200Exception { return cases_RU[cases_RU_Index++]; case '[': final int arraySize = casearray_N[casearray_N_Index++]; - final ElementValue[] nestedArray = new ElementValue[Pack200Exception.checkObjectArray(arraySize, ElementValue.BYTES)]; + final ElementValue[] nestedArray = new ElementValue[Pack200Exception.checkObjectArray(arraySize, ELEMENT_VALUE_BYTES)]; for (int i = 0; i < arraySize; i++) { final int nextT = T[T_index++]; nestedArray[i] = new ElementValue(nextT, getNextValue(nextT)); diff --git a/src/main/java/org/apache/commons/compress/harmony/unpack200/bytecode/AnnotationsAttribute.java b/src/main/java/org/apache/commons/compress/harmony/unpack200/bytecode/AnnotationsAttribute.java index fd09af326a0..e08110deaa7 100644 --- a/src/main/java/org/apache/commons/compress/harmony/unpack200/bytecode/AnnotationsAttribute.java +++ b/src/main/java/org/apache/commons/compress/harmony/unpack200/bytecode/AnnotationsAttribute.java @@ -127,9 +127,6 @@ public void writeBody(final DataOutputStream dos) throws IOException { */ public static class ElementValue { - /** Size in bytes of an instance: header, Object, int, int. */ - public static final int BYTES = 8 + 8 + Integer.BYTES + Integer.BYTES; - private final Object value; private final int tag; diff --git a/src/main/java/org/apache/commons/compress/utils/IOUtils.java b/src/main/java/org/apache/commons/compress/utils/IOUtils.java index 1eedd52b7c5..0bdd1a36bfc 100644 --- a/src/main/java/org/apache/commons/compress/utils/IOUtils.java +++ b/src/main/java/org/apache/commons/compress/utils/IOUtils.java @@ -48,11 +48,6 @@ public final class IOUtils { */ public static final LinkOption[] EMPTY_LINK_OPTIONS = {}; - /** - * The {@code SOFT_MAX_ARRAY_LENGTH} constant from Java's internal ArraySupport class. 
- */ - private static final int SOFT_MAX_ARRAY_LENGTH = Integer.MAX_VALUE - 8; - /** * Closes the given Closeable and swallows any IOException that may occur. * @@ -235,7 +230,7 @@ public static void readFully(final ReadableByteChannel channel, final ByteBuffer */ public static byte[] readRange(final InputStream input, final int length) throws IOException { final ByteArrayOutputStream output = new ByteArrayOutputStream(); - org.apache.commons.io.IOUtils.copyLarge(input, output, 0, MemoryLimitException.checkBytes(length, SOFT_MAX_ARRAY_LENGTH)); + org.apache.commons.io.IOUtils.copyLarge(input, output, 0, MemoryLimitException.checkBytes(length, org.apache.commons.io.IOUtils.SOFT_MAX_ARRAY_LENGTH)); return output.toByteArray(); } diff --git a/src/main/java/org/apache/commons/compress/utils/SeekableInMemoryByteChannel.java b/src/main/java/org/apache/commons/compress/utils/SeekableInMemoryByteChannel.java index 7c6a8e542e8..2f998c0e26e 100644 --- a/src/main/java/org/apache/commons/compress/utils/SeekableInMemoryByteChannel.java +++ b/src/main/java/org/apache/commons/compress/utils/SeekableInMemoryByteChannel.java @@ -99,15 +99,6 @@ private void ensureOpen() throws ClosedChannelException { } } - /** - * Like {@link #size()} but never throws {@link ClosedChannelException}. - * - * @return See {@link #size()}. 
- */ - public long getSize() { - return size; - } - @Override public boolean isOpen() { return !closed.get(); diff --git a/src/test/java/org/apache/commons/compress/archivers/zip/ZipFileTest.java b/src/test/java/org/apache/commons/compress/archivers/zip/ZipFileTest.java index e047753c493..6dc78d31581 100644 --- a/src/test/java/org/apache/commons/compress/archivers/zip/ZipFileTest.java +++ b/src/test/java/org/apache/commons/compress/archivers/zip/ZipFileTest.java @@ -66,6 +66,7 @@ import org.apache.commons.lang3.ArrayFill; import org.apache.commons.lang3.ArrayUtils; import org.apache.commons.lang3.SystemUtils; +import org.apache.commons.lang3.reflect.FieldUtils; import org.junit.Assume; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.Assumptions; @@ -500,7 +501,8 @@ void testEntryAlignment() throws Exception { } - try (ZipFile zf = ZipFile.builder().setByteArray(Arrays.copyOfRange(zipContent.array(), 0, (int) zipContent.getSize())).get()) { + try (ZipFile zf = ZipFile.builder().setByteArray(Arrays.copyOfRange(zipContent.array(), 0, (int) FieldUtils.readDeclaredField(zipContent, "size", + true))).get()) { final ZipArchiveEntry inflatedEntry = zf.getEntry("inflated.txt"); final ResourceAlignmentExtraField inflatedAlignmentEx = (ResourceAlignmentExtraField) inflatedEntry .getExtraField(ResourceAlignmentExtraField.ID); From 592ad68bacbdab29bd7616ec31a4704db7d0d30b Mon Sep 17 00:00:00 2001 From: "Gary D. 
Gregory" Date: Wed, 15 Oct 2025 18:54:44 -0400 Subject: [PATCH 20/40] Post merge clean ups - Use final - Fix errant curly - Sort members - Reduce vertical space --- .../archivers/AbstractArchiveBuilder.java | 18 +- .../archivers/ArchiveInputStream.java | 26 +-- .../dump/DumpArchiveInputStream.java | 10 +- .../compress/archivers/sevenz/SevenZFile.java | 1 + .../compress/archivers/tar/TarUtils.java | 7 +- .../archivers/MaxNameEntryLengthTest.java | 195 ++++++++---------- 6 files changed, 117 insertions(+), 140 deletions(-) diff --git a/src/main/java/org/apache/commons/compress/archivers/AbstractArchiveBuilder.java b/src/main/java/org/apache/commons/compress/archivers/AbstractArchiveBuilder.java index 74a878576f8..f5b122a3b66 100644 --- a/src/main/java/org/apache/commons/compress/archivers/AbstractArchiveBuilder.java +++ b/src/main/java/org/apache/commons/compress/archivers/AbstractArchiveBuilder.java @@ -42,6 +42,15 @@ protected AbstractArchiveBuilder() { // empty } + /** + * Gets the maximum length of an archive entry name. + * + * @return The maximum length of an archive entry name. + */ + public int getMaxEntryNameLength() { + return maxEntryNameLength; + } + /** * Sets the maximum length, in bytes, of an archive entry name. * @@ -68,13 +77,4 @@ public B setMaxEntryNameLength(final int maxEntryNameLength) { this.maxEntryNameLength = maxEntryNameLength; return asThis(); } - - /** - * Gets the maximum length of an archive entry name. - * - * @return The maximum length of an archive entry name. 
- */ - public int getMaxEntryNameLength() { - return maxEntryNameLength; - } } diff --git a/src/main/java/org/apache/commons/compress/archivers/ArchiveInputStream.java b/src/main/java/org/apache/commons/compress/archivers/ArchiveInputStream.java index da5475f7acd..f41b834cd54 100644 --- a/src/main/java/org/apache/commons/compress/archivers/ArchiveInputStream.java +++ b/src/main/java/org/apache/commons/compress/archivers/ArchiveInputStream.java @@ -107,19 +107,6 @@ public ArchiveInputStream() { this(new NullInputStream(), Charset.defaultCharset().name()); } - /** - * Constructs a new instance. - * - * @param inputStream the underlying input stream, or {@code null} if this instance is to be created without an underlying stream. - * @param charsetName charset name. - * @since 1.26.0 - */ - protected ArchiveInputStream(final InputStream inputStream, final String charsetName) { - super(inputStream == null ? new NullInputStream() : inputStream); - this.charset = Charsets.toCharset(charsetName); - this.maxEntryNameLength = Short.MAX_VALUE; - } - /** * Constructs a new instance from a builder. * @@ -148,6 +135,19 @@ protected ArchiveInputStream(final InputStream inputStream, final AbstractArchiv this.maxEntryNameLength = builder.getMaxEntryNameLength(); } + /** + * Constructs a new instance. + * + * @param inputStream the underlying input stream, or {@code null} if this instance is to be created without an underlying stream. + * @param charsetName charset name. + * @since 1.26.0 + */ + protected ArchiveInputStream(final InputStream inputStream, final String charsetName) { + super(inputStream == null ? new NullInputStream() : inputStream); + this.charset = Charsets.toCharset(charsetName); + this.maxEntryNameLength = Short.MAX_VALUE; + } + /** * Tests whether this stream is able to read the given entry. 
* diff --git a/src/main/java/org/apache/commons/compress/archivers/dump/DumpArchiveInputStream.java b/src/main/java/org/apache/commons/compress/archivers/dump/DumpArchiveInputStream.java index c0c595caf0b..a34fd7bb1e4 100644 --- a/src/main/java/org/apache/commons/compress/archivers/dump/DumpArchiveInputStream.java +++ b/src/main/java/org/apache/commons/compress/archivers/dump/DumpArchiveInputStream.java @@ -214,6 +214,11 @@ public DumpArchiveInputStream(final InputStream is, final String encoding) throw this(is, builder().setCharset(encoding)); } + private DumpArchiveEntry checkEntry(final DumpArchiveEntry entry) throws ArchiveException, MemoryLimitException { + ArchiveUtils.checkEntryNameLength(entry.getName().length(), getMaxEntryNameLength(), "DUMP"); + return entry; + } + /** * Closes the stream for this entry. */ @@ -586,9 +591,4 @@ private void readDirectoryEntry(DumpArchiveEntry entry) throws IOException { size -= DumpArchiveConstants.TP_SIZE; } } - - private DumpArchiveEntry checkEntry(DumpArchiveEntry entry) throws ArchiveException, MemoryLimitException { - ArchiveUtils.checkEntryNameLength(entry.getName().length(), getMaxEntryNameLength(), "DUMP"); - return entry; - } } diff --git a/src/main/java/org/apache/commons/compress/archivers/sevenz/SevenZFile.java b/src/main/java/org/apache/commons/compress/archivers/sevenz/SevenZFile.java index a8f75d2a96d..c068d302088 100644 --- a/src/main/java/org/apache/commons/compress/archivers/sevenz/SevenZFile.java +++ b/src/main/java/org/apache/commons/compress/archivers/sevenz/SevenZFile.java @@ -454,6 +454,7 @@ private static int toNonNegativeInt(final String description, final long value) } return (int) value; } + private final String fileName; private SeekableByteChannel channel; private final Archive archive; diff --git a/src/main/java/org/apache/commons/compress/archivers/tar/TarUtils.java b/src/main/java/org/apache/commons/compress/archivers/tar/TarUtils.java index 06c1b408323..c7451e81576 100644 --- 
a/src/main/java/org/apache/commons/compress/archivers/tar/TarUtils.java +++ b/src/main/java/org/apache/commons/compress/archivers/tar/TarUtils.java @@ -714,7 +714,6 @@ static Map
*/ private void buildSparseInputStreams() throws IOException { - currentSparseInputStreamIndex = -1; - sparseInputStreams = new ArrayList<>(); + final ListparsePaxHeaders(final InputStream inputStream, final final String keyword = coll.toString(StandardCharsets.UTF_8); // Get rest of entry final int restLen = len - read; - // Validate entry length // 1. Ignore empty keywords if (restLen <= 1) { // only NL @@ -745,8 +744,7 @@ static Map parsePaxHeaders(final InputStream inputStream, final try { offset = ParsingUtils.parseLongValue(value); } catch (final IOException ex) { - throw new ArchiveException( - "Failed to read PAX header: Offset %s contains a non-numeric value.", + throw new ArchiveException("Failed to read PAX header: Offset %s contains a non-numeric value.", TarGnuSparseKeys.OFFSET); } if (offset < 0) { @@ -763,8 +761,7 @@ static Map parsePaxHeaders(final InputStream inputStream, final try { numbytes = ParsingUtils.parseLongValue(value); } catch (final IOException ex) { - throw new ArchiveException( - "Failed to read PAX header: Numbytes %s contains a non-numeric value.", + throw new ArchiveException("Failed to read PAX header: Numbytes %s contains a non-numeric value.", TarGnuSparseKeys.NUMBYTES); } if (numbytes < 0) { diff --git a/src/test/java/org/apache/commons/compress/archivers/MaxNameEntryLengthTest.java b/src/test/java/org/apache/commons/compress/archivers/MaxNameEntryLengthTest.java index cb8281b2375..16ef55bc726 100644 --- a/src/test/java/org/apache/commons/compress/archivers/MaxNameEntryLengthTest.java +++ b/src/test/java/org/apache/commons/compress/archivers/MaxNameEntryLengthTest.java @@ -109,24 +109,6 @@ static Stream testTruncatedStreams() throws IOException { SOFT_MAX_ARRAY_LENGTH)); } - @ParameterizedTest - @MethodSource - void testTruncatedStreams(final ArchiveInputStream> archiveInputStream, final long expectedLength) { - // If the file name length exceeds available memory, the stream fails fast with MemoryLimitException. 
- // Otherwise, it fails with EOFException when the stream ends unexpectedly. - if (Runtime.getRuntime().totalMemory() < expectedLength) { - final MemoryLimitException exception = - assertThrows(MemoryLimitException.class, archiveInputStream::getNextEntry); - final String message = exception.getMessage(); - assertNotNull(message); - assertTrue( - message.contains(String.format("%,d", expectedLength)), - "Message mentions expected length (" + expectedLength + "): " + message); - } else { - assertThrows(EOFException.class, archiveInputStream::getNextEntry); - } - } - static Stream testTruncatedTarFiles() throws IOException { return Stream.of( Arguments.of(TarFile.builder() @@ -137,14 +119,6 @@ static Stream testTruncatedTarFiles() throws IOException { .setURI(getURI("synthetic/long-name/gnu-fail.tar")))); } - @ParameterizedTest - @MethodSource - void testTruncatedTarFiles(final TarFile.Builder tarFileBuilder) { - // Since the real size of the archive is known, the truncation is detected - // much earlier and before trying to read file names. - assertThrows(EOFException.class, () -> tarFileBuilder.get().getEntries()); - } - static Stream testValidStreams() throws IOException { return Stream.of( Arguments.of( @@ -178,29 +152,6 @@ static Stream testValidStreams() throws IOException { Short.MAX_VALUE)); } - @ParameterizedTest - @MethodSource - void testValidStreams(final AbstractArchiveBuilder >, ?> builder, final int expectedLength) - throws IOException { - try (ArchiveInputStream> archiveInputStream = builder.get()) { - final ArchiveEntry entry = archiveInputStream.getNextEntry(); - assertNotNull(entry); - final String name = entry.getName(); - assertEquals(expectedLength, name.length(), "Unexpected name length"); - final String expected = StringUtils.repeat("a", expectedLength); - assertEquals(expected, name); - } - // Impose a file name length limit and verify that it is enforced. 
- builder.setMaxEntryNameLength(PORTABLE_NAME_LIMIT); - try (ArchiveInputStream> archiveInputStream = builder.get()) { - final ArchiveException exception = assertThrows(ArchiveException.class, archiveInputStream::getNextEntry); - final String message = exception.getMessage(); - assertNotNull(message); - assertTrue(message.contains("file name length")); - assertTrue(message.contains(String.format("%,d", expectedLength))); - } - } - static Stream testValidTarFiles() throws IOException { return Stream.of( Arguments.of(TarFile.builder().setURI(getURI("synthetic/long-name/pax.tar")), Short.MAX_VALUE), @@ -209,46 +160,25 @@ static Stream testValidTarFiles() throws IOException { @ParameterizedTest @MethodSource - void testValidTarFiles(final TarFile.Builder tarFileBuilder, final int expectedLength) throws IOException { - try (TarFile tarFile = tarFileBuilder.get()) { - for (final ArchiveEntry entry : tarFile.getEntries()) { - assertNotNull(entry); - final String name = entry.getName(); - assertEquals(expectedLength, name.length(), "Unexpected name length"); - final String expected = StringUtils.repeat("a", expectedLength); - assertEquals(expected, name); - } + void testTruncatedStreams(final ArchiveInputStream> archiveInputStream, final long expectedLength) { + // If the file name length exceeds available memory, the stream fails fast with MemoryLimitException. + // Otherwise, it fails with EOFException when the stream ends unexpectedly. 
+ if (Runtime.getRuntime().totalMemory() < expectedLength) { + final MemoryLimitException exception = assertThrows(MemoryLimitException.class, archiveInputStream::getNextEntry); + final String message = exception.getMessage(); + assertNotNull(message); + assertTrue(message.contains(String.format("%,d", expectedLength)), "Message mentions expected length (" + expectedLength + "): " + message); + } else { + assertThrows(EOFException.class, archiveInputStream::getNextEntry); } - // Impose a file name length limit and verify that it is enforced. - tarFileBuilder.setMaxEntryNameLength(PORTABLE_NAME_LIMIT); - final ArchiveException exception = assertThrows(ArchiveException.class, () -> tarFileBuilder.get()); - final String message = exception.getMessage(); - assertNotNull(message); - assertTrue(message.contains("file name length")); - assertTrue(message.contains(String.format("%,d", expectedLength))); } - @Test - void testValidZipFile() throws IOException { - final ZipFile.Builder builder = ZipFile.builder().setURI(getURI("synthetic/long-name/long-name.zip")); - final int expectedLength = Short.MAX_VALUE; - try (ZipFile zipFile = builder.get(); - IOStream extends ZipArchiveEntry> entries = zipFile.stream()) { - entries.forEach(entry -> { - assertNotNull(entry); - final String name = entry.getName(); - assertEquals(expectedLength, name.length(), "Unexpected name length"); - final String expected = StringUtils.repeat("a", expectedLength); - assertEquals(expected, name); - }); - } - // Impose a file name length limit and verify that it is enforced. 
- builder.setMaxEntryNameLength(PORTABLE_NAME_LIMIT); - final ArchiveException exception = assertThrows(ArchiveException.class, builder::get); - final String message = exception.getMessage(); - assertNotNull(message); - assertTrue(message.contains("file name length"), "Message mentions file name length: " + message); - assertTrue(message.contains(String.format("%,d", expectedLength))); + @ParameterizedTest + @MethodSource + void testTruncatedTarFiles(final TarFile.Builder tarFileBuilder) { + // Since the real size of the archive is known, the truncation is detected + // much earlier and before trying to read file names. + assertThrows(EOFException.class, () -> tarFileBuilder.get().getEntries()); } @Test @@ -264,9 +194,7 @@ void testValid7ZipFile() throws IOException { assertEquals(expected, name); } // SevenZFile parses the whole archive at once, so the builder throws the exception. - final ArchiveException exception = - assertThrows(ArchiveException.class, () -> builder.setMaxEntryNameLength(PORTABLE_NAME_LIMIT) - .get()); + final ArchiveException exception = assertThrows(ArchiveException.class, () -> builder.setMaxEntryNameLength(PORTABLE_NAME_LIMIT).get()); final String message = exception.getMessage(); assertNotNull(message); assertTrue(message.contains("file name length")); @@ -281,41 +209,28 @@ void testValidDumpStreams(final String resourceName) throws IOException { final int nameSegmentLength = 255; // length of each segment final int totalEntries = 1 + expectedDepth + 1; // root + 127 dirs + 1 file final int maxInode = rootInode + totalEntries - 1; - final String nameSegment = StringUtils.repeat('a', nameSegmentLength); - - final DumpArchiveInputStream.Builder builder = - DumpArchiveInputStream.builder().setURI(getURI(resourceName)); - + final DumpArchiveInputStream.Builder builder = DumpArchiveInputStream.builder().setURI(getURI(resourceName)); try (DumpArchiveInputStream in = builder.get()) { for (int expectedInode = rootInode; expectedInode <= maxInode; 
expectedInode++) { final boolean isRegularFile = expectedInode == maxInode; final DumpArchiveEntry entry = in.getNextEntry(); - assertNotNull(entry, "Entry " + expectedInode + " should exist"); - // Type checks: root + 127 are directories, last is a regular file. assertEquals(!isRegularFile, entry.isDirectory(), "isDirectory() mismatch"); - final int depth = expectedInode - rootInode; // 0 for root, 1..127 for dirs, 128 for file’s dir count - final String expectedNameDirs = StringUtils.repeat(nameSegment + "/", depth); final int expectedLength = (nameSegmentLength + 1) * depth - (isRegularFile ? 1 : 0); - final String actualName = entry.getName(); - assertEquals(expectedInode, entry.getIno(), "inode"); assertEquals(expectedLength, actualName.length(), "name length"); assertEquals(expectedNameDirs.substring(0, expectedLength), actualName, "full name"); - // Structure checks: every path component is exactly 255×'a' - String[] parts = actualName.split("/"); if (parts.length > 0 && parts[parts.length - 1].isEmpty()) { // Trailing slash yields an empty final component; ignore it. parts = Arrays.copyOf(parts, parts.length - 1); } - // For directories: depth components; for file: depth components (including file itself) assertEquals(depth, parts.length, "component count"); for (int i = 0; i < parts.length; i++) { @@ -323,15 +238,12 @@ void testValidDumpStreams(final String resourceName) throws IOException { assertEquals(nameSegment, parts[i], "segment[" + i + "] content"); } } - // Stream should now be exhausted. 
assertNull(in.getNextEntry(), "No more entries expected after " + totalEntries); } - - try (DumpArchiveInputStream in = - builder.setMaxEntryNameLength(PORTABLE_NAME_LIMIT).get()) { + try (DumpArchiveInputStream in = builder.setMaxEntryNameLength(PORTABLE_NAME_LIMIT).get()) { int expectedLength; - for (int depth = 0; ; depth++) { + for (int depth = 0;; depth++) { expectedLength = depth * (nameSegmentLength + 1); if (expectedLength > PORTABLE_NAME_LIMIT) { break; @@ -345,4 +257,71 @@ void testValidDumpStreams(final String resourceName) throws IOException { assertTrue(message.contains(String.format("%,d", expectedLength))); } } + + @ParameterizedTest + @MethodSource + void testValidStreams(final AbstractArchiveBuilder >, ?> builder, final int expectedLength) + throws IOException { + try (ArchiveInputStream> archiveInputStream = builder.get()) { + final ArchiveEntry entry = archiveInputStream.getNextEntry(); + assertNotNull(entry); + final String name = entry.getName(); + assertEquals(expectedLength, name.length(), "Unexpected name length"); + final String expected = StringUtils.repeat("a", expectedLength); + assertEquals(expected, name); + } + // Impose a file name length limit and verify that it is enforced. 
+ builder.setMaxEntryNameLength(PORTABLE_NAME_LIMIT); + try (ArchiveInputStream> archiveInputStream = builder.get()) { + final ArchiveException exception = assertThrows(ArchiveException.class, archiveInputStream::getNextEntry); + final String message = exception.getMessage(); + assertNotNull(message); + assertTrue(message.contains("file name length")); + assertTrue(message.contains(String.format("%,d", expectedLength))); + } + } + + @ParameterizedTest + @MethodSource + void testValidTarFiles(final TarFile.Builder tarFileBuilder, final int expectedLength) throws IOException { + try (TarFile tarFile = tarFileBuilder.get()) { + for (final ArchiveEntry entry : tarFile.getEntries()) { + assertNotNull(entry); + final String name = entry.getName(); + assertEquals(expectedLength, name.length(), "Unexpected name length"); + final String expected = StringUtils.repeat("a", expectedLength); + assertEquals(expected, name); + } + } + // Impose a file name length limit and verify that it is enforced. + tarFileBuilder.setMaxEntryNameLength(PORTABLE_NAME_LIMIT); + final ArchiveException exception = assertThrows(ArchiveException.class, () -> tarFileBuilder.get()); + final String message = exception.getMessage(); + assertNotNull(message); + assertTrue(message.contains("file name length")); + assertTrue(message.contains(String.format("%,d", expectedLength))); + } + + @Test + void testValidZipFile() throws IOException { + final ZipFile.Builder builder = ZipFile.builder().setURI(getURI("synthetic/long-name/long-name.zip")); + final int expectedLength = Short.MAX_VALUE; + try (ZipFile zipFile = builder.get(); + IOStream extends ZipArchiveEntry> entries = zipFile.stream()) { + entries.forEach(entry -> { + assertNotNull(entry); + final String name = entry.getName(); + assertEquals(expectedLength, name.length(), "Unexpected name length"); + final String expected = StringUtils.repeat("a", expectedLength); + assertEquals(expected, name); + }); + } + // Impose a file name length limit and 
verify that it is enforced. + builder.setMaxEntryNameLength(PORTABLE_NAME_LIMIT); + final ArchiveException exception = assertThrows(ArchiveException.class, builder::get); + final String message = exception.getMessage(); + assertNotNull(message); + assertTrue(message.contains("file name length"), "Message mentions file name length: " + message); + assertTrue(message.contains(String.format("%,d", expectedLength))); + } } From 72d18c2aa401626230c9d1ce60d628341955cf2c Mon Sep 17 00:00:00 2001 From: "Piotr P. Karwasz" Date: Thu, 16 Oct 2025 16:37:12 +0200 Subject: [PATCH 21/40] Declare `IOException` on archive `InputStream` constructors (#731) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit > [!CAUTION] > **Source-incompatible** (callers may need to add `throws IOException` or a catch). > **Binary-compatible** (the `throws` clause isn’t part of the JVM descriptor). Several `ArchiveInputStream` implementations either - must read/validate bytes up front (e.g., magic headers), or - may fail immediately when the underlying stream is unreadable. Today we’re inconsistent: * Formats **without a global signature** (e.g., **CPIO**, **TAR**) historically didn’t read in the constructor, so no `IOException` was declared. * Other formats that **do need early bytes** either wrapped `IOException` in `ArchiveException` (**ARJ**, **DUMP**) or deferred the read to the first `getNextEntry()` (**AR**, **ZIP**). This makes error handling uneven for users and complicates eager validation. * All archive `InputStream` constructors now declare `throws IOException`. * **ARJ** and **DUMP**: stop wrapping `IOException` in `ArchiveException` during construction; propagate the original `IOException`. * **AR**: move reading of the global signature into the constructor (eager validation). No behavioral change is intended beyond surfacing `IOException` at construction time, where appropriate. 
For the ARJ format this was discussed in https://github.com/apache/commons-compress/pull/723#discussion_r2410298499. > [!NOTE] > Version `1.29.0` already introduces source-incompatible changes in other methods, by adding checked exceptions. --- src/changes/changes.xml | 2 +- .../archivers/ArchiveInputStream.java | 24 ++---- .../archivers/ar/ArArchiveInputStream.java | 40 +++++----- .../archivers/arj/ArjArchiveInputStream.java | 42 +++++------ .../cpio/CpioArchiveInputStream.java | 44 ++++++----- .../dump/DumpArchiveInputStream.java | 70 ++++++++--------- .../archivers/jar/JarArchiveInputStream.java | 12 ++- .../archivers/tar/TarArchiveInputStream.java | 75 ++++++++++++------- .../archivers/zip/ZipArchiveInputStream.java | 57 ++++++++------ .../compress/LegacyConstructorsTest.java | 8 +- .../archivers/ArchiveStreamFactoryTest.java | 12 ++- .../ar/ArArchiveInputStreamTest.java | 4 +- .../arj/ArjArchiveInputStreamTest.java | 14 +--- .../dump/DumpArchiveInputStreamTest.java | 30 +++----- .../zip/ZipArchiveInputStreamTest.java | 2 +- 15 files changed, 218 insertions(+), 218 deletions(-) diff --git a/src/changes/changes.xml b/src/changes/changes.xml index ec19a5fef83..3e7f5463e12 100644 --- a/src/changes/changes.xml +++ b/src/changes/changes.xml @@ -106,6 +106,7 @@ The type attribute can be add,update,fix,remove. SeekableInMemoryByteChannel.position(), size(), and truncate() now comply with the SeekableByteChannel contract and throw ClosedChannelException. SeekableInMemoryByteChannel.position(long) now throws IllegalArgumentException instead of IOException when the position request is negative, complying with the SeekableByteChannel.position(long) contract. Makes TarUtils final and cleans up protected methods #712. +All archive input stream constructors now throw IOException. Add MemoryLimitException.MemoryLimitException(long, long). Add CompressException.CompressException(String, Object...). @@ -123,7 +124,6 @@ Thetype attribute can be add,update,fix,remove. 
Add Pack200Exception.addExact(int, long). Add GzipCompressorInputStream.Builder.setIgnoreExtraField(boolean). Add SnappyCompressorInputStream.getUncompressedSize() and deprecate getSize(). -Add ArchiveInputStream.ArchiveInputStream(InputStream, Charset) as a public constructor, it was private. Introduce builders for all ArchiveInputStream implementations and deprecate some constructors. TarFile now implements IOIterable<TarArchiveEntry>. Add a builder for the TarFile class and deprecate some constructors. diff --git a/src/main/java/org/apache/commons/compress/archivers/ArchiveInputStream.java b/src/main/java/org/apache/commons/compress/archivers/ArchiveInputStream.java index f41b834cd54..c7a832681ba 100644 --- a/src/main/java/org/apache/commons/compress/archivers/ArchiveInputStream.java +++ b/src/main/java/org/apache/commons/compress/archivers/ArchiveInputStream.java @@ -101,7 +101,10 @@ public Iteratorunwrap() { /** * Constructs a new instance. + * + * @deprecated Since 1.29.0, use {@link #ArchiveInputStream(AbstractArchiveBuilder)} instead. */ + @Deprecated @SuppressWarnings("resource") public ArchiveInputStream() { this(new NullInputStream(), Charset.defaultCharset().name()); @@ -114,23 +117,8 @@ public ArchiveInputStream() { * @throws IOException Thrown if an I/O error occurs. * @since 1.29.0 */ - protected ArchiveInputStream(AbstractArchiveBuilder, ?> builder) throws IOException { - this(builder.getInputStream(), builder); - } - - /** - * Constructs a new instance from a builder and an input stream. - * - * Note: This overload exists to support legacy constructors that did not declare - * {@link IOException}. For new constructors, prefer - * {@link #ArchiveInputStream(AbstractArchiveBuilder)} and propagate I/O errors to callers.
- * - * @param inputStream The underlying input stream, or {@code null} if this instance is to be created without an underlying stream. - * @param builder The builder. - * @since 1.29.0 - */ - protected ArchiveInputStream(final InputStream inputStream, final AbstractArchiveBuilder, ?> builder) { - super(inputStream); + protected ArchiveInputStream(final AbstractArchiveBuilder, ?> builder) throws IOException { + super(builder.getInputStream()); this.charset = builder.getCharset(); this.maxEntryNameLength = builder.getMaxEntryNameLength(); } @@ -141,7 +129,9 @@ protected ArchiveInputStream(final InputStream inputStream, final AbstractArchiv * @param inputStream the underlying input stream, or {@code null} if this instance is to be created without an underlying stream. * @param charsetName charset name. * @since 1.26.0 + * @deprecated Since 1.29.0, use {@link #ArchiveInputStream(AbstractArchiveBuilder)} instead. */ + @Deprecated protected ArchiveInputStream(final InputStream inputStream, final String charsetName) { super(inputStream == null ? new NullInputStream() : inputStream); this.charset = Charsets.toCharset(charsetName); diff --git a/src/main/java/org/apache/commons/compress/archivers/ar/ArArchiveInputStream.java b/src/main/java/org/apache/commons/compress/archivers/ar/ArArchiveInputStream.java index cde63b81929..2f3fa6f0189 100644 --- a/src/main/java/org/apache/commons/compress/archivers/ar/ArArchiveInputStream.java +++ b/src/main/java/org/apache/commons/compress/archivers/ar/ArArchiveInputStream.java @@ -170,19 +170,20 @@ public static boolean matches(final byte[] buffer, final int ignored) { private ArArchiveInputStream(final Builder builder) throws IOException { super(builder); + // Fail-fast if there is no signature + skipGlobalSignature(); } /** * Constructs an Ar input stream with the referenced stream * + *Since 1.29.0: throws {@link IOException}.
+ * * @param inputStream the ar input stream + * @throws IOException if an I/O error has occurred */ - public ArArchiveInputStream(final InputStream inputStream) { - this(inputStream, builder()); - } - - private ArArchiveInputStream(final InputStream inputStream, final Builder builder) { - super(inputStream, builder); + public ArArchiveInputStream(final InputStream inputStream) throws IOException { + this(builder().setInputStream(inputStream)); } private int asInt(final byte[] byteArray, final int offset, final int len, final boolean treatBlankAsZero) throws IOException { @@ -312,8 +313,6 @@ public ArArchiveEntry getNextArEntry() throws IOException { */ @Override public ArArchiveEntry getNextEntry() throws IOException { - skipGlobalSignature(); - // Handle special GNU ar entries boolean foundGNUStringTable = false; do { @@ -490,20 +489,17 @@ private byte[] readGNUStringTable(final ArArchiveEntry entry) throws IOException * @throws IOException if an I/O error occurs while reading the stream or if the signature is invalid. 
*/ private void skipGlobalSignature() throws IOException { - final long offset = getBytesRead(); - if (offset == 0) { - final byte[] expectedMagic = ArArchiveEntry.HEADER_BYTES; - final byte[] actualMagic = IOUtils.readRange(in, expectedMagic.length); - count(actualMagic.length); - if (expectedMagic.length != actualMagic.length) { - throw new EOFException(String.format( - "Premature end of ar archive: incomplete global header (expected %d bytes, got %d).", - expectedMagic.length, actualMagic.length)); - } - if (!Arrays.equals(expectedMagic, actualMagic)) { - throw new ArchiveException( - "Invalid global ar archive header: " + ArchiveUtils.toAsciiString(actualMagic)); - } + final byte[] expectedMagic = ArArchiveEntry.HEADER_BYTES; + final byte[] actualMagic = IOUtils.readRange(in, expectedMagic.length); + count(actualMagic.length); + if (expectedMagic.length != actualMagic.length) { + throw new EOFException(String.format( + "Premature end of ar archive: incomplete global header (expected %d bytes, got %d).", + expectedMagic.length, actualMagic.length)); + } + if (!Arrays.equals(expectedMagic, actualMagic)) { + throw new ArchiveException( + "Invalid global ar archive header: " + ArchiveUtils.toAsciiString(actualMagic)); } } diff --git a/src/main/java/org/apache/commons/compress/archivers/arj/ArjArchiveInputStream.java b/src/main/java/org/apache/commons/compress/archivers/arj/ArjArchiveInputStream.java index 26675adaef2..fa5654f4708 100644 --- a/src/main/java/org/apache/commons/compress/archivers/arj/ArjArchiveInputStream.java +++ b/src/main/java/org/apache/commons/compress/archivers/arj/ArjArchiveInputStream.java @@ -105,46 +105,42 @@ public static boolean matches(final byte[] signature, final int length) { private InputStream currentInputStream; private ArjArchiveInputStream(final Builder builder) throws IOException { - this(builder.getInputStream(), builder); + super(builder); + dis = new DataInputStream(in); + mainHeader = readMainHeader(); + if 
((mainHeader.arjFlags & MainHeader.Flags.GARBLED) != 0) { + throw new ArchiveException("Encrypted ARJ files are unsupported"); + } + if ((mainHeader.arjFlags & MainHeader.Flags.VOLUME) != 0) { + throw new ArchiveException("Multi-volume ARJ files are unsupported"); + } } /** * Constructs the ArjInputStream, taking ownership of the inputStream that is passed in, and using the CP437 character encoding. * + *Since 1.29.0: throws {@link IOException}.
+ * * @param inputStream the underlying stream, whose ownership is taken - * @throws ArchiveException if an exception occurs while reading + * @throws IOException if an exception occurs while reading */ - public ArjArchiveInputStream(final InputStream inputStream) throws ArchiveException { - this(inputStream, builder()); - } - - private ArjArchiveInputStream(final InputStream inputStream, final Builder builder) throws ArchiveException { - super(new DataInputStream(inputStream), builder); - dis = (DataInputStream) in; - try { - mainHeader = readMainHeader(); - if ((mainHeader.arjFlags & MainHeader.Flags.GARBLED) != 0) { - throw new ArchiveException("Encrypted ARJ files are unsupported"); - } - if ((mainHeader.arjFlags & MainHeader.Flags.VOLUME) != 0) { - throw new ArchiveException("Multi-volume ARJ files are unsupported"); - } - } catch (final IOException e) { - throw new ArchiveException(e.getMessage(), (Throwable) e); - } + public ArjArchiveInputStream(final InputStream inputStream) throws IOException { + this(builder().setInputStream(inputStream)); } /** * Constructs the ArjInputStream, taking ownership of the inputStream that is passed in. * + *Since 1.29.0: throws {@link IOException}.
+ * * @param inputStream the underlying stream, whose ownership is taken * @param charsetName the charset used for file names and comments in the archive. May be {@code null} to use the platform default. - * @throws ArchiveException if an exception occurs while reading + * @throws IOException if an exception occurs while reading * @deprecated Since 1.29.0, use {@link #builder()}. */ @Deprecated - public ArjArchiveInputStream(final InputStream inputStream, final String charsetName) throws ArchiveException { - this(inputStream, builder().setCharset(charsetName)); + public ArjArchiveInputStream(final InputStream inputStream, final String charsetName) throws IOException { + this(builder().setInputStream(inputStream).setCharset(charsetName)); } @Override diff --git a/src/main/java/org/apache/commons/compress/archivers/cpio/CpioArchiveInputStream.java b/src/main/java/org/apache/commons/compress/archivers/cpio/CpioArchiveInputStream.java index 2d933e46ce7..d8a3565a424 100644 --- a/src/main/java/org/apache/commons/compress/archivers/cpio/CpioArchiveInputStream.java +++ b/src/main/java/org/apache/commons/compress/archivers/cpio/CpioArchiveInputStream.java @@ -199,66 +199,74 @@ public static boolean matches(final byte[] signature, final int length) { private final ZipEncoding zipEncoding; private CpioArchiveInputStream(final Builder builder) throws IOException { - this(builder.getInputStream(), builder); + super(builder); + if (builder.blockSize <= 0) { + throw new IllegalArgumentException("blockSize must be bigger than 0"); + } + this.blockSize = builder.blockSize; + this.zipEncoding = ZipEncodingHelper.getZipEncoding(builder.getCharset()); } /** * Constructs the cpio input stream with a blocksize of {@link CpioConstants#BLOCK_SIZE BLOCK_SIZE} and expecting ASCII file names. * + *Since 1.29.0: throws {@link IOException}.
+ * * @param in The cpio stream + * @throws IOException if an I/O error has occurred */ - public CpioArchiveInputStream(final InputStream in) { - this(in, builder()); - } - - private CpioArchiveInputStream(final InputStream in, final Builder builder) { - super(in, builder); - if (builder.blockSize <= 0) { - throw new IllegalArgumentException("blockSize must be bigger than 0"); - } - this.blockSize = builder.blockSize; - this.zipEncoding = ZipEncodingHelper.getZipEncoding(builder.getCharset()); + public CpioArchiveInputStream(final InputStream in) throws IOException { + this(builder().setInputStream(in)); } /** * Constructs the cpio input stream with a blocksize of {@link CpioConstants#BLOCK_SIZE BLOCK_SIZE} expecting ASCII file names. * + *Since 1.29.0: throws {@link IOException}.
+ * * @param in The cpio stream * @param blockSize The block size of the archive. + * @throws IOException if an I/O error has occurred * @since 1.5 * @deprecated Since 1.29.0, use {@link #builder()}. */ @Deprecated - public CpioArchiveInputStream(final InputStream in, final int blockSize) { - this(in, builder().setBlockSize(blockSize)); + public CpioArchiveInputStream(final InputStream in, final int blockSize) throws IOException { + this(builder().setInputStream(in).setBlockSize(blockSize)); } /** * Constructs the cpio input stream with a blocksize of {@link CpioConstants#BLOCK_SIZE BLOCK_SIZE}. * + *Since 1.29.0: throws {@link IOException}.
+ * * @param in The cpio stream * @param blockSize The block size of the archive. * @param encoding The encoding of file names to expect - use null for the platform's default. * @throws IllegalArgumentException if {@code blockSize} is not bigger than 0 + * @throws IOException if an I/O error has occurred * @since 1.6 * @deprecated Since 1.29.0, use {@link #builder()}. */ @Deprecated - public CpioArchiveInputStream(final InputStream in, final int blockSize, final String encoding) { - this(in, builder().setBlockSize(blockSize).setCharset(encoding)); + public CpioArchiveInputStream(final InputStream in, final int blockSize, final String encoding) throws IOException { + this(builder().setInputStream(in).setBlockSize(blockSize).setCharset(encoding)); } /** * Constructs the cpio input stream with a blocksize of {@link CpioConstants#BLOCK_SIZE BLOCK_SIZE}. * + *Since 1.29.0: throws {@link IOException}.
+ * * @param in The cpio stream * @param encoding The encoding of file names to expect - use null for the platform's default. + * @throws IOException if an I/O error has occurred * @since 1.6 * @deprecated Since 1.29.0, use {@link #builder()}. */ @Deprecated - public CpioArchiveInputStream(final InputStream in, final String encoding) { - this(in, builder().setCharset(encoding)); + public CpioArchiveInputStream(final InputStream in, final String encoding) throws IOException { + this(builder().setInputStream(in).setCharset(encoding)); } /** diff --git a/src/main/java/org/apache/commons/compress/archivers/dump/DumpArchiveInputStream.java b/src/main/java/org/apache/commons/compress/archivers/dump/DumpArchiveInputStream.java index a34fd7bb1e4..9e7bfbd7c7e 100644 --- a/src/main/java/org/apache/commons/compress/archivers/dump/DumpArchiveInputStream.java +++ b/src/main/java/org/apache/commons/compress/archivers/dump/DumpArchiveInputStream.java @@ -142,48 +142,30 @@ public static boolean matches(final byte[] buffer, final int length) { private final ZipEncoding zipEncoding; private DumpArchiveInputStream(final Builder builder) throws IOException { - this(builder.getInputStream(), builder); - } - - /** - * Constructor using the platform's default encoding for file names. - * - * @param is stream to read from - * @throws ArchiveException on error - */ - public DumpArchiveInputStream(final InputStream is) throws ArchiveException { - this(is, builder()); - } - - private DumpArchiveInputStream(final InputStream is, final Builder builder) throws ArchiveException { - super(is, builder); - this.raw = new TapeInputStream(is); + super(builder); + this.raw = new TapeInputStream(in); this.hasHitEOF = false; this.zipEncoding = ZipEncodingHelper.getZipEncoding(builder.getCharset()); - try { - // read header, verify it's a dump archive. - final byte[] headerBytes = raw.readRecord(); + // read header, verify it's a dump archive. 
+ final byte[] headerBytes = raw.readRecord(); - if (!DumpArchiveUtil.verify(headerBytes)) { - throw new UnrecognizedFormatException(); - } + if (!DumpArchiveUtil.verify(headerBytes)) { + throw new UnrecognizedFormatException(); + } - // get summary information - summary = new DumpArchiveSummary(headerBytes, this.zipEncoding); + // get summary information + summary = new DumpArchiveSummary(headerBytes, this.zipEncoding); - // reset buffer with actual block size. - raw.resetBlockSize(summary.getNTRec(), summary.isCompressed()); + // reset buffer with actual block size. + raw.resetBlockSize(summary.getNTRec(), summary.isCompressed()); - // allocate our read buffer. - blockBuffer = new byte[4 * DumpArchiveConstants.TP_SIZE]; + // allocate our read buffer. + blockBuffer = new byte[4 * DumpArchiveConstants.TP_SIZE]; - // skip past CLRI and BITS segments since we don't handle them yet. - readCLRI(); - readBITS(); - } catch (final IOException e) { - throw new ArchiveException(e.getMessage(), (Throwable) e); - } + // skip past CLRI and BITS segments since we don't handle them yet. + readCLRI(); + readBITS(); // put in a dummy record for the root node. final Dirent root = new Dirent(2, 2, 4, CURRENT_PATH_SEGMENT); @@ -200,18 +182,32 @@ private DumpArchiveInputStream(final InputStream is, final Builder builder) thro }); } + /** + * Constructor using the platform's default encoding for file names. + * + *Since 1.29.0: throws {@link IOException}.
+ * + * @param is stream to read from + * @throws IOException on error + */ + public DumpArchiveInputStream(final InputStream is) throws IOException { + this(builder().setInputStream(is)); + } + /** * Constructs a new instance. * + *Since 1.29.0: throws {@link IOException}.
+ * * @param is stream to read from * @param encoding the encoding to use for file names, use null for the platform's default encoding - * @throws ArchiveException on error + * @throws IOException on error * @since 1.6 * @deprecated Since 1.29.0, use {@link #builder()}. */ @Deprecated - public DumpArchiveInputStream(final InputStream is, final String encoding) throws ArchiveException { - this(is, builder().setCharset(encoding)); + public DumpArchiveInputStream(final InputStream is, final String encoding) throws IOException { + this(builder().setInputStream(is).setCharset(encoding)); } private DumpArchiveEntry checkEntry(final DumpArchiveEntry entry) throws ArchiveException, MemoryLimitException { diff --git a/src/main/java/org/apache/commons/compress/archivers/jar/JarArchiveInputStream.java b/src/main/java/org/apache/commons/compress/archivers/jar/JarArchiveInputStream.java index de2775c7f60..1a3daaf84b8 100644 --- a/src/main/java/org/apache/commons/compress/archivers/jar/JarArchiveInputStream.java +++ b/src/main/java/org/apache/commons/compress/archivers/jar/JarArchiveInputStream.java @@ -81,23 +81,27 @@ private JarArchiveInputStream(final Builder builder) throws IOException { /** * Creates an instance from the input stream using the default encoding. * + *Since 1.29.0: throws {@link IOException}.
+ * * @param inputStream the input stream to wrap + * @throws IOException if an I/O error occurs. */ - public JarArchiveInputStream(final InputStream inputStream) { - super(inputStream); + public JarArchiveInputStream(final InputStream inputStream) throws IOException { + this(jarInputStreamBuilder().setInputStream(inputStream)); } /** * Creates an instance from the input stream using the specified encoding. * + *Since 1.29.0: throws {@link IOException}.
+ * * @param inputStream the input stream to wrap * @param encoding the encoding to use * @since 1.10 * @deprecated Since 1.29.0, use {@link #jarInputStreamBuilder()}. */ @Deprecated - public JarArchiveInputStream(final InputStream inputStream, final String encoding) { - super(inputStream, encoding); + public JarArchiveInputStream(final InputStream inputStream, final String encoding) throws IOException { + this(jarInputStreamBuilder().setInputStream(inputStream).setCharset(encoding)); } @Override diff --git a/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveInputStream.java b/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveInputStream.java index 688eb6d7217..2bd6ade2b5e 100644 --- a/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveInputStream.java +++ b/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveInputStream.java @@ -176,101 +176,112 @@ public static boolean matches(final byte[] signature, final int length) { private final boolean lenient; - @SuppressWarnings("resource") // caller closes. private TarArchiveInputStream(final Builder builder) throws IOException { - this(builder.getInputStream(), builder); + super(builder); + this.zipEncoding = ZipEncodingHelper.getZipEncoding(builder.getCharset()); + this.recordBuffer = new byte[builder.getRecordSize()]; + this.blockSize = builder.getBlockSize(); + this.lenient = builder.isLenient(); } /** * Constructs a new instance. * + *Since 1.29.0: throws {@link IOException}.
+ * * @param inputStream the input stream to use. + * @throws IOException if an I/O error occurs. */ - public TarArchiveInputStream(final InputStream inputStream) { - this(inputStream, builder()); + public TarArchiveInputStream(final InputStream inputStream) throws IOException { + this(builder().setInputStream(inputStream)); } /** * Constructs a new instance with default values. * + *Since 1.29.0: throws {@link IOException}.
+ * * @param inputStream the input stream to use. * @param lenient when set to true illegal values for group/userid, mode, device numbers and timestamp will be ignored and the fields set to * {@link TarArchiveEntry#UNKNOWN}. When set to false such illegal fields cause an exception instead. + * @throws IOException if an I/O error occurs. * @since 1.19 * @deprecated Since 1.29.0, use {@link #builder()}. */ @Deprecated - public TarArchiveInputStream(final InputStream inputStream, final boolean lenient) { - this(inputStream, builder().setLenient(lenient)); - } - - private TarArchiveInputStream(final InputStream inputStream, final Builder builder) { - super(inputStream, builder); - this.zipEncoding = ZipEncodingHelper.getZipEncoding(builder.getCharset()); - this.recordBuffer = new byte[builder.getRecordSize()]; - this.blockSize = builder.getBlockSize(); - this.lenient = builder.isLenient(); + public TarArchiveInputStream(final InputStream inputStream, final boolean lenient) throws IOException { + this(builder().setInputStream(inputStream).setLenient(lenient)); } /** * Constructs a new instance. * + *Since 1.29.0: throws {@link IOException}.
+ * * @param inputStream the input stream to use. * @param blockSize the block size to use. + * @throws IOException if an I/O error occurs. * @deprecated Since 1.29.0, use {@link #builder()}. */ @Deprecated - public TarArchiveInputStream(final InputStream inputStream, final int blockSize) { - this(inputStream, builder().setBlockSize(blockSize)); + public TarArchiveInputStream(final InputStream inputStream, final int blockSize) throws IOException { + this(builder().setInputStream(inputStream).setBlockSize(blockSize)); } /** * Constructs a new instance. * + *Since 1.29.0: throws {@link IOException}.
+ * * @param inputStream the input stream to use. * @param blockSize the block size to use. * @param recordSize the record size to use. + * @throws IOException if an I/O error occurs. * @deprecated Since 1.29.0, use {@link #builder()}. */ @Deprecated - public TarArchiveInputStream(final InputStream inputStream, final int blockSize, final int recordSize) { - this(inputStream, builder().setBlockSize(blockSize).setRecordSize(recordSize)); + public TarArchiveInputStream(final InputStream inputStream, final int blockSize, final int recordSize) throws IOException { + this(builder().setInputStream(inputStream).setBlockSize(blockSize).setRecordSize(recordSize)); } /** * Constructs a new instance. * + *Since 1.29.0: throws {@link IOException}.
+ * * @param inputStream the input stream to use. * @param blockSize the block size to use. * @param recordSize the record size to use. * @param encoding name of the encoding to use for file names. + * @throws IOException if an I/O error occurs. * @since 1.4 * @deprecated Since 1.29.0, use {@link #builder()}. */ @Deprecated - public TarArchiveInputStream( - final InputStream inputStream, final int blockSize, final int recordSize, final String encoding) { - this( - inputStream, - builder().setBlockSize(blockSize).setRecordSize(recordSize).setCharset(encoding)); + public TarArchiveInputStream(final InputStream inputStream, final int blockSize, final int recordSize, final String encoding) throws IOException { + this(builder().setInputStream(inputStream).setBlockSize(blockSize).setRecordSize(recordSize).setCharset(encoding)); } /** * Constructs a new instance. * + *Since 1.29.0: throws {@link IOException}.
+ * * @param inputStream the input stream to use. * @param blockSize the block size to use. * @param recordSize the record size to use. * @param encoding name of the encoding to use for file names. * @param lenient when set to true illegal values for group/userid, mode, device numbers and timestamp will be ignored and the fields set to * {@link TarArchiveEntry#UNKNOWN}. When set to false such illegal fields cause an exception instead. + * @throws IOException if an I/O error occurs. * @since 1.19 * @deprecated Since 1.29.0, use {@link #builder()}. */ @Deprecated - public TarArchiveInputStream(final InputStream inputStream, final int blockSize, final int recordSize, final String encoding, final boolean lenient) { + public TarArchiveInputStream(final InputStream inputStream, final int blockSize, final int recordSize, final String encoding, + final boolean lenient) throws IOException { // @formatter:off - this(inputStream, builder() + this(builder() + .setInputStream(inputStream) .setBlockSize(blockSize) .setRecordSize(recordSize) .setCharset(encoding) @@ -281,28 +292,34 @@ public TarArchiveInputStream(final InputStream inputStream, final int blockSize, /** * Constructs a new instance. * + *Since 1.29.0: throws {@link IOException}.
+ * * @param inputStream the input stream to use. * @param blockSize the block size to use. * @param encoding name of the encoding to use for file names. + * @throws IOException if an I/O error occurs. * @since 1.4 * @deprecated Since 1.29.0, use {@link #builder()}. */ @Deprecated - public TarArchiveInputStream(final InputStream inputStream, final int blockSize, final String encoding) { - this(inputStream, builder().setBlockSize(blockSize).setCharset(encoding)); + public TarArchiveInputStream(final InputStream inputStream, final int blockSize, final String encoding) throws IOException { + this(builder().setInputStream(inputStream).setBlockSize(blockSize).setCharset(encoding)); } /** * Constructs a new instance. * + *Since 1.29.0: throws {@link IOException}.
+ * * @param inputStream the input stream to use. * @param encoding name of the encoding to use for file names. + * @throws IOException if an I/O error occurs. * @since 1.4 * @deprecated Since 1.29.0, use {@link #builder()}. */ @Deprecated - public TarArchiveInputStream(final InputStream inputStream, final String encoding) { - this(inputStream, builder().setCharset(encoding)); + public TarArchiveInputStream(final InputStream inputStream, final String encoding) throws IOException { + this(builder().setInputStream(inputStream).setCharset(encoding)); } /** diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveInputStream.java b/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveInputStream.java index d23de8b3967..bc61281517d 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveInputStream.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveInputStream.java @@ -409,62 +409,70 @@ public static boolean matches(final byte[] buffer, final int length) { * @since 1.29.0 */ protected ZipArchiveInputStream(final AbstractBuilder, ?> builder) throws IOException { - this(builder.getInputStream(), builder); + super(builder); + this.in = new PushbackInputStream(in, buf.capacity()); + this.zipEncoding = ZipEncodingHelper.getZipEncoding(builder.getCharset()); + this.useUnicodeExtraFields = builder.isUseUnicodeExtraFields(); + this.supportStoredEntryDataDescriptor = builder.isSupportStoredEntryDataDescriptor(); + this.skipSplitSignature = builder.isSkipSplitSignature(); + // haven't read anything so far + buf.limit(0); } /** * Constructs an instance using UTF-8 encoding. * + *Since 1.29.0: throws {@link IOException}.
+ * * @param inputStream the stream to wrap. + * @throws IOException if an I/O error occurs. */ - public ZipArchiveInputStream(final InputStream inputStream) { - this(inputStream, builder()); - } - - private ZipArchiveInputStream(final InputStream inputStream, final AbstractBuilder, ?> builder) { - super(inputStream, builder); - this.in = new PushbackInputStream(inputStream, buf.capacity()); - this.zipEncoding = ZipEncodingHelper.getZipEncoding(builder.getCharset()); - this.useUnicodeExtraFields = builder.isUseUnicodeExtraFields(); - this.supportStoredEntryDataDescriptor = builder.isSupportStoredEntryDataDescriptor(); - this.skipSplitSignature = builder.isSkipSplitSignature(); - // haven't read anything so far - buf.limit(0); + public ZipArchiveInputStream(final InputStream inputStream) throws IOException { + this(builder().setInputStream(inputStream)); } /** * Constructs an instance using the specified encoding. * + *Since 1.29.0: throws {@link IOException}.
+ * * @param inputStream the stream to wrap. * @param encoding the encoding to use for file names, use null for the platform's default encoding. + * @throws IOException if an I/O error occurs. * @since 1.5 * @deprecated Since 1.29.0, use {@link #builder()}. */ @Deprecated - public ZipArchiveInputStream(final InputStream inputStream, final String encoding) { - this(inputStream, builder().setCharset(encoding)); + public ZipArchiveInputStream(final InputStream inputStream, final String encoding) throws IOException { + this(builder().setInputStream(inputStream).setCharset(encoding)); } /** * Constructs an instance using the specified encoding. * + *Since 1.29.0: throws {@link IOException}.
+ * * @param inputStream the stream to wrap. * @param encoding the encoding to use for file names, use null for the platform's default encoding. * @param useUnicodeExtraFields whether to use InfoZIP Unicode Extra Fields (if present) to set the file names. + * @throws IOException if an I/O error occurs. * @deprecated Since 1.29.0, use {@link #builder()}. */ @Deprecated - public ZipArchiveInputStream(final InputStream inputStream, final String encoding, final boolean useUnicodeExtraFields) { - this(inputStream, builder().setCharset(encoding).setUseUnicodeExtraFields(useUnicodeExtraFields)); + public ZipArchiveInputStream(final InputStream inputStream, final String encoding, final boolean useUnicodeExtraFields) throws IOException { + this(builder().setInputStream(inputStream).setCharset(encoding).setUseUnicodeExtraFields(useUnicodeExtraFields)); } /** * Constructs an instance using the specified encoding. * + *Since 1.29.0: throws {@link IOException}.
+ * * @param inputStream the stream to wrap. * @param encoding the encoding to use for file names, use null for the platform's default encoding. * @param useUnicodeExtraFields whether to use InfoZIP Unicode Extra Fields (if present) to set the file names. * @param supportStoredEntryDataDescriptor whether the stream will try to read STORED entries that use a data descriptor. + * @throws IOException if an I/O error occurs. * @since 1.1 * @deprecated Since 1.29.0, use {@link #builder()}. */ @@ -473,9 +481,10 @@ public ZipArchiveInputStream( final InputStream inputStream, final String encoding, final boolean useUnicodeExtraFields, - final boolean supportStoredEntryDataDescriptor) { + final boolean supportStoredEntryDataDescriptor) throws IOException { // @formatter:off - this(inputStream, builder() + this(builder() + .setInputStream(inputStream) .setCharset(encoding) .setUseUnicodeExtraFields(useUnicodeExtraFields) .setSupportStoredEntryDataDescriptor(supportStoredEntryDataDescriptor)); @@ -485,12 +494,15 @@ public ZipArchiveInputStream( /** * Constructs an instance using the specified encoding. * + *Since 1.29.0: throws {@link IOException}.
+ * * @param inputStream the stream to wrap. * @param encoding the encoding to use for file names, use null for the platform's default encoding. * @param useUnicodeExtraFields whether to use InfoZIP Unicode Extra Fields (if present) to set the file names. * @param supportStoredEntryDataDescriptor whether the stream will try to read STORED entries that use a data descriptor. * @param skipSplitSignature Whether the stream will try to skip the zip split signature(08074B50) at the beginning. * You will need to set this to true if you want to read a split archive. + * @throws IOException if an I/O error occurs. * @since 1.20 * @deprecated Since 1.29.0, use {@link #builder()}. */ @@ -500,9 +512,10 @@ public ZipArchiveInputStream( final String encoding, final boolean useUnicodeExtraFields, final boolean supportStoredEntryDataDescriptor, - final boolean skipSplitSignature) { + final boolean skipSplitSignature) throws IOException { // @formatter:off - this(inputStream, builder() + this(builder() + .setInputStream(inputStream) .setCharset(encoding) .setUseUnicodeExtraFields(useUnicodeExtraFields) .setSupportStoredEntryDataDescriptor(supportStoredEntryDataDescriptor) diff --git a/src/test/java/org/apache/commons/compress/LegacyConstructorsTest.java b/src/test/java/org/apache/commons/compress/LegacyConstructorsTest.java index d60863c6e09..d82ef793fe5 100644 --- a/src/test/java/org/apache/commons/compress/LegacyConstructorsTest.java +++ b/src/test/java/org/apache/commons/compress/LegacyConstructorsTest.java @@ -65,7 +65,7 @@ private static InputStream getNestedInputStream(final InputStream is) throws Ref return (InputStream) readField(is, "in", true); } - static StreamtestCpioConstructors() { + static Stream testCpioConstructors() throws IOException { final InputStream inputStream = mock(InputStream.class); return Stream.of( Arguments.of(new CpioArchiveInputStream(inputStream, 1024), inputStream, "US-ASCII", 1024), @@ -73,7 +73,7 @@ static Stream testCpioConstructors() { 
Arguments.of(new CpioArchiveInputStream(inputStream, "UTF-8"), inputStream, "UTF-8", 512)); } - static Stream testTarConstructors() { + static Stream testTarConstructors() throws IOException { final InputStream inputStream = mock(InputStream.class); final String defaultEncoding = Charset.defaultCharset().name(); final String otherEncoding = "UTF-8".equals(defaultEncoding) ? "US-ASCII" : "UTF-8"; @@ -87,7 +87,7 @@ static Stream testTarConstructors() { Arguments.of(new TarArchiveInputStream(inputStream, otherEncoding), inputStream, 10240, 512, otherEncoding, false)); } - static Stream testZipConstructors() { + static Stream testZipConstructors() throws IOException { final InputStream inputStream = mock(InputStream.class); return Stream.of( Arguments.of(new ZipArchiveInputStream(inputStream, "US-ASCII"), inputStream, "US-ASCII", true, false, false), @@ -101,7 +101,7 @@ void testArjConstructor() throws Exception { try (InputStream inputStream = Files.newInputStream(getPath("bla.arj")); ArjArchiveInputStream archiveInputStream = new ArjArchiveInputStream(inputStream, "US-ASCII")) { // Arj wraps the input stream in a DataInputStream - assertEquals(inputStream, getNestedInputStream(getNestedInputStream(archiveInputStream))); + assertEquals(inputStream, getNestedInputStream(archiveInputStream)); assertEquals(US_ASCII, archiveInputStream.getCharset()); } } diff --git a/src/test/java/org/apache/commons/compress/archivers/ArchiveStreamFactoryTest.java b/src/test/java/org/apache/commons/compress/archivers/ArchiveStreamFactoryTest.java index abba17db4a1..6616469c038 100644 --- a/src/test/java/org/apache/commons/compress/archivers/ArchiveStreamFactoryTest.java +++ b/src/test/java/org/apache/commons/compress/archivers/ArchiveStreamFactoryTest.java @@ -96,10 +96,10 @@ public String toString() { */ private static final String ARJ_DEFAULT; private static final String DUMP_DEFAULT; - private static final String ZIP_DEFAULT = getCharsetName(new ZipArchiveInputStream(null)); - private 
static final String CPIO_DEFAULT = getCharsetName(new CpioArchiveInputStream(null)); - private static final String TAR_DEFAULT = getCharsetName(new TarArchiveInputStream(null)); - private static final String JAR_DEFAULT = getCharsetName(new JarArchiveInputStream(null)); + private static final String ZIP_DEFAULT = getCharsetName(ZipArchiveInputStream.builder()); + private static final String CPIO_DEFAULT = getCharsetName(CpioArchiveInputStream.builder()); + private static final String TAR_DEFAULT = getCharsetName(TarArchiveInputStream.builder()); + private static final String JAR_DEFAULT = getCharsetName(JarArchiveInputStream.builder()); static { String dflt; @@ -156,6 +156,10 @@ public String toString() { new TestData("bla.zip", ArchiveStreamFactory.ZIP, true, StandardCharsets.UTF_8.name(), FACTORY_SET_UTF8, "charset"), new TestData("bla.zip", ArchiveStreamFactory.ZIP, true, StandardCharsets.US_ASCII.name(), FACTORY_SET_ASCII, "charset"), }; + private static String getCharsetName(final AbstractArchiveBuilder, ?> builder) { + return builder.getCharset().name(); + } + private static String getCharsetName(final ArchiveInputStream> inputStream) { return inputStream.getCharset().name(); } diff --git a/src/test/java/org/apache/commons/compress/archivers/ar/ArArchiveInputStreamTest.java b/src/test/java/org/apache/commons/compress/archivers/ar/ArArchiveInputStreamTest.java index 2504eeb207e..46345b7829a 100644 --- a/src/test/java/org/apache/commons/compress/archivers/ar/ArArchiveInputStreamTest.java +++ b/src/test/java/org/apache/commons/compress/archivers/ar/ArArchiveInputStreamTest.java @@ -24,7 +24,6 @@ import static org.junit.jupiter.api.Assertions.assertNotNull; import static org.junit.jupiter.api.Assertions.assertNull; import static org.junit.jupiter.api.Assertions.assertThrows; -import static org.mockito.Mockito.mock; import java.io.BufferedInputStream; import java.io.EOFException; @@ -221,8 +220,7 @@ public int read() throws IOException { @Test void 
testSingleArgumentConstructor() throws Exception { - final InputStream inputStream = mock(InputStream.class); - try (ArArchiveInputStream archiveStream = new ArArchiveInputStream(inputStream)) { + try (ArArchiveInputStream archiveStream = ArArchiveInputStream.builder().setURI(getURI("bla.ar")).get()) { assertEquals(US_ASCII, archiveStream.getCharset()); } } diff --git a/src/test/java/org/apache/commons/compress/archivers/arj/ArjArchiveInputStreamTest.java b/src/test/java/org/apache/commons/compress/archivers/arj/ArjArchiveInputStreamTest.java index d076c8a3db5..f85abafaf8e 100644 --- a/src/test/java/org/apache/commons/compress/archivers/arj/ArjArchiveInputStreamTest.java +++ b/src/test/java/org/apache/commons/compress/archivers/arj/ArjArchiveInputStreamTest.java @@ -24,8 +24,6 @@ import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertNotNull; import static org.junit.jupiter.api.Assertions.assertThrows; -import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.junit.jupiter.api.Assertions.fail; import java.io.IOException; import java.io.InputStream; @@ -35,7 +33,6 @@ import java.util.TimeZone; import org.apache.commons.compress.AbstractTest; -import org.apache.commons.compress.archivers.ArchiveException; import org.apache.commons.io.IOUtils; import org.apache.commons.io.output.ByteArrayOutputStream; import org.junit.jupiter.api.Test; @@ -83,15 +80,8 @@ private void assertForEach(final ArjArchiveInputStream archive) throws IOExcepti @Test void testFirstHeaderSizeSetToZero() { - final ArchiveException ex = assertThrows(ArchiveException.class, () -> { - try (ArjArchiveInputStream archive = ArjArchiveInputStream.builder() - .setURI(getURI("org/apache/commons/compress/arj/zero_sized_headers-fail.arj")) - .get()) { - // Do nothing, ArchiveException already thrown - fail("ArchiveException not thrown."); - } - }); - assertTrue(ex.getCause() instanceof IOException); + 
assertThrows(IOException.class, + () -> ArjArchiveInputStream.builder().setURI(getURI("org/apache/commons/compress/arj/zero_sized_headers-fail.arj")).get().close()); } @Test diff --git a/src/test/java/org/apache/commons/compress/archivers/dump/DumpArchiveInputStreamTest.java b/src/test/java/org/apache/commons/compress/archivers/dump/DumpArchiveInputStreamTest.java index 01e79807512..98e575dfe8a 100644 --- a/src/test/java/org/apache/commons/compress/archivers/dump/DumpArchiveInputStreamTest.java +++ b/src/test/java/org/apache/commons/compress/archivers/dump/DumpArchiveInputStreamTest.java @@ -24,7 +24,6 @@ import static org.junit.jupiter.api.Assertions.assertArrayEquals; import static org.junit.jupiter.api.Assertions.assertDoesNotThrow; import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertInstanceOf; import static org.junit.jupiter.api.Assertions.assertNotNull; import static org.junit.jupiter.api.Assertions.assertThrows; import static org.junit.jupiter.api.Assertions.assertTimeoutPreemptively; @@ -68,7 +67,6 @@ void checkUnsupportedRecordSizes(final int ntrec) throws Exception { final ArchiveException ex = assertThrows(ArchiveException.class, () -> DumpArchiveInputStream.builder() .setByteArray(createArchive(ntrec)) .get()); - assertInstanceOf(ArchiveException.class, ex.getCause()); assertTrue(ex.getMessage().contains(Integer.toString(ntrec)), "message should contain the invalid ntrec value"); } @@ -122,11 +120,14 @@ void testGetNextEntry() throws IOException { @Test void testInvalidCompressType() { - final ArchiveException ex = assertThrows(ArchiveException.class, () -> DumpArchiveInputStream.builder() + assertThrows(UnsupportedCompressionAlgorithmException.class, () -> DumpArchiveInputStream + // @formatter:off + .builder() .setURI(getURI("org/apache/commons/compress/dump/invalid_compression_type-fail.dump")) .get() - .close()); - assertInstanceOf(UnsupportedCompressionAlgorithmException.class, 
ex.getCause()); + .close() + // @formatter:on + ); } @Test @@ -176,26 +177,13 @@ void testMultiByteReadConsistentlyReturnsMinusOneAtEof() throws Exception { @Test void testNotADumpArchive() { - final ArchiveException ex = assertThrows( - ArchiveException.class, - () -> DumpArchiveInputStream.builder() - .setURI(getURI("bla.zip")) - .get() - .close(), - "expected an exception"); - assertTrue(ex.getCause() instanceof ShortFileException); + assertThrows(ShortFileException.class, () -> DumpArchiveInputStream.builder().setURI(getURI("bla.zip")).get().close(), "expected an exception"); } @Test - void testNotADumpArchiveButBigEnough() throws Exception { - final ArchiveException ex = assertThrows( - ArchiveException.class, - () -> DumpArchiveInputStream.builder() - .setURI(getURI("zip64support.tar.bz2")) - .get() - .close(), + void testNotADumpArchiveButBigEnough() { + assertThrows(UnrecognizedFormatException.class, () -> DumpArchiveInputStream.builder().setURI(getURI("zip64support.tar.bz2")).get().close(), "expected an exception"); - assertInstanceOf(UnrecognizedFormatException.class, ex.getCause()); } @Test diff --git a/src/test/java/org/apache/commons/compress/archivers/zip/ZipArchiveInputStreamTest.java b/src/test/java/org/apache/commons/compress/archivers/zip/ZipArchiveInputStreamTest.java index b5f71797d93..b29ac9a9279 100644 --- a/src/test/java/org/apache/commons/compress/archivers/zip/ZipArchiveInputStreamTest.java +++ b/src/test/java/org/apache/commons/compress/archivers/zip/ZipArchiveInputStreamTest.java @@ -69,7 +69,7 @@ private static final class AirliftZipArchiveInputStream extends ZipArchiveInputS private boolean used; - private AirliftZipArchiveInputStream(final InputStream inputStream) { + private AirliftZipArchiveInputStream(final InputStream inputStream) throws IOException { super(inputStream); } From ac17e97ea26e3f262a66843d8e1ed7aabf761f33 Mon Sep 17 00:00:00 2001 From: "Gary D. 
Gregory" Date: Thu, 16 Oct 2025 10:43:15 -0400 Subject: [PATCH 22/40] Javadoc --- .../archivers/ar/ArArchiveInputStream.java | 2 +- .../cpio/CpioArchiveInputStream.java | 10 ++++---- .../dump/DumpArchiveInputStream.java | 24 +++++++++---------- .../compress/archivers/sevenz/SevenZFile.java | 6 ++--- 4 files changed, 21 insertions(+), 21 deletions(-) diff --git a/src/main/java/org/apache/commons/compress/archivers/ar/ArArchiveInputStream.java b/src/main/java/org/apache/commons/compress/archivers/ar/ArArchiveInputStream.java index 2f3fa6f0189..fdd36804071 100644 --- a/src/main/java/org/apache/commons/compress/archivers/ar/ArArchiveInputStream.java +++ b/src/main/java/org/apache/commons/compress/archivers/ar/ArArchiveInputStream.java @@ -180,7 +180,7 @@ private ArArchiveInputStream(final Builder builder) throws IOException { * Since 1.29.0: throws {@link IOException}.
* * @param inputStream the ar input stream - * @throws IOException if an I/O error has occurred + * @throws IOException if an I/O error has occurred. */ public ArArchiveInputStream(final InputStream inputStream) throws IOException { this(builder().setInputStream(inputStream)); diff --git a/src/main/java/org/apache/commons/compress/archivers/cpio/CpioArchiveInputStream.java b/src/main/java/org/apache/commons/compress/archivers/cpio/CpioArchiveInputStream.java index d8a3565a424..33fdb6946b1 100644 --- a/src/main/java/org/apache/commons/compress/archivers/cpio/CpioArchiveInputStream.java +++ b/src/main/java/org/apache/commons/compress/archivers/cpio/CpioArchiveInputStream.java @@ -213,7 +213,7 @@ private CpioArchiveInputStream(final Builder builder) throws IOException { *Since 1.29.0: throws {@link IOException}.
* * @param in The cpio stream - * @throws IOException if an I/O error has occurred + * @throws IOException if an I/O error has occurred. */ public CpioArchiveInputStream(final InputStream in) throws IOException { this(builder().setInputStream(in)); @@ -226,7 +226,7 @@ public CpioArchiveInputStream(final InputStream in) throws IOException { * * @param in The cpio stream * @param blockSize The block size of the archive. - * @throws IOException if an I/O error has occurred + * @throws IOException if an I/O error has occurred. * @since 1.5 * @deprecated Since 1.29.0, use {@link #builder()}. */ @@ -244,7 +244,7 @@ public CpioArchiveInputStream(final InputStream in, final int blockSize) throws * @param blockSize The block size of the archive. * @param encoding The encoding of file names to expect - use null for the platform's default. * @throws IllegalArgumentException if {@code blockSize} is not bigger than 0 - * @throws IOException if an I/O error has occurred + * @throws IOException if an I/O error has occurred. * @since 1.6 * @deprecated Since 1.29.0, use {@link #builder()}. */ @@ -260,7 +260,7 @@ public CpioArchiveInputStream(final InputStream in, final int blockSize, final S * * @param in The cpio stream * @param encoding The encoding of file names to expect - use null for the platform's default. - * @throws IOException if an I/O error has occurred + * @throws IOException if an I/O error has occurred. * @since 1.6 * @deprecated Since 1.29.0, use {@link #builder()}. */ @@ -298,7 +298,7 @@ private void checkOpen() throws IOException { /** * Closes the CPIO input stream. * - * @throws IOException if an I/O error has occurred + * @throws IOException if an I/O error has occurred. 
*/ @Override public void close() throws IOException { diff --git a/src/main/java/org/apache/commons/compress/archivers/dump/DumpArchiveInputStream.java b/src/main/java/org/apache/commons/compress/archivers/dump/DumpArchiveInputStream.java index 9e7bfbd7c7e..4c3545fdb2b 100644 --- a/src/main/java/org/apache/commons/compress/archivers/dump/DumpArchiveInputStream.java +++ b/src/main/java/org/apache/commons/compress/archivers/dump/DumpArchiveInputStream.java @@ -91,9 +91,9 @@ public static Builder builder() { * Look at the first few bytes of the file to decide if it's a dump archive. With 32 bytes we can look at the magic value, with a full 1k we can verify the * checksum. * - * @param buffer data to match - * @param length length of data - * @return whether the buffer seems to contain dump data + * @param buffer data to match. + * @param length length of data. + * @return whether the buffer seems to contain dump data. */ public static boolean matches(final byte[] buffer, final int length) { // do we have enough of the header? @@ -187,8 +187,8 @@ private DumpArchiveInputStream(final Builder builder) throws IOException { * *Since 1.29.0: throws {@link IOException}.
* - * @param is stream to read from - * @throws IOException on error + * @param is stream to read from. + * @throws IOException on error. */ public DumpArchiveInputStream(final InputStream is) throws IOException { this(builder().setInputStream(is)); @@ -199,9 +199,9 @@ public DumpArchiveInputStream(final InputStream is) throws IOException { * *Since 1.29.0: throws {@link IOException}.
* - * @param is stream to read from - * @param encoding the encoding to use for file names, use null for the platform's default encoding - * @throws IOException on error + * @param is stream to read from. + * @param encoding the encoding to use for file names, use null for the platform's default encoding. + * @throws IOException on error. * @since 1.6 * @deprecated Since 1.29.0, use {@link #builder()}. */ @@ -240,8 +240,8 @@ public int getCount() { /** * Reads the next entry. * - * @return the next entry - * @throws IOException on error + * @return the next entry. + * @throws IOException on error. * @deprecated Use {@link #getNextEntry()}. */ @Deprecated @@ -345,7 +345,7 @@ public DumpArchiveEntry getNextEntry() throws IOException { /** * Gets full path for specified archive entry, or null if there's a gap. * - * @param entry + * @param entry The entry to query. * @return full path for specified archive entry, or null if there's a gap. * @throws DumpArchiveException Infinite loop detected in directory entries. */ @@ -387,7 +387,7 @@ private String getPath(final DumpArchiveEntry entry) throws DumpArchiveException /** * Gets the archive summary information. * - * @return the summary + * @return the summary. */ public DumpArchiveSummary getSummary() { return summary; diff --git a/src/main/java/org/apache/commons/compress/archivers/sevenz/SevenZFile.java b/src/main/java/org/apache/commons/compress/archivers/sevenz/SevenZFile.java index c068d302088..a6cdf6bc3a5 100644 --- a/src/main/java/org/apache/commons/compress/archivers/sevenz/SevenZFile.java +++ b/src/main/java/org/apache/commons/compress/archivers/sevenz/SevenZFile.java @@ -1157,7 +1157,7 @@ private Archive initializeArchive(final StartHeader startHeader, final byte[] pa * Reads a byte of data. * * @return the byte read, or -1 if end of input is reached - * @throws IOException if an I/O error has occurred + * @throws IOException if an I/O error has occurred. 
*/ public int read() throws IOException { @SuppressWarnings("resource") // does not allocate @@ -1173,7 +1173,7 @@ public int read() throws IOException { * * @param b the array to write data to * @return the number of bytes read, or -1 if end of input is reached - * @throws IOException if an I/O error has occurred + * @throws IOException if an I/O error has occurred. */ public int read(final byte[] b) throws IOException { return read(b, 0, b.length); @@ -1186,7 +1186,7 @@ public int read(final byte[] b) throws IOException { * @param off offset into the buffer to start filling at * @param len of bytes to read * @return the number of bytes read, or -1 if end of input is reached - * @throws IOException if an I/O error has occurred + * @throws IOException if an I/O error has occurred. */ public int read(final byte[] b, final int off, final int len) throws IOException { if (len == 0) { From a00bf8745fb980488b4f60bae057d1876a897e7d Mon Sep 17 00:00:00 2001 From: "Gary D. Gregory"Date: Thu, 16 Oct 2025 10:44:12 -0400 Subject: [PATCH 23/40] Rename private instance variable --- .../archivers/dump/DumpArchiveInputStream.java | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/src/main/java/org/apache/commons/compress/archivers/dump/DumpArchiveInputStream.java b/src/main/java/org/apache/commons/compress/archivers/dump/DumpArchiveInputStream.java index 4c3545fdb2b..f14c50643ed 100644 --- a/src/main/java/org/apache/commons/compress/archivers/dump/DumpArchiveInputStream.java +++ b/src/main/java/org/apache/commons/compress/archivers/dump/DumpArchiveInputStream.java @@ -113,7 +113,7 @@ public static boolean matches(final byte[] buffer, final int length) { private final DumpArchiveSummary summary; private DumpArchiveEntry active; private boolean isClosed; - private boolean hasHitEOF; + private boolean eof; private long entrySize; private long entryOffset; private int readIdx; @@ -144,7 +144,7 @@ public static boolean matches(final byte[] buffer, final int 
length) { private DumpArchiveInputStream(final Builder builder) throws IOException { super(builder); this.raw = new TapeInputStream(in); - this.hasHitEOF = false; + this.eof = false; this.zipEncoding = ZipEncodingHelper.getZipEncoding(builder.getCharset()); // read header, verify it's a dump archive. @@ -260,7 +260,7 @@ public DumpArchiveEntry getNextEntry() throws IOException { } while (entry == null) { - if (hasHitEOF) { + if (eof) { return null; } @@ -306,7 +306,7 @@ public DumpArchiveEntry getNextEntry() throws IOException { // check if this is an end-of-volume marker. if (DumpArchiveConstants.SEGMENT_TYPE.END == active.getHeaderType()) { - hasHitEOF = true; + eof = true; return null; } @@ -406,7 +406,7 @@ public int read(final byte[] buf, int off, int len) throws IOException { } int totalRead = 0; - if (hasHitEOF || isClosed || entryOffset >= entrySize) { + if (eof || isClosed || entryOffset >= entrySize) { return -1; } From 54baf273d73d11151fbfab1befee4b084e875758 Mon Sep 17 00:00:00 2001 From: "Piotr P. Karwasz" Date: Thu, 16 Oct 2025 23:22:16 +0200 Subject: [PATCH 24/40] 7z: enforce reference limits on `Folder` parsing (#729) 7z: enforce reference limits on `Folder` parsing This change aligns `Folder` parsing with the limits defined in the official 7-Zip implementation ([`7zIn.cpp`](https://github.com/ip7z/7zip/blob/main/CPP/7zip/Archive/7z/7zIn.cpp)): * Maximum coders per folder: **64** * Maximum input streams per coder: **64** * Maximum output streams per coder: **1** * Maximum total input streams per folder: **64** These bounds are consistent with the reference behavior and are safe because: * Other 7z implementations use the same or stricter limits. * No supported coder uses multiple inputs or outputs. * Custom coder definitions are not supported in this implementation. By enforcing these limits, the parser becomes simpler and more predictable, and redundant dynamic size checks can be removed. 
--- src/changes/changes.xml | 1 + .../compress/archivers/sevenz/Folder.java | 18 ++- .../compress/archivers/sevenz/SevenZFile.java | 77 +++++++++-- .../archivers/sevenz/SevenZFileTest.java | 129 ++++++++++++++++++ .../archivers/sevenz/SevenZFolderTest.java | 4 +- 5 files changed, 208 insertions(+), 21 deletions(-) diff --git a/src/changes/changes.xml b/src/changes/changes.xml index 3e7f5463e12..af7bf4aa47a 100644 --- a/src/changes/changes.xml +++ b/src/changes/changes.xml @@ -56,6 +56,7 @@ The type attribute can be add,update,fix,remove. AES256SHA256Decoder now enforces the CPP source k_NumCyclesPower_Supported_MAX = 24 limit. Don't lose precision while reading folders from a SevenZFile. Improve some exception messages in TarUtils and TarArchiveEntry. +SevenZFile now enforces the same folder and coder limits as the CPP implementation. BZip2CompressorInputStream now throws CompressorException (a subclass of IOException) for invalid or corrupted data, providing more specific error reporting. BZip2 input streams treat Huffman codes longer than 20 bits as corrupted data, matching the behavior of the reference implementation. diff --git a/src/main/java/org/apache/commons/compress/archivers/sevenz/Folder.java b/src/main/java/org/apache/commons/compress/archivers/sevenz/Folder.java index fca70b07395..7e368655887 100644 --- a/src/main/java/org/apache/commons/compress/archivers/sevenz/Folder.java +++ b/src/main/java/org/apache/commons/compress/archivers/sevenz/Folder.java @@ -37,13 +37,23 @@ final class Folder { /** * Total number of input streams across all coders. This field is currently unused but technically part of the 7z API. + * + *Currently limited to {@code MAX_CODER_STREAMS_PER_FOLDER}
*/ - long totalInputStreams; + int totalInputStreams; - /** Total number of output streams across all coders. */ - long totalOutputStreams; + /** + * Total number of output streams across all coders. + * + *Currently limited to {@code MAX_CODER_STREAMS_PER_FOLDER}
+ */ + int totalOutputStreams; - /** Mapping between input and output streams. */ + /** + * Mapping between input and output streams. + * + *Its size is equal to {@code totalOutputStreams - 1}
+ */ BindPair[] bindPairs; /** Indices of input streams, one per input stream not listed in bindPairs. */ diff --git a/src/main/java/org/apache/commons/compress/archivers/sevenz/SevenZFile.java b/src/main/java/org/apache/commons/compress/archivers/sevenz/SevenZFile.java index a6cdf6bc3a5..112e5fe5f7a 100644 --- a/src/main/java/org/apache/commons/compress/archivers/sevenz/SevenZFile.java +++ b/src/main/java/org/apache/commons/compress/archivers/sevenz/SevenZFile.java @@ -339,6 +339,30 @@ public Builder setUseDefaultNameForUnnamedEntries(final boolean useDefaultNameFo /** Shared with SevenZOutputFile and tests, neither mutates it. */ static final byte[] SIGNATURE = { (byte) '7', (byte) 'z', (byte) 0xBC, (byte) 0xAF, (byte) 0x27, (byte) 0x1C }; + /** + * Maximum number of coders permitted in a single 7z folder. + * + *This limit is defined by the original 7-Zip implementation + * ({@code CPP/7zip/Archive/7z/7zIn.cpp}) to guard against malformed archives:
+ * + *+ * #define k_Scan_NumCoders_MAX 64 + *+ */ + private static final int MAX_CODERS_PER_FOLDER = 64; + + /** + * Maximum total number of coder input/output streams permitted in a single folder. + * + *This limit is also taken from the reference implementation + * ({@code CPP/7zip/Archive/7z/7zIn.cpp}):
+ * + *+ * #define k_Scan_NumCodersStreams_in_Folder_MAX 64 + *+ */ + private static final int MAX_CODER_STREAMS_PER_FOLDER = 64; + /** * Creates a new Builder. * @@ -1438,12 +1462,15 @@ private void readFilesInfo(final ByteBuffer header, final Archive archive) throw calculateStreamMap(archive); } - private Folder readFolder(final ByteBuffer header) throws IOException { + Folder readFolder(final ByteBuffer header) throws IOException { final Folder folder = new Folder(); - final int numCoders = readUint64ToIntExact(header); - final Coder[] coders = new Coder[checkObjectArray(numCoders)]; - long totalInStreams = 0; - long totalOutStreams = 0; + final long numCoders = readUint64(header); + if (numCoders == 0 || numCoders > MAX_CODERS_PER_FOLDER) { + throw new ArchiveException("Unsupported 7z archive: " + numCoders + " coders in folder."); + } + final Coder[] coders = new Coder[(int) numCoders]; + int totalInStreams = 0; + int totalOutStreams = 0; for (int i = 0; i < coders.length; i++) { final int bits = getUnsignedByte(header); final int idSize = bits & 0xf; @@ -1459,10 +1486,19 @@ private Folder readFolder(final ByteBuffer header) throws IOException { numOutStreams = 1; } else { numInStreams = readUint64(header); + if (numInStreams > MAX_CODER_STREAMS_PER_FOLDER) { + throw new ArchiveException("Unsupported 7z archive: %,d coder input streams in folder.", numInStreams); + } numOutStreams = readUint64(header); + if (numOutStreams != 1) { + throw new ArchiveException("Unsupported 7z archive: %,d coder output streams in folder.", numOutStreams); + } + } + totalInStreams += (int) numInStreams; + if (totalInStreams > MAX_CODER_STREAMS_PER_FOLDER) { + throw new ArchiveException("Unsupported 7z archive: %,d coder input streams in folder.", totalInStreams); } - totalInStreams = ArchiveException.addExact(totalInStreams, numInStreams); - totalOutStreams = ArchiveException.addExact(totalOutStreams, numOutStreams); + totalOutStreams += (int) numOutStreams; byte[] properties = 
null; if (hasAttributes) { final long propertiesSize = readUint64(header); @@ -1471,22 +1507,30 @@ private Folder readFolder(final ByteBuffer header) throws IOException { } // would need to keep looping as above: if (moreAlternativeMethods) { - throw new ArchiveException("Alternative methods are unsupported, please report. The reference implementation doesn't support them either."); + throw new ArchiveException("Unsupported 7z archive: alternative methods are unsupported, please report. " + + "The reference implementation doesn't support them either."); } coders[i] = new Coder(decompressionMethodId, numInStreams, numOutStreams, properties); } folder.coders = coders; folder.totalInputStreams = totalInStreams; folder.totalOutputStreams = totalOutStreams; - final long numBindPairs = totalOutStreams - 1; - final BindPair[] bindPairs = new BindPair[checkObjectArray(ArchiveException.toIntExact(numBindPairs))]; + final int numBindPairs = totalOutStreams - 1; + final BindPair[] bindPairs = new BindPair[numBindPairs]; for (int i = 0; i < bindPairs.length; i++) { - bindPairs[i] = new BindPair(readUint64(header), readUint64(header)); + final long inIndex = readUint64(header); + if (inIndex >= totalInStreams) { + throw new ArchiveException("Unsupported 7z archive: bind pair inIndex %d out of range.", inIndex); + } + final long outIndex = readUint64(header); + if (outIndex >= totalOutStreams) { + throw new ArchiveException("Unsupported 7z archive: bind pair outIndex %d out of range.", inIndex); + } + bindPairs[i] = new BindPair(inIndex, outIndex); } folder.bindPairs = bindPairs; - final long numPackedStreams = totalInStreams - numBindPairs; - final int numPackedStreamsInt = ArchiveException.toIntExact(numPackedStreams); - final long[] packedStreams = new long[checkObjectArray(numPackedStreamsInt)]; + final int numPackedStreams = totalInStreams - numBindPairs; + final long[] packedStreams = new long[numPackedStreams]; if (numPackedStreams == 1) { long i; for (i = 0; i < 
totalInStreams; i++) { @@ -1496,8 +1540,11 @@ private Folder readFolder(final ByteBuffer header) throws IOException { } packedStreams[0] = i; } else { - for (int i = 0; i < numPackedStreamsInt; i++) { + for (int i = 0; i < numPackedStreams; i++) { packedStreams[i] = readUint64(header); + if (packedStreams[i] >= totalInStreams) { + throw new ArchiveException("Unsupported 7z archive: packed stream index %d out of range.", packedStreams[i]); + } } } folder.packedStreams = packedStreams; diff --git a/src/test/java/org/apache/commons/compress/archivers/sevenz/SevenZFileTest.java b/src/test/java/org/apache/commons/compress/archivers/sevenz/SevenZFileTest.java index ec3878429a3..772a2101f90 100644 --- a/src/test/java/org/apache/commons/compress/archivers/sevenz/SevenZFileTest.java +++ b/src/test/java/org/apache/commons/compress/archivers/sevenz/SevenZFileTest.java @@ -33,6 +33,8 @@ import java.io.File; import java.io.IOException; import java.io.InputStream; +import java.nio.ByteBuffer; +import java.nio.ByteOrder; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.attribute.FileTime; @@ -47,7 +49,9 @@ import java.util.List; import java.util.Map; import java.util.Random; +import java.util.function.Consumer; import java.util.function.Function; +import java.util.stream.Stream; import javax.crypto.Cipher; @@ -61,6 +65,8 @@ import org.apache.commons.io.input.ChecksumInputStream; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.MethodSource; class SevenZFileTest extends AbstractArchiveFileTest{ private static final String TEST2_CONTENT = "\r\n\r\n \r\n\t \n"; @@ -69,6 +75,114 @@ private static boolean isStrongCryptoAvailable() throws NoSuchAlgorithmException return Cipher.getMaxAllowedKeyLength("AES/ECB/PKCS5Padding") >= 256; } + static Stream\r\n > testReadFolder_Unsupported() { + return Stream.of( + // Folder with no coders + buf -> 
writeFolder(buf, new Coder[0]), + // Folder with too many coders + buf -> { + final Coder[] coders = new Coder[65]; + final Coder simpleCoder = new Coder(new byte[] { 0x03 }, 1, 1, null); + Arrays.fill(coders, simpleCoder); + writeFolder(buf, coders); + }, + // Folder with too many input streams per coder + buf -> { + final Coder coder = new Coder(new byte[] { 0x03 }, 65, 1, null); + writeFolder(buf, new Coder[] { coder }); + }, + // Folder with more than one output stream per coder + buf -> { + final Coder coder = new Coder(new byte[] { 0x03 }, 1, 2, null); + writeFolder(buf, new Coder[] { coder }); + }, + // Folder with too many total input streams + buf -> { + final Coder coder = new Coder(new byte[] { 0x03 }, 2, 1, null); + final Coder[] coders = new Coder[33]; + Arrays.fill(coders, coder); + writeFolder(buf, coders); + }, + // Folder with more alternative methods (not supported yet) + buf -> writeFolder(buf, new Coder[]{new Coder(new byte[]{0x03}, 1, 1, null)}, + true, false, false, false), + // Folder with unsupported bind pair in index + buf -> { + final Coder coder = new Coder(new byte[] { 0x03 }, 1, 1, null); + writeFolder(buf, new Coder[] { coder, coder }, false, true, false, false); + }, + // Folder with unsupported bind pair out index + buf -> { + final Coder coder = new Coder(new byte[] { 0x03 }, 1, 1, null); + writeFolder(buf, new Coder[] { coder, coder }, false, false, true, false); + }, + // Folder with unsupported packed stream index + buf -> { + final Coder coder = new Coder(new byte[]{0x03}, 2, 1, null); + writeFolder(buf, new Coder[]{ coder, coder }, false, false, false, true); + } + ); + } + + private static void writeBindPair(final ByteBuffer buffer, final long inIndex, final long outIndex) { + writeUint64(buffer, inIndex); + writeUint64(buffer, outIndex); + } + + private static void writeCoder(final ByteBuffer buffer, final byte[] methodId, final long numInStreams, final long numOutStreams, + final boolean moreAlternativeMethods) { + final 
boolean isComplex = numInStreams != 1 || numOutStreams != 1; + int flag = methodId.length; + if (isComplex) { + flag |= 0x10; + } + if (moreAlternativeMethods) { + flag |= 0x80; + } + // coder + buffer.put((byte) flag); + buffer.put(methodId); + if (isComplex) { + writeUint64(buffer, numInStreams); + writeUint64(buffer, numOutStreams); + } + } + + private static void writeFolder(final ByteBuffer buffer, final Coder[] coders) { + writeFolder(buffer, coders, false, false, false, false); + } + + private static void writeFolder(final ByteBuffer buffer, final Coder[] coders, final boolean moreAlternativeMethods, final boolean unsupportedBindPairIn, + final boolean unsupportedBindPairOut, final boolean unsupportedPackedStreams) { + writeUint64(buffer, coders.length); + long totalInStreams = 0; + long totalOutStreams = 0; + for (final Coder coder : coders) { + writeCoder(buffer, coder.decompressionMethodId, coder.numInStreams, coder.numOutStreams, moreAlternativeMethods); + totalInStreams += coder.numInStreams; + totalOutStreams += coder.numOutStreams; + } + long i = 0; + // Bind pairs: one less than number of total out streams + for (; i < totalOutStreams - 1; i++) { + final long inIndex = (unsupportedBindPairIn ? totalInStreams : 0) + i; + final long outIndex = (unsupportedBindPairOut ? totalOutStreams : 0) + i + 1; + writeBindPair(buffer, inIndex, outIndex); + } + // Packed streams: one per in stream that is not bound + if (totalInStreams > i + 1) { + for (; i < totalInStreams; i++) { + final long packedStreamIndex = (unsupportedPackedStreams ? 
totalInStreams : 0) + i; + writeUint64(buffer, packedStreamIndex); + } + } + } + + private static void writeUint64(final ByteBuffer buffer, final long value) { + buffer.put((byte) 0b1111_1111); + buffer.putLong(value); + } + private void assertDate(final SevenZArchiveEntry entry, final String value, final Function hasValue, final Function timeFunction, final Function dateFunction) { if (value != null) { @@ -838,6 +952,21 @@ void testReadEntriesOfSize0() throws IOException { } } + @ParameterizedTest + @MethodSource + void testReadFolder_Unsupported(final Consumer folderWriter) throws IOException { + try (SevenZFile file = SevenZFile.builder().setURI(getURI("bla.7z")).get()) { + // Allocate a buffer large enough to hold the folder data + final ByteBuffer buffer = ByteBuffer.allocate(8192).order(ByteOrder.LITTLE_ENDIAN); + folderWriter.accept(buffer); + buffer.flip(); + final ArchiveException e = assertThrows(ArchiveException.class, () -> { + file.readFolder(buffer); + }); + assertTrue(e.getMessage().contains("Unsupported 7z archive")); + } + } + /** * Test case for COMPRESS-681. 
*/ diff --git a/src/test/java/org/apache/commons/compress/archivers/sevenz/SevenZFolderTest.java b/src/test/java/org/apache/commons/compress/archivers/sevenz/SevenZFolderTest.java index 72139550518..ed2755386b3 100644 --- a/src/test/java/org/apache/commons/compress/archivers/sevenz/SevenZFolderTest.java +++ b/src/test/java/org/apache/commons/compress/archivers/sevenz/SevenZFolderTest.java @@ -53,12 +53,12 @@ void testGetUnpackSizeForCoderOne() { @Test void testGetUnpackSizeOne() throws ArchiveException { final Folder folder = new Folder(); - folder.totalOutputStreams = 266L; + folder.totalOutputStreams = 266; final BindPair[] bindPairArray = new BindPair[1]; final BindPair bindPair = new BindPair(0, 0); bindPairArray[0] = bindPair; folder.bindPairs = bindPairArray; - folder.totalOutputStreams = 1L; + folder.totalOutputStreams = 1; assertEquals(0L, folder.getUnpackSize()); } From ad8b0a000f95a02f8be91a23ebb349a5c1d82de4 Mon Sep 17 00:00:00 2001 From: "Piotr P. Karwasz" Date: Fri, 17 Oct 2025 13:51:23 +0200 Subject: [PATCH 25/40] Improve sparse file handling performance (#715) * Improve sparse file handling performance Previously, sparse files were processed recursively. On highly fragmented files, this led to deep recursion and significant inefficiency. This change replaces the recursive approach with an iterative strategy, which scales better for files with many fragments. It also introduces generated tests that simulate sparse files with very high fragmentation to ensure correctness and performance under stress. 
* fix: remove unused method * fix: simplify input streams * fix: error message * Fix failing tests * Sort members --- .../archivers/tar/TarArchiveInputStream.java | 210 +++------- .../compress/archivers/tar/TarFile.java | 131 +++--- .../utils/BoundedArchiveInputStream.java | 25 +- .../archivers/MaxNameEntryLengthTest.java | 32 +- .../archivers/TestArchiveGenerator.java | 390 ++++++++++++++++++ .../archivers/tar/SparseFilesTest.java | 61 +++ .../compress/archivers/tar/TarFileTest.java | 5 +- 7 files changed, 587 insertions(+), 267 deletions(-) create mode 100644 src/test/java/org/apache/commons/compress/archivers/TestArchiveGenerator.java diff --git a/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveInputStream.java b/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveInputStream.java index 2bd6ade2b5e..57cd596ca7a 100644 --- a/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveInputStream.java +++ b/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveInputStream.java @@ -26,11 +26,12 @@ import java.io.ByteArrayOutputStream; import java.io.EOFException; -import java.io.FileInputStream; import java.io.IOException; import java.io.InputStream; +import java.io.SequenceInputStream; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -150,21 +151,15 @@ public static boolean matches(final byte[] signature, final int length) { /** True if stream is at EOF. */ private boolean atEof; - /** Size of the current. */ - private long entrySize; - /** How far into the entry the stream is at. */ private long entryOffset; - /** Input streams for reading sparse entries. **/ - private List sparseInputStreams; - - /** The index of current input stream being read when reading sparse entries. */ - private int currentSparseInputStreamIndex; - /** The meta-data about the current entry. 
*/ private TarArchiveEntry currEntry; + /** The current input stream. */ + private InputStream currentInputStream; + /** The encoding of the file. */ private final ZipEncoding zipEncoding; @@ -322,6 +317,17 @@ public TarArchiveInputStream(final InputStream inputStream, final String encodin this(builder().setInputStream(inputStream).setCharset(encoding)); } + private void afterRead(final int read) throws IOException { + // Count the bytes read + count(read); + // Check for truncated entries + if (read == -1 && entryOffset < currEntry.getSize()) { + throw new EOFException(String.format("Truncated TAR archive: entry '%s' expected %,d bytes, actual %,d", currEntry.getName(), currEntry.getSize(), + entryOffset)); + } + entryOffset += Math.max(0, read); + } + /** * Gets the available data that can be read from the current entry in the archive. This does not indicate how much data is left in the entire archive, only * in the current entry. This value is determined from the entry's size header field and the amount of data already read from the current entry. 
@@ -350,8 +356,7 @@ public int available() throws IOException { * sparseInputStreams = new ArrayList<>(); final List sparseHeaders = currEntry.getOrderedSparseHeaders(); // Stream doesn't need to be closed at all as it doesn't use any resources final InputStream zeroInputStream = new TarArchiveSparseZeroInputStream(); // NOSONAR @@ -377,15 +382,15 @@ private void buildSparseInputStreams() throws IOException { // @formatter:off sparseInputStreams.add(BoundedInputStream.builder() .setInputStream(in) + .setAfterRead(this::afterRead) .setMaxCount(sparseHeader.getNumbytes()) + .setPropagateClose(false) .get()); // @formatter:on } offset = sparseHeader.getOffset() + sparseHeader.getNumbytes(); } - if (!sparseInputStreams.isEmpty()) { - currentSparseInputStreamIndex = 0; - } + currentInputStream = new SequenceInputStream(Collections.enumeration(sparseInputStreams)); } /** @@ -406,10 +411,9 @@ public boolean canReadEntryData(final ArchiveEntry archiveEntry) { @Override public void close() throws IOException { // Close all the input streams in sparseInputStreams - if (sparseInputStreams != null) { - for (final InputStream inputStream : sparseInputStreams) { - inputStream.close(); - } + if (currentInputStream != null) { + currentInputStream.close(); + currentInputStream = null; } in.close(); } @@ -425,26 +429,6 @@ private void consumeRemainderOfLastBlock() throws IOException { } } - /** - * For FileInputStream, the skip always return the number you input, so we need the available bytes to determine how many bytes are actually skipped - * - * @param available available bytes returned by {@link InputStream#available()}. - * @param skipped skipped bytes returned by {@link InputStream#skip()}. - * @param expected bytes expected to skip. - * @return number of bytes actually skipped. - * @throws IOException if a truncated tar archive is detected. 
- */ - private long getActuallySkipped(final long available, final long skipped, final long expected) throws IOException { - long actuallySkipped = skipped; - if (in instanceof FileInputStream) { - actuallySkipped = Math.min(skipped, available); - } - if (actuallySkipped != expected) { - throw new ArchiveException("Truncated TAR archive"); - } - return actuallySkipped; - } - /** * Gets the current TAR Archive Entry that this input stream is processing * @@ -509,8 +493,8 @@ public TarArchiveEntry getNextEntry() throws IOException { boolean lastWasSpecial = false; do { // If there is a current entry, skip any unread data and padding - if (currEntry != null) { - IOUtils.skip(this, Long.MAX_VALUE); // Skip to end of current entry + if (currentInputStream != null) { + IOUtils.skip(currentInputStream, Long.MAX_VALUE); // Skip to end of current entry skipRecordPadding(); // Skip padding to align to the next record } // Read the next header record @@ -525,13 +509,19 @@ public TarArchiveEntry getNextEntry() throws IOException { } // Parse the header into a new entry currEntry = new TarArchiveEntry(globalPaxHeaders, headerBuf, zipEncoding, lenient); + // Set up the input stream for the new entry + currentInputStream = BoundedInputStream.builder() + .setInputStream(in) + .setAfterRead(this::afterRead) + .setMaxCount(currEntry.getSize()) + .setPropagateClose(false) + .get(); entryOffset = 0; - entrySize = currEntry.getSize(); lastWasSpecial = TarUtils.isSpecialTarRecord(currEntry); if (lastWasSpecial) { // Handle PAX, GNU long name, or other special records - TarUtils.handleSpecialTarRecord(this, zipEncoding, getMaxEntryNameLength(), currEntry, paxHeaders, sparseHeaders, globalPaxHeaders, - globalSparseHeaders); + TarUtils.handleSpecialTarRecord(currentInputStream, zipEncoding, getMaxEntryNameLength(), currEntry, paxHeaders, sparseHeaders, + globalPaxHeaders, globalSparseHeaders); } } while (lastWasSpecial); // Apply global and local PAX headers @@ -539,9 +529,12 @@ public 
TarArchiveEntry getNextEntry() throws IOException { // Handle sparse files if (currEntry.isSparse()) { if (currEntry.isOldGNUSparse()) { + // Old GNU sparse format uses extra header blocks for metadata. + // These blocks are not included in the entry’s size, so we cannot + // rely on BoundedInputStream here. readOldGNUSparse(); } else if (currEntry.isPaxGNU1XSparse()) { - currEntry.setSparseHeaders(TarUtils.parsePAX1XSparseHeaders(in, getRecordSize())); + currEntry.setSparseHeaders(TarUtils.parsePAX1XSparseHeaders(currentInputStream, getRecordSize())); } // sparse headers are all done reading, we need to build // sparse input streams using these sparse headers @@ -551,8 +544,6 @@ public TarArchiveEntry getNextEntry() throws IOException { if (currEntry.isDirectory() && !currEntry.getName().endsWith("/")) { currEntry.setName(currEntry.getName() + "/"); } - // Update entry size in case it changed due to PAX headers - entrySize = currEntry.getSize(); return currEntry; } @@ -664,33 +655,13 @@ public int read(final byte[] buf, final int offset, int numToRead) throws IOExce if (numToRead == 0) { return 0; } - int totalRead = 0; if (isAtEOF() || isDirectory()) { return -1; } - if (currEntry == null) { + if (currEntry == null || currentInputStream == null) { throw new IllegalStateException("No current tar entry"); } - if (entryOffset >= currEntry.getRealSize()) { - return -1; - } - numToRead = Math.min(numToRead, available()); - if (currEntry.isSparse()) { - // for sparse entries, we need to read them in another way - totalRead = readSparse(buf, offset, numToRead); - } else { - totalRead = in.read(buf, offset, numToRead); - } - if (totalRead == -1) { - if (numToRead > 0) { - throw new EOFException("Truncated TAR archive"); - } - setAtEOF(true); - } else { - count(totalRead); - entryOffset += totalRead; - } - return totalRead; + return currentInputStream.read(buf, offset, numToRead); } /** @@ -710,9 +681,6 @@ private void readOldGNUSparse() throws IOException { 
currEntry.getSparseHeaders().addAll(entry.getSparseHeaders()); } while (entry.isExtended()); } - // sparse headers are all done reading, we need to build - // sparse input streams using these sparse headers - buildSparseInputStreams(); } /** @@ -730,52 +698,6 @@ protected byte[] readRecord() throws IOException { return recordBuffer; } - /** - * For sparse tar entries, there are many "holes"(consisting of all 0) in the file. Only the non-zero data is stored in tar files, and they are stored - * separately. The structure of non-zero data is introduced by the sparse headers using the offset, where a block of non-zero data starts, and numbytes, the - * length of the non-zero data block. When reading sparse entries, the actual data is read out with "holes" and non-zero data combined according to - * the sparse headers. - * - * @param buf The buffer into which to place bytes read. - * @param offset The offset at which to place bytes read. - * @param numToRead The number of bytes to read. - * @return The number of bytes read, or -1 at EOF. - * @throws IOException on error. 
- */ - private int readSparse(final byte[] buf, final int offset, final int numToRead) throws IOException { - // if there are no actual input streams, just read from the original input stream - if (sparseInputStreams == null || sparseInputStreams.isEmpty()) { - return in.read(buf, offset, numToRead); - } - if (currentSparseInputStreamIndex >= sparseInputStreams.size()) { - return -1; - } - final InputStream currentInputStream = sparseInputStreams.get(currentSparseInputStreamIndex); - final int readLen = currentInputStream.read(buf, offset, numToRead); - // if the current input stream is the last input stream, - // just return the number of bytes read from current input stream - if (currentSparseInputStreamIndex == sparseInputStreams.size() - 1) { - return readLen; - } - // if EOF of current input stream is meet, open a new input stream and recursively call read - if (readLen == -1) { - currentSparseInputStreamIndex++; - return readSparse(buf, offset, numToRead); - } - // if the rest data of current input stream is not long enough, open a new input stream - // and recursively call read - if (readLen < numToRead) { - currentSparseInputStreamIndex++; - final int readLenOfNext = readSparse(buf, offset + readLen, numToRead - readLen); - if (readLenOfNext == -1) { - return readLen; - } - return readLen + readLenOfNext; - } - // if the rest data of current input stream is enough(which means readLen == len), just return readLen - return readLen; - } - /** * Since we do not support marking just yet, we do nothing. 
*/ @@ -816,21 +738,11 @@ public long skip(final long n) throws IOException { if (n <= 0 || isDirectory()) { return 0; } - final long availableOfInputStream = in.available(); - final long available = currEntry.getRealSize() - entryOffset; - final long numToSkip = Math.min(n, available); - long skipped; - if (!currEntry.isSparse()) { - skipped = IOUtils.skip(in, numToSkip); - // for non-sparse entry, we should get the bytes actually skipped bytes along with - // inputStream.available() if inputStream is instance of FileInputStream - skipped = getActuallySkipped(availableOfInputStream, skipped, numToSkip); - } else { - skipped = skipSparse(numToSkip); + if (currEntry == null || currentInputStream == null) { + throw new IllegalStateException("No current tar entry"); } - count(skipped); - entryOffset += skipped; - return skipped; + // Use Apache Commons IO to skip as it handles skipping fully + return org.apache.commons.io.IOUtils.skip(currentInputStream, n); } /** @@ -839,37 +751,15 @@ public long skip(final long n) throws IOException { * @throws IOException if a truncated tar archive is detected. */ private void skipRecordPadding() throws IOException { - if (!isDirectory() && this.entrySize > 0 && this.entrySize % getRecordSize() != 0) { - final long available = in.available(); - final long numRecords = this.entrySize / getRecordSize() + 1; - final long padding = numRecords * getRecordSize() - this.entrySize; - long skipped = IOUtils.skip(in, padding); - skipped = getActuallySkipped(available, skipped, padding); + final long entrySize = currEntry != null ? 
currEntry.getSize() : 0; + if (!isDirectory() && entrySize > 0 && entrySize % getRecordSize() != 0) { + final long padding = getRecordSize() - (entrySize % getRecordSize()); + final long skipped = org.apache.commons.io.IOUtils.skip(in, padding); count(skipped); - } - } - - /** - * Skip n bytes from current input stream, if the current input stream doesn't have enough data to skip, jump to the next input stream and skip the rest - * bytes, keep doing this until total n bytes are skipped or the input streams are all skipped - * - * @param n bytes of data to skip. - * @return actual bytes of data skipped. - * @throws IOException if an I/O error occurs. - */ - private long skipSparse(final long n) throws IOException { - if (sparseInputStreams == null || sparseInputStreams.isEmpty()) { - return in.skip(n); - } - long bytesSkipped = 0; - while (bytesSkipped < n && currentSparseInputStreamIndex < sparseInputStreams.size()) { - final InputStream currentInputStream = sparseInputStreams.get(currentSparseInputStreamIndex); - bytesSkipped += currentInputStream.skip(n - bytesSkipped); - if (bytesSkipped < n) { - currentSparseInputStreamIndex++; + if (skipped != padding) { + throw new EOFException(String.format("Truncated TAR archive: failed to skip record padding for entry '%s'", currEntry.getName())); } } - return bytesSkipped; } /** diff --git a/src/main/java/org/apache/commons/compress/archivers/tar/TarFile.java b/src/main/java/org/apache/commons/compress/archivers/tar/TarFile.java index e699b1d4f45..d9cd604a610 100644 --- a/src/main/java/org/apache/commons/compress/archivers/tar/TarFile.java +++ b/src/main/java/org/apache/commons/compress/archivers/tar/TarFile.java @@ -22,15 +22,18 @@ import java.io.File; import java.io.IOException; import java.io.InputStream; +import java.io.SequenceInputStream; import java.nio.ByteBuffer; import java.nio.channels.SeekableByteChannel; import java.nio.file.Path; import java.nio.file.StandardOpenOption; import java.util.ArrayList; +import 
java.util.Collections; import java.util.HashMap; import java.util.LinkedList; import java.util.List; import java.util.Map; +import java.util.Objects; import org.apache.commons.compress.archivers.ArchiveException; import org.apache.commons.compress.archivers.ArchiveFile; @@ -39,7 +42,6 @@ import org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream; import org.apache.commons.compress.utils.ArchiveUtils; import org.apache.commons.compress.utils.BoundedArchiveInputStream; -import org.apache.commons.compress.utils.BoundedSeekableByteChannelInputStream; import org.apache.commons.io.function.IOStream; import org.apache.commons.io.input.BoundedInputStream; @@ -50,91 +52,45 @@ */ public class TarFile implements ArchiveFile { + /** + * InputStream that reads a specific entry from the archive. + * + * It ensures that:
+ *+ *
+ */ private final class BoundedTarEntryInputStream extends BoundedArchiveInputStream { private final SeekableByteChannel channel; - private final TarArchiveEntry entry; - - private long entryOffset; + private final long end; - private int currentSparseInputStreamIndex; - - BoundedTarEntryInputStream(final TarArchiveEntry entry, final SeekableByteChannel channel) throws IOException { - super(entry.getDataOffset(), entry.getRealSize()); - if (channel.size() - entry.getSize() < entry.getDataOffset()) { - throw new EOFException("Truncated TAR archive: entry size exceeds archive size"); - } - this.entry = entry; + BoundedTarEntryInputStream(final long start, final long remaining, final SeekableByteChannel channel) { + super(start, remaining); + this.end = start + remaining; this.channel = channel; } @Override protected int read(final long pos, final ByteBuffer buf) throws IOException { - if (entryOffset >= entry.getRealSize()) { - return -1; - } - final int totalRead; - if (entry.isSparse()) { - totalRead = readSparse(entryOffset, buf, buf.limit()); - } else { - totalRead = readArchive(pos, buf); - } + Objects.requireNonNull(buf, "ByteBuffer"); + // The caller ensures that [pos, pos + buf.remaining()] is within [start, end] + channel.position(pos); + final int totalRead = channel.read(buf); if (totalRead == -1) { - if (buf.array().length > 0) { - throw new EOFException("Truncated TAR archive"); + if (buf.remaining() > 0) { + throw new EOFException(String.format("Truncated TAR archive: expected at least %d bytes, but got only %d bytes", + end, channel.position())); } + // Marks the TarFile as having reached EOF. 
setAtEOF(true); } else { - entryOffset += totalRead; buf.flip(); } return totalRead; } - - private int readArchive(final long pos, final ByteBuffer buf) throws IOException { - channel.position(pos); - return channel.read(buf); - } - - private int readSparse(final long pos, final ByteBuffer buf, final int numToRead) throws IOException { - // if there are no actual input streams, just read from the original archive - final List- No more than the specified number of bytes are read from the underlying channel.
+ *- If the end of the entry is reached before the expected number of bytes, an {@link EOFException} is thrown.
+ *entrySparseInputStreams = sparseInputStreams.get(entry.getName()); - if (entrySparseInputStreams == null || entrySparseInputStreams.isEmpty()) { - return readArchive(entry.getDataOffset() + pos, buf); - } - if (currentSparseInputStreamIndex >= entrySparseInputStreams.size()) { - return -1; - } - final InputStream currentInputStream = entrySparseInputStreams.get(currentSparseInputStreamIndex); - final byte[] bufArray = new byte[numToRead]; - final int readLen = currentInputStream.read(bufArray); - if (readLen != -1) { - buf.put(bufArray, 0, readLen); - } - // if the current input stream is the last input stream, - // just return the number of bytes read from current input stream - if (currentSparseInputStreamIndex == entrySparseInputStreams.size() - 1) { - return readLen; - } - // if EOF of current input stream is meet, open a new input stream and recursively call read - if (readLen == -1) { - currentSparseInputStreamIndex++; - return readSparse(pos, buf, numToRead); - } - // if the rest data of current input stream is not long enough, open a new input stream - // and recursively call read - if (readLen < numToRead) { - currentSparseInputStreamIndex++; - final int readLenOfNext = readSparse(pos + readLen, buf, numToRead - readLen); - if (readLenOfNext == -1) { - return readLen; - } - return readLen + readLenOfNext; - } - // if the rest data of current input stream is enough(which means readLen == len), just return readLen - return readLen; - } } // @formatter:off @@ -423,7 +379,7 @@ private void buildSparseInputStreams() throws IOException { // possible integer overflow throw new ArchiveException("Unreadable TAR archive, sparse block offset or length too big"); } - streams.add(new BoundedSeekableByteChannelInputStream(start, sparseHeader.getNumbytes(), archive)); + streams.add(new BoundedTarEntryInputStream(start, sparseHeader.getNumbytes(), archive)); } offset = sparseHeader.getOffset() + sparseHeader.getNumbytes(); } @@ -467,7 +423,13 @@ public List 
getEntries() { @Override public InputStream getInputStream(final TarArchiveEntry entry) throws IOException { try { - return new BoundedTarEntryInputStream(entry, archive); + // Sparse entries are composed of multiple fragments: wrap them in a SequenceInputStream + if (entry.isSparse()) { + final List streams = sparseInputStreams.get(entry.getName()); + return new SequenceInputStream(streams != null ? Collections.enumeration(streams) : Collections.emptyEnumeration()); + } + // Regular entries are bounded: wrap in BoundedTarEntryInputStream to enforce size and detect premature EOF + return new BoundedTarEntryInputStream(entry.getDataOffset(), entry.getSize(), archive); } catch (final RuntimeException e) { throw new ArchiveException("Corrupted TAR archive. Can't read entry", (Throwable) e); } @@ -489,6 +451,7 @@ private TarArchiveEntry getNextTarEntry() throws IOException { final List sparseHeaders = new ArrayList<>(); // Handle special tar records boolean lastWasSpecial = false; + InputStream currentStream; do { // If there is a current entry, skip any unread data and padding if (currEntry != null) { @@ -509,22 +472,33 @@ private TarArchiveEntry getNextTarEntry() throws IOException { // Parse the header into a new entry final long position = archive.position(); currEntry = new TarArchiveEntry(globalPaxHeaders, headerBuf.array(), zipEncoding, lenient, position); + currentStream = new BoundedTarEntryInputStream(currEntry.getDataOffset(), currEntry.getSize(), archive); lastWasSpecial = TarUtils.isSpecialTarRecord(currEntry); if (lastWasSpecial) { // Handle PAX, GNU long name, or other special records - TarUtils.handleSpecialTarRecord(getInputStream(currEntry), zipEncoding, maxEntryNameLength, currEntry, paxHeaders, sparseHeaders, - globalPaxHeaders, globalSparseHeaders); + TarUtils.handleSpecialTarRecord(currentStream, zipEncoding, maxEntryNameLength, currEntry, paxHeaders, sparseHeaders, globalPaxHeaders, + globalSparseHeaders); } } while (lastWasSpecial); // Apply 
global and local PAX headers TarUtils.applyPaxHeadersToEntry(currEntry, paxHeaders, sparseHeaders, globalPaxHeaders, globalSparseHeaders); // Handle sparse files if (currEntry.isSparse()) { + // These sparse formats have the sparse headers in the entry if (currEntry.isOldGNUSparse()) { + // Old GNU sparse format uses extra header blocks for metadata. + // These blocks are not included in the entry’s size, so we cannot + // rely on BoundedTarEntryInputStream here. readOldGNUSparse(); + // Reposition to the start of the entry data to correctly compute the sparse streams + currEntry.setDataOffset(archive.position()); } else if (currEntry.isPaxGNU1XSparse()) { - currEntry.setSparseHeaders(TarUtils.parsePAX1XSparseHeaders(getInputStream(currEntry), recordSize)); - currEntry.setDataOffset(currEntry.getDataOffset() + recordSize); + final long position = archive.position(); + currEntry.setSparseHeaders(TarUtils.parsePAX1XSparseHeaders(currentStream, recordSize)); + // Adjust the current entry to point to the start of the sparse file data + final long sparseHeadersSize = archive.position() - position; + currEntry.setSize(currEntry.getSize() - sparseHeadersSize); + currEntry.setDataOffset(currEntry.getDataOffset() + sparseHeadersSize); } // sparse headers are all done reading, we need to build // sparse input streams using these sparse headers @@ -593,12 +567,8 @@ private void readOldGNUSparse() throws IOException { } entry = new TarArchiveSparseEntry(headerBuf.array()); currEntry.getSparseHeaders().addAll(entry.getSparseHeaders()); - currEntry.setDataOffset(currEntry.getDataOffset() + recordSize); } while (entry.isExtended()); } - // sparse headers are all done reading, we need to build - // sparse input streams using these sparse headers - buildSparseInputStreams(); } /** @@ -644,8 +614,7 @@ protected final void setAtEOF(final boolean eof) { */ private void skipRecordPadding() throws IOException { if (!isDirectory() && currEntry.getSize() > 0 && currEntry.getSize() % 
recordSize != 0) { - final long numRecords = currEntry.getSize() / recordSize + 1; - final long padding = numRecords * recordSize - currEntry.getSize(); + final long padding = recordSize - (currEntry.getSize() % recordSize); repositionForwardBy(padding); throwExceptionIfPositionIsNotInArchive(); } @@ -668,7 +637,7 @@ public IOStream extends TarArchiveEntry> stream() { */ private void throwExceptionIfPositionIsNotInArchive() throws IOException { if (archive.size() < archive.position()) { - throw new ArchiveException("Truncated TAR archive"); + throw new EOFException("Truncated TAR archive: archive should be at least " + archive.position() + " bytes but was " + archive.size() + " bytes"); } } diff --git a/src/main/java/org/apache/commons/compress/utils/BoundedArchiveInputStream.java b/src/main/java/org/apache/commons/compress/utils/BoundedArchiveInputStream.java index c66b24a1cdf..d334af0f377 100644 --- a/src/main/java/org/apache/commons/compress/utils/BoundedArchiveInputStream.java +++ b/src/main/java/org/apache/commons/compress/utils/BoundedArchiveInputStream.java @@ -78,15 +78,9 @@ public synchronized int read(final byte[] b, final int off, final int len) throw if (loc >= end) { return -1; } - final long maxLen = Math.min(len, end - loc); - if (maxLen <= 0) { - return 0; - } - if (off < 0 || off > b.length || maxLen > b.length - off) { - throw new IndexOutOfBoundsException("offset or len are out of bounds"); - } - - final ByteBuffer buf = ByteBuffer.wrap(b, off, (int) maxLen); + // Both len and end - loc are guaranteed to be > 0 here and at least len is <= Integer.MAX_VALUE. + final int maxLen = (int) Math.min(len, end - loc); + final ByteBuffer buf = ByteBuffer.wrap(b, off, maxLen); final int ret = read(loc, buf); if (ret > 0) { loc += ret; @@ -95,12 +89,15 @@ public synchronized int read(final byte[] b, final int off, final int len) throw } /** - * Reads content of the stream into a {@link ByteBuffer}. 
+ * Reads bytes from this stream into the given {@link ByteBuffer}, starting at the specified position. + * + * The caller is responsible for ensuring that the requested range + * {@code [pos, pos + buf.remaining())} lies within the valid bounds of the stream.
* - * @param pos position to start the read. - * @param buf buffer to add the read content. - * @return number of read bytes. - * @throws IOException if I/O fails. + * @param pos the position within the stream at which to begin reading + * @param buf the buffer into which bytes are read; bytes are written starting at the buffer’s current position + * @return the number of bytes read into the buffer + * @throws IOException if an I/O error occurs while reading */ protected abstract int read(long pos, ByteBuffer buf) throws IOException; } diff --git a/src/test/java/org/apache/commons/compress/archivers/MaxNameEntryLengthTest.java b/src/test/java/org/apache/commons/compress/archivers/MaxNameEntryLengthTest.java index 16ef55bc726..10fef8f1488 100644 --- a/src/test/java/org/apache/commons/compress/archivers/MaxNameEntryLengthTest.java +++ b/src/test/java/org/apache/commons/compress/archivers/MaxNameEntryLengthTest.java @@ -46,6 +46,7 @@ import org.apache.commons.io.function.IOStream; import org.apache.commons.lang3.StringUtils; import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.function.Executable; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.Arguments; import org.junit.jupiter.params.provider.MethodSource; @@ -111,12 +112,15 @@ static StreamtestTruncatedStreams() throws IOException { static Stream testTruncatedTarFiles() throws IOException { return Stream.of( - Arguments.of(TarFile.builder() - .setMaxEntryNameLength(Integer.MAX_VALUE) - .setURI(getURI("synthetic/long-name/pax-fail.tar"))), - Arguments.of(TarFile.builder() - .setMaxEntryNameLength(Integer.MAX_VALUE) - .setURI(getURI("synthetic/long-name/gnu-fail.tar")))); + Arguments.of( + TarFile.builder().setMaxEntryNameLength(Integer.MAX_VALUE).setURI(getURI("synthetic/long-name/pax-fail.tar")), + Integer.MAX_VALUE + ), + Arguments.of( + TarFile.builder().setMaxEntryNameLength(Integer.MAX_VALUE).setURI(getURI("synthetic/long-name/gnu-fail.tar")), + 
SOFT_MAX_ARRAY_LENGTH + ) + ); } static Stream testValidStreams() throws IOException { @@ -175,10 +179,18 @@ void testTruncatedStreams(final ArchiveInputStream> archiveInputStream, final @ParameterizedTest @MethodSource - void testTruncatedTarFiles(final TarFile.Builder tarFileBuilder) { - // Since the real size of the archive is known, the truncation is detected - // much earlier and before trying to read file names. - assertThrows(EOFException.class, () -> tarFileBuilder.get().getEntries()); + void testTruncatedTarFiles(final TarFile.Builder tarFileBuilder, final long expectedLength) { + // If the file name length exceeds available memory, the stream fails fast with MemoryLimitException. + // Otherwise, it fails with EOFException when the stream ends unexpectedly. + final Executable action = () -> tarFileBuilder.get().entries(); + if (Runtime.getRuntime().totalMemory() < expectedLength) { + final MemoryLimitException exception = assertThrows(MemoryLimitException.class, action); + final String message = exception.getMessage(); + assertNotNull(message); + assertTrue(message.contains(String.format("%,d", expectedLength)), "Message mentions expected length (" + expectedLength + "): " + message); + } else { + assertThrows(EOFException.class, action); + } } @Test diff --git a/src/test/java/org/apache/commons/compress/archivers/TestArchiveGenerator.java b/src/test/java/org/apache/commons/compress/archivers/TestArchiveGenerator.java new file mode 100644 index 00000000000..49c06da5172 --- /dev/null +++ b/src/test/java/org/apache/commons/compress/archivers/TestArchiveGenerator.java @@ -0,0 +1,390 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.commons.compress.archivers; + +import static java.nio.charset.StandardCharsets.US_ASCII; + +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.OutputStream; +import java.io.OutputStreamWriter; +import java.io.PrintWriter; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Iterator; +import java.util.List; +import java.util.stream.Collectors; + +import org.apache.commons.lang3.StringUtils; +import org.apache.commons.lang3.tuple.Pair; + +public final class TestArchiveGenerator { + + private static final int FILE_MODE = 0100644; + private static final int GROUP_ID = 0; + private static final String GROUP_NAME = "group"; + // TAR + private static final String OLD_GNU_MAGIC = "ustar "; + private static final int OWNER_ID = 0; + private static final String OWNER_NAME = "owner"; + private static final String PAX_MAGIC = "ustar\u000000"; + private static final int TIMESTAMP = 0; + + private static byte[] createData(final int size) { + final byte[] data = new byte[size]; + for (int i = 0; i < size; i++) { + data[i] = (byte) (i % 256); + } + return data; + } + + // Very fragmented sparse file + private static List<Pair<Integer, Integer>> createFragmentedSparseEntries(final int realSize) { + final List<Pair<Integer, Integer>> sparseEntries = new ArrayList<>(); + for (int
offset = 0; offset < realSize; offset++) { + sparseEntries.add(Pair.of(offset, 1)); + } + return sparseEntries; + } + + private static byte[] createGnuSparse00PaxData( + final Collection extends Pair > sparseEntries, final int realSize) { + final ByteArrayOutputStream baos = new ByteArrayOutputStream(); + try (PrintWriter writer = new PrintWriter(new OutputStreamWriter(baos, US_ASCII))) { + writePaxKeyValue("GNU.sparse.size", realSize, writer); + writePaxKeyValue("GNU.sparse.numblocks", sparseEntries.size(), writer); + for (final Pair entry : sparseEntries) { + writePaxKeyValue("GNU.sparse.offset", entry.getLeft(), writer); + writePaxKeyValue("GNU.sparse.numbytes", entry.getRight(), writer); + } + } + return baos.toByteArray(); + } + + private static byte[] createGnuSparse01PaxData( + final Collection extends Pair > sparseEntries, final int realSize) { + final ByteArrayOutputStream baos = new ByteArrayOutputStream(); + try (PrintWriter writer = new PrintWriter(new OutputStreamWriter(baos, US_ASCII))) { + writePaxKeyValue("GNU.sparse.size", realSize, writer); + writePaxKeyValue("GNU.sparse.numblocks", sparseEntries.size(), writer); + final String map = sparseEntries.stream() + .map(e -> e.getLeft() + "," + e.getRight()) + .collect(Collectors.joining(",")); + writePaxKeyValue("GNU.sparse.map", map, writer); + } + return baos.toByteArray(); + } + + private static byte[] createGnuSparse1EntriesData(final Collection extends Pair > sparseEntries) + throws IOException { + final ByteArrayOutputStream baos = new ByteArrayOutputStream(); + try (PrintWriter writer = new PrintWriter(new OutputStreamWriter(baos, US_ASCII))) { + writer.printf("%d\n", sparseEntries.size()); + for (final Pair entry : sparseEntries) { + writer.printf("%d\n", entry.getLeft()); + writer.printf("%d\n", entry.getRight()); + } + } + padTo512Bytes(baos.size(), baos); + return baos.toByteArray(); + } + + private static byte[] createGnuSparse1PaxData( + final Collection > sparseEntries, final int realSize) 
{ + final ByteArrayOutputStream baos = new ByteArrayOutputStream(); + try (PrintWriter writer = new PrintWriter(new OutputStreamWriter(baos, US_ASCII))) { + writePaxKeyValue("GNU.sparse.realsize", realSize, writer); + writePaxKeyValue("GNU.sparse.numblocks", sparseEntries.size(), writer); + writePaxKeyValue("GNU.sparse.major", 1, writer); + writePaxKeyValue("GNU.sparse.minor", 0, writer); + } + return baos.toByteArray(); + } + + public static void createSparseFileTestCases(final Path path) throws IOException { + if (!Files.isDirectory(path)) { + throw new IllegalArgumentException("Not a directory: " + path); + } + oldGnuSparse(path); + gnuSparse00(path); + gnuSparse01(path); + gnuSparse1X(path); + } + + private static void gnuSparse00(final Path path) throws IOException { + final Path file = path.resolve("gnu-sparse-00.tar"); + try (OutputStream out = Files.newOutputStream(file)) { + final byte[] data = createData(8 * 1024); + final List > sparseEntries = createFragmentedSparseEntries(data.length); + final byte[] paxData = createGnuSparse00PaxData(sparseEntries, data.length); + writeGnuSparse0File(data, paxData, out); + writeUstarTrailer(out); + } + } + + private static void gnuSparse01(final Path path) throws IOException { + final Path file = path.resolve("gnu-sparse-01.tar"); + try (OutputStream out = Files.newOutputStream(file)) { + final byte[] data = createData(8 * 1024); + final List > sparseEntries = createFragmentedSparseEntries(data.length); + final byte[] paxData = createGnuSparse01PaxData(sparseEntries, data.length); + writeGnuSparse0File(data, paxData, out); + writeUstarTrailer(out); + } + } + + private static void gnuSparse1X(final Path path) throws IOException { + final Path file = path.resolve("gnu-sparse-1.tar"); + try (OutputStream out = Files.newOutputStream(file)) { + final byte[] data = createData(8 * 1024); + final List > sparseEntries = createFragmentedSparseEntries(data.length); + writeGnuSparse1File(sparseEntries, data, out); + 
writeUstarTrailer(out); + } + } + + public static void main(final String[] args) throws IOException { + if (args.length != 1) { + System.err.println("Expected one argument: output directory"); + System.exit(1); + } + final Path path = Paths.get(args[0]); + if (!Files.isDirectory(path)) { + System.err.println("Not a directory: " + path); + System.exit(1); + } + // Sparse file examples + final Path sparsePath = path.resolve("sparse"); + Files.createDirectories(sparsePath); + createSparseFileTestCases(sparsePath); + } + + private static void oldGnuSparse(final Path path) throws IOException { + final Path file = path.resolve("old-gnu-sparse.tar"); + try (OutputStream out = Files.newOutputStream(file)) { + final byte[] data = createData(8 * 1024); + final List > sparseEntries = createFragmentedSparseEntries(data.length); + writeOldGnuSparseFile(sparseEntries, data, data.length, out); + writeUstarTrailer(out); + } + } + + private static int padTo512Bytes(final int offset, final OutputStream out) throws IOException { + int count = offset; + while (count % 512 != 0) { + out.write(0); + count++; + } + return count; + } + + private static void writeGnuSparse0File(final byte[] data, final byte[] paxData, final OutputStream out) + throws IOException { + // PAX entry + int offset = writeTarUstarHeader("./GNUSparseFile.1/" + "sparse-file.txt", paxData.length, PAX_MAGIC, 'x', out); + offset = padTo512Bytes(offset, out); + // PAX data + out.write(paxData); + offset += paxData.length; + offset = padTo512Bytes(offset, out); + // File entry + offset += writeTarUstarHeader("sparse-file.txt", data.length, PAX_MAGIC, '0', out); + offset = padTo512Bytes(offset, out); + // File data + out.write(data); + offset += data.length; + padTo512Bytes(offset, out); + } + + private static void writeGnuSparse1File( + final Collection > sparseEntries, final byte[] data, final OutputStream out) + throws IOException { + // PAX entry + final byte[] paxData = createGnuSparse1PaxData(sparseEntries, 
data.length); + int offset = writeTarUstarHeader("./GNUSparseFile.1/sparse-file.txt", paxData.length, PAX_MAGIC, 'x', out); + offset = padTo512Bytes(offset, out); + // PAX data + out.write(paxData); + offset += paxData.length; + offset = padTo512Bytes(offset, out); + // File entry + final byte[] sparseEntriesData = createGnuSparse1EntriesData(sparseEntries); + offset += writeTarUstarHeader("sparse-file.txt", sparseEntriesData.length + data.length, PAX_MAGIC, '0', out); + offset = padTo512Bytes(offset, out); + // File data + out.write(sparseEntriesData); + offset += sparseEntriesData.length; + out.write(data); + offset += data.length; + padTo512Bytes(offset, out); + } + + private static int writeOctalString(final long value, final int length, final OutputStream out) throws IOException { + int count = 0; + final String s = Long.toOctalString(value); + count += writeString(s, length - 1, out); + out.write('\0'); + return ++count; + } + + private static int writeOldGnuSparseEntries( + final Iterable > sparseEntries, final int limit, final OutputStream out) + throws IOException { + int offset = 0; + int count = 0; + final Iterator > it = sparseEntries.iterator(); + while (it.hasNext()) { + if (count >= limit) { + out.write(1); // more entries follow + return ++offset; + } + final Pair entry = it.next(); + it.remove(); + count++; + offset += writeOldGnuSparseEntry(entry.getLeft(), entry.getRight(), out); + } + while (count < limit) { + // pad with empty entries + offset += writeOldGnuSparseEntry(0, 0, out); + count++; + } + out.write(0); // no more entries + return ++offset; + } + + private static int writeOldGnuSparseEntry(final int offset, final int length, final OutputStream out) + throws IOException { + int count = 0; + count += writeOctalString(offset, 12, out); + count += writeOctalString(length, 12, out); + return count; + } + + private static int writeOldGnuSparseExtendedHeader( + final Iterable > sparseEntries, final OutputStream out) throws IOException { + int 
offset = 0; + offset += writeOldGnuSparseEntries(sparseEntries, 21, out); + offset = padTo512Bytes(offset, out); + return offset; + } + + private static void writeOldGnuSparseFile( + final Collection > sparseEntries, + final byte[] data, + final int realSize, + final OutputStream out) + throws IOException { + int offset = writeTarUstarHeader("sparse-file.txt", data.length, OLD_GNU_MAGIC, 'S', out); + while (offset < 386) { + out.write(0); + offset++; + } + // Sparse entries (24 bytes each) + offset += writeOldGnuSparseEntries(sparseEntries, 4, out); + // Real size (12 bytes) + offset += writeOctalString(realSize, 12, out); + offset = padTo512Bytes(offset, out); + // Write extended headers + while (!sparseEntries.isEmpty()) { + offset += writeOldGnuSparseExtendedHeader(sparseEntries, out); + } + // Write file data + out.write(data); + offset += data.length; + padTo512Bytes(offset, out); + } + + private static void writePaxKeyValue(final String key, final int value, final PrintWriter out) { + writePaxKeyValue(key, Integer.toString(value), out); + } + + private static void writePaxKeyValue(final String key, final String value, final PrintWriter out) { + final String entry = ' ' + key + "=" + value + "\n"; + // Guess length: length of length + space + entry + final int length = String.valueOf(entry.length()).length() + entry.length(); + // Recompute if number of digits changes + out.print(String.valueOf(length).length() + entry.length()); + out.print(entry); + } + + private static int writeString(final String s, final int length, final OutputStream out) throws IOException { + final byte[] bytes = s.getBytes(US_ASCII); + out.write(bytes); + for (int i = bytes.length; i < length; i++) { + out.write('\0'); + } + return length; + } + + private static int writeTarUstarHeader( + final String fileName, + final long fileSize, + final String magicAndVersion, + final char typeFlag, + final OutputStream out) + throws IOException { + int count = 0; + // File name (100 bytes) + 
count += writeString(fileName, 100, out); + // File mode (8 bytes) + count += writeOctalString(FILE_MODE, 8, out); + // Owner ID (8 bytes) + count += writeOctalString(OWNER_ID, 8, out); + // Group ID (8 bytes) + count += writeOctalString(GROUP_ID, 8, out); + // File size (12 bytes) + count += writeOctalString(fileSize, 12, out); + // Modification timestamp (12 bytes) + count += writeOctalString(TIMESTAMP, 12, out); + // Checksum (8 bytes), filled with spaces for now + count += writeString(StringUtils.repeat(' ', 7), 8, out); + // Link indicator (1 byte) + out.write(typeFlag); + count++; + // Name of linked file (100 bytes) + count += writeString("", 100, out); + // Magic (6 bytes) + Version (2 bytes) + count += writeString(magicAndVersion, 8, out); + // Owner user name (32 bytes) + count += writeString(OWNER_NAME, 32, out); + // Owner group name (32 bytes) + count += writeString(GROUP_NAME, 32, out); + // Device major number (8 bytes) + count += writeString("", 8, out); + // Device minor number (8 bytes) + count += writeString("", 8, out); + return count; + } + + private static void writeUstarTrailer(final OutputStream out) throws IOException { + int offset = 0; + // 1024 bytes of zero + while (offset < 1024) { + out.write(0); + offset++; + } + } + + private TestArchiveGenerator() { + // hide constructor + } +} diff --git a/src/test/java/org/apache/commons/compress/archivers/tar/SparseFilesTest.java b/src/test/java/org/apache/commons/compress/archivers/tar/SparseFilesTest.java index 2e03a978b29..ce39e07a89f 100644 --- a/src/test/java/org/apache/commons/compress/archivers/tar/SparseFilesTest.java +++ b/src/test/java/org/apache/commons/compress/archivers/tar/SparseFilesTest.java @@ -23,6 +23,7 @@ import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; import static 
org.junit.jupiter.api.Assertions.assertTrue; import static org.junit.jupiter.api.Assertions.fail; import static org.junit.jupiter.api.Assumptions.assumeFalse; @@ -35,14 +36,27 @@ import java.util.List; import org.apache.commons.compress.AbstractTest; +import org.apache.commons.compress.archivers.TestArchiveGenerator; import org.apache.commons.io.IOUtils; +import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.condition.DisabledOnOs; import org.junit.jupiter.api.condition.EnabledOnOs; import org.junit.jupiter.api.condition.OS; +import org.junit.jupiter.api.io.TempDir; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.ValueSource; class SparseFilesTest extends AbstractTest { + @TempDir + private static Path tempDir; + + @BeforeAll + static void setupAll() throws IOException { + TestArchiveGenerator.createSparseFileTestCases(tempDir); + } + private void assertPaxGNUEntry(final TarArchiveEntry entry, final String suffix) { assertEquals("sparsefile-" + suffix, entry.getName()); assertEquals(TarConstants.LF_NORMAL, entry.getLinkFlag()); @@ -245,6 +259,53 @@ void testExtractSparseTarsOnWindows() throws IOException { } } + @ParameterizedTest + @ValueSource(strings = {"old-gnu-sparse.tar" , "gnu-sparse-00.tar", "gnu-sparse-01.tar", "gnu-sparse-1.tar"}) + void testMaximallyFragmentedTarFile(final String fileName) throws IOException { + final int expectedSize = 8192; + try (TarFile input = TarFile.builder().setPath(tempDir.resolve(fileName)).get()) { + final List entries = input.getEntries(); + assertEquals(1, entries.size()); + final TarArchiveEntry entry = entries.get(0); + assertNotNull(entry); + assertEquals("sparse-file.txt", entry.getName()); + + try (InputStream inputStream = input.getInputStream(entry)) { + // read the expected amount of data + final byte[] content = new byte[expectedSize]; + assertEquals(expectedSize, IOUtils.read(inputStream, content)); + // verify that the 
stream is at EOF + assertEquals(IOUtils.EOF, inputStream.read()); + // check content + for (int i = 0; i < content.length; i++) { + assertEquals((byte) (i % 256), content[i], "at index " + i); + } + } + } + } + + @ParameterizedTest + @ValueSource(strings = {"old-gnu-sparse.tar", "gnu-sparse-00.tar", "gnu-sparse-01.tar", "gnu-sparse-1.tar"}) + void testMaximallyFragmentedTarStream(final String fileName) throws IOException { + final int expectedSize = 8192; + try (TarArchiveInputStream input = TarArchiveInputStream.builder().setPath(tempDir.resolve(fileName)).get()) { + final TarArchiveEntry entry = input.getNextEntry(); + assertNotNull(entry); + assertEquals("sparse-file.txt", entry.getName()); + // read the expected amount of data + final byte[] content = new byte[expectedSize]; + assertEquals(expectedSize, IOUtils.read(input, content)); + // verify that the stream is at EOF + assertEquals(IOUtils.EOF, input.read()); + // check content + for (int i = 0; i < content.length; i++) { + assertEquals((byte) (i % 256), content[i], "at index " + i); + } + // check that there are no more entries + assertNull(input.getNextEntry()); + } + } + @Test void testOldGNU() throws Throwable { try (TarArchiveInputStream tin = TarArchiveInputStream.builder() diff --git a/src/test/java/org/apache/commons/compress/archivers/tar/TarFileTest.java b/src/test/java/org/apache/commons/compress/archivers/tar/TarFileTest.java index add3f9464fc..ee1a677f75a 100644 --- a/src/test/java/org/apache/commons/compress/archivers/tar/TarFileTest.java +++ b/src/test/java/org/apache/commons/compress/archivers/tar/TarFileTest.java @@ -26,7 +26,6 @@ import java.io.BufferedOutputStream; import java.io.ByteArrayOutputStream; -import java.io.EOFException; import java.io.File; import java.io.IOException; import java.io.InputStream; @@ -226,7 +225,9 @@ void testParseTarWithNonNumberPaxHeaders() { @Test void testParseTarWithSpecialPaxHeaders() { - assertThrows(EOFException.class, () -> 
TarFile.builder().setURI(getURI("COMPRESS-530-fail.tar")).get()); + final ArchiveException ex = assertThrows(ArchiveException.class, () -> TarFile.builder().setURI(getURI("COMPRESS-530-fail.tar")).get()); + // Parsing fails since the data starts with null bytes + assertTrue(ex.getMessage().contains("non-number")); } @Test From d25f7221d671cfaeed2cd90a22d551ba36b00fe3 Mon Sep 17 00:00:00 2001 From: "Gary D. Gregory" Date: Fri, 17 Oct 2025 08:01:56 -0400 Subject: [PATCH 26/40] Simplify writing ustar trailer - Javadoc --- .../commons/compress/utils/BoundedArchiveInputStream.java | 8 ++++---- .../commons/compress/archivers/TestArchiveGenerator.java | 8 ++------ 2 files changed, 6 insertions(+), 10 deletions(-) diff --git a/src/main/java/org/apache/commons/compress/utils/BoundedArchiveInputStream.java b/src/main/java/org/apache/commons/compress/utils/BoundedArchiveInputStream.java index d334af0f377..e0d7f7d033a 100644 --- a/src/main/java/org/apache/commons/compress/utils/BoundedArchiveInputStream.java +++ b/src/main/java/org/apache/commons/compress/utils/BoundedArchiveInputStream.java @@ -94,10 +94,10 @@ public synchronized int read(final byte[] b, final int off, final int len) throw * The caller is responsible for ensuring that the requested range * {@code [pos, pos + buf.remaining())} lies within the valid bounds of the stream.
* - * @param pos the position within the stream at which to begin reading - * @param buf the buffer into which bytes are read; bytes are written starting at the buffer’s current position - * @return the number of bytes read into the buffer - * @throws IOException if an I/O error occurs while reading + * @param pos the position within the stream at which to begin reading. + * @param buf the buffer into which bytes are read; bytes are written starting at the buffer’s current position. + * @return the number of bytes read into the buffer. + * @throws IOException if an I/O error occurs while reading. */ protected abstract int read(long pos, ByteBuffer buf) throws IOException; } diff --git a/src/test/java/org/apache/commons/compress/archivers/TestArchiveGenerator.java b/src/test/java/org/apache/commons/compress/archivers/TestArchiveGenerator.java index 49c06da5172..a7c4fde0240 100644 --- a/src/test/java/org/apache/commons/compress/archivers/TestArchiveGenerator.java +++ b/src/test/java/org/apache/commons/compress/archivers/TestArchiveGenerator.java @@ -39,6 +39,7 @@ public final class TestArchiveGenerator { + private static final byte[] USTAR_TRAILER = new byte[1024]; private static final int FILE_MODE = 0100644; private static final int GROUP_ID = 0; private static final String GROUP_NAME = "group"; @@ -376,12 +377,7 @@ private static int writeTarUstarHeader( } private static void writeUstarTrailer(final OutputStream out) throws IOException { - int offset = 0; - // 1024 bytes of zero - while (offset < 1024) { - out.write(0); - offset++; - } + out.write(USTAR_TRAILER); } private TestArchiveGenerator() { From 2e8431937500fbd84b3eeb9d543f105b9fec4069 Mon Sep 17 00:00:00 2001 From: "Piotr P. 
Karwasz"Date: Fri, 17 Oct 2025 14:08:18 +0200 Subject: [PATCH 27/40] ARJ: correct byte accounting and truncation errors (#723) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * ARJ: correct byte accounting and truncation errors * `getBytesRead()` could drift from the actual archive size after a full read. * Exceptions on truncation errors were inconsistent or missing. * `DataInputStream` (big-endian) forced ad-hoc helpers for ARJ’s little-endian fields. * **Accurate byte accounting:** count all consumed bytes across main/file headers, variable strings, CRCs, extended headers, and file data. `getBytesRead()` now matches the archive length at end-of-stream. * **Consistent truncation handling:** * Truncation in the **main (archive) header**, read during construction, now throws an `ArchiveException` **wrapping** an `EOFException` (cause preserved). * Truncation in **file headers or file data** is propagated as a plain `EOFException` from `getNextEntry()`/`read()`. * **Endianness refactor:** replace `DataInputStream` with `EndianUtils`, removing several bespoke helpers and making intent explicit. * Add assertion that `getBytesRead()` equals the archive size after full consumption. * Parameterized truncation tests at key boundaries (signature, basic/fixed header sizes, end of fixed/basic header, CRC, extended-header length, file data) verifying the exception contract above. * fix: failing legacy test * fix: checkstyle error * fix: remove `EndianUtils` static import The static import makes it harder to distinguish calls that need to count bytes from those that do not. 
* Fix failing test * Sort methods * Remove unused method --- src/changes/changes.xml | 2 + .../archivers/arj/ArjArchiveInputStream.java | 300 +++++++++--------- .../compress/LegacyConstructorsTest.java | 1 - .../arj/ArjArchiveInputStreamTest.java | 105 ++++++ 4 files changed, 257 insertions(+), 151 deletions(-) diff --git a/src/changes/changes.xml b/src/changes/changes.xml index af7bf4aa47a..22b0182a5f9 100644 --- a/src/changes/changes.xml +++ b/src/changes/changes.xml @@ -80,6 +80,8 @@ The type attribute can be add,update,fix,remove. ArArchiveInputStream.readGNUStringTable(byte[], int, int) now provides a better exception message, wrapping the underlying exception. ArArchiveInputStream.read(byte[], int, int) now throws ArchiveException instead of ArithmeticException. Simplify handling of special AR records in ArArchiveInputStream. + +Correct byte accounting and truncation errors in ARJ input stream. org.apache.commons.compress.harmony.unpack200 now throws Pack200Exception, IllegalArgumentException, and IllegalStateException instead of other runtime exceptions and Error. 
diff --git a/src/main/java/org/apache/commons/compress/archivers/arj/ArjArchiveInputStream.java b/src/main/java/org/apache/commons/compress/archivers/arj/ArjArchiveInputStream.java index fa5654f4708..01984c253ad 100644 --- a/src/main/java/org/apache/commons/compress/archivers/arj/ArjArchiveInputStream.java +++ b/src/main/java/org/apache/commons/compress/archivers/arj/ArjArchiveInputStream.java @@ -20,7 +20,6 @@ import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; -import java.io.DataInputStream; import java.io.EOFException; import java.io.IOException; import java.io.InputStream; @@ -32,7 +31,7 @@ import org.apache.commons.compress.archivers.ArchiveException; import org.apache.commons.compress.archivers.ArchiveInputStream; import org.apache.commons.compress.utils.ArchiveUtils; -import org.apache.commons.compress.utils.IOUtils; +import org.apache.commons.io.EndianUtils; import org.apache.commons.io.input.BoundedInputStream; import org.apache.commons.io.input.ChecksumInputStream; @@ -99,14 +98,31 @@ public static boolean matches(final byte[] signature, final int length) { return length >= 2 && (0xff & signature[0]) == ARJ_MAGIC_1 && (0xff & signature[1]) == ARJ_MAGIC_2; } - private final DataInputStream dis; + private static void readExtraData(final int firstHeaderSize, final InputStream firstHeader, final LocalFileHeader localFileHeader) throws IOException { + if (firstHeaderSize >= 33) { + localFileHeader.extendedFilePosition = EndianUtils.readSwappedInteger(firstHeader); + if (firstHeaderSize >= 45) { + localFileHeader.dateTimeAccessed = EndianUtils.readSwappedInteger(firstHeader); + localFileHeader.dateTimeCreated = EndianUtils.readSwappedInteger(firstHeader); + localFileHeader.originalSizeEvenForVolumes = EndianUtils.readSwappedInteger(firstHeader); + } + } + } + + private static int readUnsignedByte(InputStream in) throws IOException { + final int value = in.read(); + if (value == -1) { + throw new EOFException(); + } + return value & 0xff; 
+ } + private final MainHeader mainHeader; private LocalFileHeader currentLocalFileHeader; private InputStream currentInputStream; private ArjArchiveInputStream(final Builder builder) throws IOException { super(builder); - dis = new DataInputStream(in); mainHeader = readMainHeader(); if ((mainHeader.arjFlags & MainHeader.Flags.GARBLED) != 0) { throw new ArchiveException("Encrypted ARJ files are unsupported"); @@ -148,11 +164,6 @@ public boolean canReadEntryData(final ArchiveEntry ae) { return ae instanceof ArjArchiveEntry && ((ArjArchiveEntry) ae).getMethod() == LocalFileHeader.Methods.STORED; } - @Override - public void close() throws IOException { - dis.close(); - } - /** * Gets the archive's comment. * @@ -185,10 +196,22 @@ public ArjArchiveEntry getNextEntry() throws IOException { currentLocalFileHeader = readLocalFileHeader(); if (currentLocalFileHeader != null) { // @formatter:off + final long currentPosition = getBytesRead(); currentInputStream = BoundedInputStream.builder() - .setInputStream(dis) + .setInputStream(in) .setMaxCount(currentLocalFileHeader.compressedSize) .setPropagateClose(false) + .setAfterRead(read -> { + if (read < 0) { + throw new EOFException(String.format( + "Truncated ARJ archive: entry '%s' expected %,d bytes, but only %,d were read.", + currentLocalFileHeader.name, + currentLocalFileHeader.compressedSize, + getBytesRead() - currentPosition + )); + } + count(read); + }) .get(); // @formatter:on if (currentLocalFileHeader.method == LocalFileHeader.Methods.STORED) { @@ -222,63 +245,51 @@ public int read(final byte[] b, final int off, final int len) throws IOException return currentInputStream.read(b, off, len); } - private int read16(final DataInputStream dataIn) throws IOException { - final int value = dataIn.readUnsignedShort(); - count(2); - return Integer.reverseBytes(value) >>> 16; - } - - private int read32(final DataInputStream dataIn) throws IOException { - final int value = dataIn.readInt(); - count(4); - return 
Integer.reverseBytes(value); - } - - private int read8(final DataInputStream dataIn) throws IOException { - final int value = dataIn.readUnsignedByte(); - count(1); - return value; + private String readComment(final InputStream dataIn) throws IOException { + return new String(readString(dataIn).toByteArray(), getCharset()); } - private void readExtraData(final int firstHeaderSize, final DataInputStream firstHeader, final LocalFileHeader localFileHeader) throws IOException { - if (firstHeaderSize >= 33) { - localFileHeader.extendedFilePosition = read32(firstHeader); - if (firstHeaderSize >= 45) { - localFileHeader.dateTimeAccessed = read32(firstHeader); - localFileHeader.dateTimeCreated = read32(firstHeader); - localFileHeader.originalSizeEvenForVolumes = read32(firstHeader); - pushedBackBytes(12); - } - pushedBackBytes(4); - } + private String readEntryName(final InputStream dataIn) throws IOException { + final ByteArrayOutputStream buffer = readString(dataIn); + ArchiveUtils.checkEntryNameLength(buffer.size(), getMaxEntryNameLength(), "ARJ"); + return new String(buffer.toByteArray(), getCharset()); } + /** + * Scans for the next valid ARJ header. + * + * @return The header bytes, or {@code null} if end of archive. + * @throws EOFException If the end of the stream is reached before a valid header is found. + * @throws IOException If an I/O error occurs. + */ private byte[] readHeader() throws IOException { - boolean found = false; - byte[] basicHeaderBytes = null; - do { + byte[] basicHeaderBytes; + // TODO: Explain why we are scanning for a valid ARJ header + // and don't throw, when an invalid/corrupted header is found, + // which might indicate a corrupted archive. 
+ while (true) { int first; - int second = read8(dis); + int second = readUnsignedByte(); do { first = second; - second = read8(dis); + second = readUnsignedByte(); } while (first != ARJ_MAGIC_1 && second != ARJ_MAGIC_2); - final int basicHeaderSize = read16(dis); + final int basicHeaderSize = readSwappedUnsignedShort(); if (basicHeaderSize == 0) { // end of archive return null; - } - if (basicHeaderSize <= 2600) { - basicHeaderBytes = readRange(dis, basicHeaderSize); - final long basicHeaderCrc32 = read32(dis) & 0xFFFFFFFFL; + } else if (basicHeaderSize <= 2600) { + basicHeaderBytes = org.apache.commons.io.IOUtils.toByteArray(in, basicHeaderSize); + count(basicHeaderSize); + final long basicHeaderCrc32 = EndianUtils.readSwappedUnsignedInteger(in); + count(4); final CRC32 crc32 = new CRC32(); crc32.update(basicHeaderBytes); if (basicHeaderCrc32 == crc32.getValue()) { - found = true; + return basicHeaderBytes; } } - } while (!found); - return basicHeaderBytes; + } } private LocalFileHeader readLocalFileHeader() throws IOException { @@ -286,100 +297,96 @@ private LocalFileHeader readLocalFileHeader() throws IOException { if (basicHeaderBytes == null) { return null; } - try (DataInputStream basicHeader = new DataInputStream(new ByteArrayInputStream(basicHeaderBytes))) { - - final int firstHeaderSize = basicHeader.readUnsignedByte(); - final byte[] firstHeaderBytes = readRange(basicHeader, firstHeaderSize - 1); - pushedBackBytes(firstHeaderBytes.length); - try (DataInputStream firstHeader = new DataInputStream(new ByteArrayInputStream(firstHeaderBytes))) { - - final LocalFileHeader localFileHeader = new LocalFileHeader(); - localFileHeader.archiverVersionNumber = firstHeader.readUnsignedByte(); - localFileHeader.minVersionToExtract = firstHeader.readUnsignedByte(); - localFileHeader.hostOS = firstHeader.readUnsignedByte(); - localFileHeader.arjFlags = firstHeader.readUnsignedByte(); - localFileHeader.method = firstHeader.readUnsignedByte(); - localFileHeader.fileType = 
firstHeader.readUnsignedByte(); - localFileHeader.reserved = firstHeader.readUnsignedByte(); - localFileHeader.dateTimeModified = read32(firstHeader); - localFileHeader.compressedSize = 0xffffFFFFL & read32(firstHeader); - localFileHeader.originalSize = 0xffffFFFFL & read32(firstHeader); - localFileHeader.originalCrc32 = 0xffffFFFFL & read32(firstHeader); - localFileHeader.fileSpecPosition = read16(firstHeader); - localFileHeader.fileAccessMode = read16(firstHeader); - pushedBackBytes(20); - localFileHeader.firstChapter = firstHeader.readUnsignedByte(); - localFileHeader.lastChapter = firstHeader.readUnsignedByte(); + final LocalFileHeader localFileHeader = new LocalFileHeader(); + try (InputStream basicHeader = new ByteArrayInputStream(basicHeaderBytes)) { + + final int firstHeaderSize = readUnsignedByte(basicHeader); + try (InputStream firstHeader = BoundedInputStream.builder().setInputStream(basicHeader).setMaxCount(firstHeaderSize - 1).get()) { + + localFileHeader.archiverVersionNumber = readUnsignedByte(firstHeader); + localFileHeader.minVersionToExtract = readUnsignedByte(firstHeader); + localFileHeader.hostOS = readUnsignedByte(firstHeader); + localFileHeader.arjFlags = readUnsignedByte(firstHeader); + localFileHeader.method = readUnsignedByte(firstHeader); + localFileHeader.fileType = readUnsignedByte(firstHeader); + localFileHeader.reserved = readUnsignedByte(firstHeader); + localFileHeader.dateTimeModified = EndianUtils.readSwappedInteger(firstHeader); + localFileHeader.compressedSize = EndianUtils.readSwappedUnsignedInteger(firstHeader); + localFileHeader.originalSize = EndianUtils.readSwappedUnsignedInteger(firstHeader); + localFileHeader.originalCrc32 = EndianUtils.readSwappedUnsignedInteger(firstHeader); + localFileHeader.fileSpecPosition = EndianUtils.readSwappedShort(firstHeader); + localFileHeader.fileAccessMode = EndianUtils.readSwappedShort(firstHeader); + localFileHeader.firstChapter = readUnsignedByte(firstHeader); + localFileHeader.lastChapter 
= readUnsignedByte(firstHeader); readExtraData(firstHeaderSize, firstHeader, localFileHeader); + } - localFileHeader.name = readEntryName(basicHeader); - localFileHeader.comment = readComment(basicHeader); - - final ArrayListextendedHeaders = new ArrayList<>(); - int extendedHeaderSize; - while ((extendedHeaderSize = read16(dis)) > 0) { - final byte[] extendedHeaderBytes = readRange(dis, extendedHeaderSize); - final long extendedHeaderCrc32 = 0xffffFFFFL & read32(dis); - final CRC32 crc32 = new CRC32(); - crc32.update(extendedHeaderBytes); - if (extendedHeaderCrc32 != crc32.getValue()) { - throw new ArchiveException("Extended header CRC32 verification failure"); - } - extendedHeaders.add(extendedHeaderBytes); - } - localFileHeader.extendedHeaders = extendedHeaders.toArray(new byte[0][]); + localFileHeader.name = readEntryName(basicHeader); + localFileHeader.comment = readComment(basicHeader); + } - return localFileHeader; + final ArrayList extendedHeaders = new ArrayList<>(); + int extendedHeaderSize; + while ((extendedHeaderSize = readSwappedUnsignedShort()) > 0) { + final byte[] extendedHeaderBytes = org.apache.commons.io.IOUtils.toByteArray(in, extendedHeaderSize); + count(extendedHeaderSize); + final long extendedHeaderCrc32 = EndianUtils.readSwappedUnsignedInteger(in); + count(4); + final CRC32 crc32 = new CRC32(); + crc32.update(extendedHeaderBytes); + if (extendedHeaderCrc32 != crc32.getValue()) { + throw new ArchiveException("Extended header CRC32 verification failure"); } + extendedHeaders.add(extendedHeaderBytes); } + localFileHeader.extendedHeaders = extendedHeaders.toArray(new byte[0][]); + + return localFileHeader; } private MainHeader readMainHeader() throws IOException { final byte[] basicHeaderBytes = readHeader(); - if (basicHeaderBytes == null) { - throw new ArchiveException("Archive ends without any headers"); - } - final DataInputStream basicHeader = new DataInputStream(new ByteArrayInputStream(basicHeaderBytes)); - - final int firstHeaderSize = 
basicHeader.readUnsignedByte(); - final byte[] firstHeaderBytes = readRange(basicHeader, firstHeaderSize - 1); - pushedBackBytes(firstHeaderBytes.length); - - final DataInputStream firstHeader = new DataInputStream(new ByteArrayInputStream(firstHeaderBytes)); - final MainHeader header = new MainHeader(); - header.archiverVersionNumber = firstHeader.readUnsignedByte(); - header.minVersionToExtract = firstHeader.readUnsignedByte(); - header.hostOS = firstHeader.readUnsignedByte(); - header.arjFlags = firstHeader.readUnsignedByte(); - header.securityVersion = firstHeader.readUnsignedByte(); - header.fileType = firstHeader.readUnsignedByte(); - header.reserved = firstHeader.readUnsignedByte(); - header.dateTimeCreated = read32(firstHeader); - header.dateTimeModified = read32(firstHeader); - header.archiveSize = 0xffffFFFFL & read32(firstHeader); - header.securityEnvelopeFilePosition = read32(firstHeader); - header.fileSpecPosition = read16(firstHeader); - header.securityEnvelopeLength = read16(firstHeader); - pushedBackBytes(20); // count has already counted them via readRange - header.encryptionVersion = firstHeader.readUnsignedByte(); - header.lastChapter = firstHeader.readUnsignedByte(); + try (InputStream basicHeader = new ByteArrayInputStream(basicHeaderBytes)) { + + final int firstHeaderSize = readUnsignedByte(basicHeader); + try (InputStream firstHeader = BoundedInputStream.builder().setInputStream(basicHeader).setMaxCount(firstHeaderSize - 1).get()) { + + header.archiverVersionNumber = readUnsignedByte(firstHeader); + header.minVersionToExtract = readUnsignedByte(firstHeader); + header.hostOS = readUnsignedByte(firstHeader); + header.arjFlags = readUnsignedByte(firstHeader); + header.securityVersion = readUnsignedByte(firstHeader); + header.fileType = readUnsignedByte(firstHeader); + header.reserved = readUnsignedByte(firstHeader); + header.dateTimeCreated = EndianUtils.readSwappedInteger(firstHeader); + header.dateTimeModified = 
EndianUtils.readSwappedInteger(firstHeader); + header.archiveSize = EndianUtils.readSwappedUnsignedInteger(firstHeader); + header.securityEnvelopeFilePosition = EndianUtils.readSwappedInteger(firstHeader); + header.fileSpecPosition = EndianUtils.readSwappedShort(firstHeader); + header.securityEnvelopeLength = EndianUtils.readSwappedShort(firstHeader); + header.encryptionVersion = readUnsignedByte(firstHeader); + header.lastChapter = readUnsignedByte(firstHeader); + + if (firstHeaderSize >= 33) { + header.arjProtectionFactor = readUnsignedByte(firstHeader); + header.arjFlags2 = readUnsignedByte(firstHeader); + readUnsignedByte(firstHeader); + readUnsignedByte(firstHeader); + } + } - if (firstHeaderSize >= 33) { - header.arjProtectionFactor = firstHeader.readUnsignedByte(); - header.arjFlags2 = firstHeader.readUnsignedByte(); - firstHeader.readUnsignedByte(); - firstHeader.readUnsignedByte(); + header.name = readEntryName(basicHeader); + header.comment = readComment(basicHeader); } - header.name = readEntryName(basicHeader); - header.comment = readComment(basicHeader); - - final int extendedHeaderSize = read16(dis); + final int extendedHeaderSize = readSwappedUnsignedShort(); if (extendedHeaderSize > 0) { - header.extendedHeaderBytes = readRange(dis, extendedHeaderSize); - final long extendedHeaderCrc32 = 0xffffFFFFL & read32(dis); + header.extendedHeaderBytes = org.apache.commons.io.IOUtils.toByteArray(in, extendedHeaderSize); + count(extendedHeaderSize); + final long extendedHeaderCrc32 = EndianUtils.readSwappedUnsignedInteger(in); + count(4); final CRC32 crc32 = new CRC32(); crc32.update(header.extendedHeaderBytes); if (extendedHeaderCrc32 != crc32.getValue()) { @@ -390,32 +397,25 @@ private MainHeader readMainHeader() throws IOException { return header; } - private byte[] readRange(final InputStream in, final int len) throws IOException { - final byte[] b = IOUtils.readRange(in, len); - count(b.length); - if (b.length < len) { - throw new EOFException(); - } - 
return b; - } - - private String readComment(DataInputStream dataIn) throws IOException { - return new String(readString(dataIn).toByteArray(), getCharset()); - } - - private String readEntryName(DataInputStream dataIn) throws IOException { - final ByteArrayOutputStream buffer = readString(dataIn); - ArchiveUtils.checkEntryNameLength(buffer.size(), getMaxEntryNameLength(), "ARJ"); - return new String(buffer.toByteArray(), getCharset()); - } - - private ByteArrayOutputStream readString(DataInputStream dataIn) throws IOException { + private ByteArrayOutputStream readString(final InputStream dataIn) throws IOException { try (ByteArrayOutputStream buffer = new ByteArrayOutputStream()) { int nextByte; - while ((nextByte = dataIn.readUnsignedByte()) != 0) { + while ((nextByte = readUnsignedByte(dataIn)) != 0) { buffer.write(nextByte); } return buffer; } } + + private int readSwappedUnsignedShort() throws IOException { + final int value = EndianUtils.readSwappedUnsignedShort(in); + count(2); + return value; + } + + private int readUnsignedByte() throws IOException { + final int value = readUnsignedByte(in); + count(1); + return value & 0xff; + } } diff --git a/src/test/java/org/apache/commons/compress/LegacyConstructorsTest.java b/src/test/java/org/apache/commons/compress/LegacyConstructorsTest.java index d82ef793fe5..40c11a81bf0 100644 --- a/src/test/java/org/apache/commons/compress/LegacyConstructorsTest.java +++ b/src/test/java/org/apache/commons/compress/LegacyConstructorsTest.java @@ -100,7 +100,6 @@ static Stream testZipConstructors() throws IOException { void testArjConstructor() throws Exception { try (InputStream inputStream = Files.newInputStream(getPath("bla.arj")); ArjArchiveInputStream archiveInputStream = new ArjArchiveInputStream(inputStream, "US-ASCII")) { - // Arj wraps the input stream in a DataInputStream assertEquals(inputStream, getNestedInputStream(archiveInputStream)); assertEquals(US_ASCII, archiveInputStream.getCharset()); } diff --git 
a/src/test/java/org/apache/commons/compress/archivers/arj/ArjArchiveInputStreamTest.java b/src/test/java/org/apache/commons/compress/archivers/arj/ArjArchiveInputStreamTest.java index f85abafaf8e..25a66622053 100644 --- a/src/test/java/org/apache/commons/compress/archivers/arj/ArjArchiveInputStreamTest.java +++ b/src/test/java/org/apache/commons/compress/archivers/arj/ArjArchiveInputStreamTest.java @@ -25,17 +25,22 @@ import static org.junit.jupiter.api.Assertions.assertNotNull; import static org.junit.jupiter.api.Assertions.assertThrows; +import java.io.EOFException; import java.io.IOException; import java.io.InputStream; import java.nio.charset.Charset; import java.nio.file.Files; +import java.nio.file.Path; import java.util.Calendar; import java.util.TimeZone; import org.apache.commons.compress.AbstractTest; import org.apache.commons.io.IOUtils; +import org.apache.commons.io.input.BoundedInputStream; import org.apache.commons.io.output.ByteArrayOutputStream; import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.ValueSource; /** * Tests {@link ArjArchiveInputStream}. 
@@ -106,6 +111,19 @@ void testForEach() throws Exception { assertEquals(expected.toString(), result.toString()); } + @ParameterizedTest + @ValueSource(strings = { "bla.arj", "bla.unix.arj" }) + void testGetBytesRead(final String resource) throws IOException { + final Path path = getPath(resource); + try (ArjArchiveInputStream in = ArjArchiveInputStream.builder().setPath(path).get()) { + while (in.getNextEntry() != null) { + // nop + } + final long expected = Files.size(path); + assertEquals(expected, in.getBytesRead(), "getBytesRead() did not return the expected value"); + } + } + @Test void testGetNextEntry() throws Exception { final StringBuilder expected = new StringBuilder(); @@ -266,4 +284,91 @@ void testSingleByteReadConsistentlyReturnsMinusOneAtEof() throws Exception { assertForEach(archive); } } + + /** + * Verifies that reading an ARJ header record cut short at various boundaries + * results in an {@link EOFException}. + * + * The test archive is crafted so that the local file header of the first entry begins at + * byte offset {@code 0x0035}. Within that header:
+ *+ *
+ * + * @param maxCount absolute truncation point (number of readable bytes from the start of the file) + */ + @ParameterizedTest + @ValueSource(longs = { + // Before the local file header signature + 0x35, + // Immediately after the 2-byte signature + 0x35 + 0x02, + // Inside / after the basic-header size (2 bytes at 0x02–0x03) + 0x35 + 0x03, 0x35 + 0x04, + // Just after the fixed-header size (1 byte at 0x04) + 0x35 + 0x05, + // End of fixed header (0x04 + first_hdr_size == 0x32) + 0x35 + 0x32, + // End of basic header after filename/comment (0x04 + basic_hdr_size == 0x3d) + 0x35 + 0x3d, + // Inside / after the basic-header CRC-32 (4 bytes) + 0x35 + 0x3e, 0x35 + 0x41, + // Inside / after the extended-header length (2 bytes) + 0x35 + 0x42, 0x35 + 0x43, + // One byte before the first file’s data + 0x95 + }) + void testTruncatedLocalHeader(long maxCount) throws Exception { + try (InputStream input = BoundedInputStream.builder().setURI(getURI("bla.arj")).setMaxCount(maxCount).get(); + ArjArchiveInputStream archive = ArjArchiveInputStream.builder().setInputStream(input).get()) { + assertThrows(EOFException.class, () -> { + archive.getNextEntry(); + IOUtils.skip(archive, Long.MAX_VALUE); + }); + } + } + + /** + * Verifies that reading an ARJ header record cut short at various boundaries + * results in an {@link EOFException}. + * + *- Basic header size (2 bytes at offsets 0x02–0x03) = {@code 0x0039}.
+ *- Fixed header size (aka {@code first_hdr_size}, 1 byte at 0x04) = {@code 0x2E}.
+ *- The filename and comment C-strings follow the fixed header and complete the basic header.
+ *- A 4-byte basic header CRC-32 follows the basic header.
+ *The main archive header is at the beginning of the file. Within that header:
+ *+ *
+ * + * @param maxCount absolute truncation point (number of readable bytes from the start of the file) + */ + @ParameterizedTest + @ValueSource(longs = { + // Empty file. + 0, + // Immediately after the 2-byte signature + 0x02, + // Inside / after the basic-header size (2 bytes at 0x02–0x03) + 0x03, 0x04, + // Just after the fixed-header size (1 byte at 0x04) + 0x05, + // End of fixed header (0x04 + first_hdr_size == 0x26) + 0x26, + // End of basic header after filename/comment (0x04 + basic_hdr_size == 0x2f) + 0x2f, + // Inside / after the basic-header CRC-32 (4 bytes) + 0x30, 0x33, + // Inside the extended-header length (2 bytes) + 0x34}) + void testTruncatedMainHeader(long maxCount) throws Exception { + try (InputStream input = BoundedInputStream.builder() + .setURI(getURI("bla.arj")) + .setMaxCount(maxCount) + .get()) { + assertThrows(EOFException.class, () -> ArjArchiveInputStream.builder().setInputStream(input).get()); + } + } } From f1b18050992ae7b9e1dbbed3a2fe202ef4f13d03 Mon Sep 17 00:00:00 2001 From: "Gary D. Gregory"- Basic header size (2 bytes at offsets 0x02–0x03) = {@code 0x002b}.
+ *- Fixed header size (aka {@code first_hdr_size}, 1 byte at 0x04) = {@code 0x22}.
+ *- The archive name and comment C-strings follow the fixed header and complete the basic header.
+ *- A 4-byte basic header CRC-32 follows the basic header.
+ *Date: Fri, 17 Oct 2025 08:21:49 -0400 Subject: [PATCH 28/40] Refactor common test pattern Use final --- .../org/apache/commons/compress/AbstractTest.java | 11 +++++++++++ .../archivers/arj/ArjArchiveInputStreamTest.java | 8 +++----- .../archivers/cpio/CpioArchiveInputStreamTest.java | 10 ---------- .../archivers/tar/TarArchiveInputStreamTest.java | 6 ++---- .../archivers/zip/ZipArchiveInputStreamTest.java | 3 +-- 5 files changed, 17 insertions(+), 21 deletions(-) diff --git a/src/test/java/org/apache/commons/compress/AbstractTest.java b/src/test/java/org/apache/commons/compress/AbstractTest.java index 1a72bb2985c..ba6489a6f4a 100644 --- a/src/test/java/org/apache/commons/compress/AbstractTest.java +++ b/src/test/java/org/apache/commons/compress/AbstractTest.java @@ -19,6 +19,7 @@ package org.apache.commons.compress; import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; import static org.junit.jupiter.api.Assertions.fail; import java.io.BufferedInputStream; @@ -224,6 +225,16 @@ protected void closeQuietly(final Closeable closeable) { IOUtils.closeQuietly(closeable); } + protected long consumeEntries(final ArchiveInputStream in) throws IOException { + long count = 0; + E entry; + while ((entry = in.getNextEntry()) != null) { + count++; + assertNotNull(entry); + } + return count; + } + /** * Creates an archive of text-based files in several directories. The archive name is the factory identifier for the archiver, for example zip, tar, cpio, * jar, ar. The archive is created as a temp file. 
diff --git a/src/test/java/org/apache/commons/compress/archivers/arj/ArjArchiveInputStreamTest.java b/src/test/java/org/apache/commons/compress/archivers/arj/ArjArchiveInputStreamTest.java index 25a66622053..8d32be9e15b 100644 --- a/src/test/java/org/apache/commons/compress/archivers/arj/ArjArchiveInputStreamTest.java +++ b/src/test/java/org/apache/commons/compress/archivers/arj/ArjArchiveInputStreamTest.java @@ -116,9 +116,7 @@ void testForEach() throws Exception { void testGetBytesRead(final String resource) throws IOException { final Path path = getPath(resource); try (ArjArchiveInputStream in = ArjArchiveInputStream.builder().setPath(path).get()) { - while (in.getNextEntry() != null) { - // nop - } + consumeEntries(in); final long expected = Files.size(path); assertEquals(expected, in.getBytesRead(), "getBytesRead() did not return the expected value"); } @@ -321,7 +319,7 @@ void testSingleByteReadConsistentlyReturnsMinusOneAtEof() throws Exception { // One byte before the first file’s data 0x95 }) - void testTruncatedLocalHeader(long maxCount) throws Exception { + void testTruncatedLocalHeader(final long maxCount) throws Exception { try (InputStream input = BoundedInputStream.builder().setURI(getURI("bla.arj")).setMaxCount(maxCount).get(); ArjArchiveInputStream archive = ArjArchiveInputStream.builder().setInputStream(input).get()) { assertThrows(EOFException.class, () -> { @@ -363,7 +361,7 @@ void testTruncatedLocalHeader(long maxCount) throws Exception { 0x30, 0x33, // Inside the extended-header length (2 bytes) 0x34}) - void testTruncatedMainHeader(long maxCount) throws Exception { + void testTruncatedMainHeader(final long maxCount) throws Exception { try (InputStream input = BoundedInputStream.builder() .setURI(getURI("bla.arj")) .setMaxCount(maxCount) diff --git a/src/test/java/org/apache/commons/compress/archivers/cpio/CpioArchiveInputStreamTest.java b/src/test/java/org/apache/commons/compress/archivers/cpio/CpioArchiveInputStreamTest.java index 
02eda947b29..166888ce27b 100644 --- a/src/test/java/org/apache/commons/compress/archivers/cpio/CpioArchiveInputStreamTest.java +++ b/src/test/java/org/apache/commons/compress/archivers/cpio/CpioArchiveInputStreamTest.java @@ -36,16 +36,6 @@ class CpioArchiveInputStreamTest extends AbstractTest { - private long consumeEntries(final CpioArchiveInputStream in) throws IOException { - long count = 0; - CpioArchiveEntry entry; - while ((entry = in.getNextEntry()) != null) { - count++; - assertNotNull(entry); - } - return count; - } - @Test void testCpioUnarchive() throws Exception { final StringBuilder expected = new StringBuilder(); diff --git a/src/test/java/org/apache/commons/compress/archivers/tar/TarArchiveInputStreamTest.java b/src/test/java/org/apache/commons/compress/archivers/tar/TarArchiveInputStreamTest.java index 665c29e1eea..c293a53f626 100644 --- a/src/test/java/org/apache/commons/compress/archivers/tar/TarArchiveInputStreamTest.java +++ b/src/test/java/org/apache/commons/compress/archivers/tar/TarArchiveInputStreamTest.java @@ -167,7 +167,7 @@ private void testCompress666(final int factor, final boolean bufferInputStream, final ExecutorService executorService = Executors.newFixedThreadPool(10); try { final List >> tasks = IntStream.range(0, 200).mapToObj(index -> executorService.submit(() -> { - TarArchiveEntry tarEntry = null; + final TarArchiveEntry tarEntry = null; try (InputStream inputStream = getClass().getResourceAsStream(localPath); // @formatter:off TarArchiveInputStream tarInputStream = TarArchiveInputStream.builder() @@ -176,9 +176,7 @@ private void testCompress666(final int factor, final boolean bufferInputStream, .setRecordSize(TarConstants.DEFAULT_RCDSIZE) .get()) { // @formatter:on - while ((tarEntry = tarInputStream.getNextEntry()) != null) { - assertNotNull(tarEntry); - } + consumeEntries(tarInputStream); } catch (final IOException e) { fail(Objects.toString(tarEntry), e); } diff --git 
a/src/test/java/org/apache/commons/compress/archivers/zip/ZipArchiveInputStreamTest.java b/src/test/java/org/apache/commons/compress/archivers/zip/ZipArchiveInputStreamTest.java index b29ac9a9279..3da6147aab2 100644 --- a/src/test/java/org/apache/commons/compress/archivers/zip/ZipArchiveInputStreamTest.java +++ b/src/test/java/org/apache/commons/compress/archivers/zip/ZipArchiveInputStreamTest.java @@ -221,8 +221,7 @@ void testGetCompressedCountEmptyZip() throws IOException { @Test void testGetFirstEntryEmptyZip() throws IOException { try (ZipArchiveInputStream zin = ZipArchiveInputStream.builder().setByteArray(ArrayUtils.EMPTY_BYTE_ARRAY).get()) { - final ZipArchiveEntry entry = zin.getNextEntry(); - assertNull(entry); + assertNull(zin.getNextEntry()); } } From 4420a9d2df5d012071d82fbb019c527e93ef59fb Mon Sep 17 00:00:00 2001 From: "Piotr P. Karwasz" Date: Sat, 18 Oct 2025 13:13:29 +0200 Subject: [PATCH 29/40] ARJ: strict header validation and selfExtracting option (#728) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * ARJ: correct byte accounting and truncation errors * `getBytesRead()` could drift from the actual archive size after a full read. * Exceptions on truncation errors were inconsistent or missing. * `DataInputStream` (big-endian) forced ad-hoc helpers for ARJ’s little-endian fields. * **Accurate byte accounting:** count all consumed bytes across main/file headers, variable strings, CRCs, extended headers, and file data. `getBytesRead()` now matches the archive length at end-of-stream. * **Consistent truncation handling:** * Truncation in the **main (archive) header**, read during construction, now throws an `ArchiveException` **wrapping** an `EOFException` (cause preserved). * Truncation in **file headers or file data** is propagated as a plain `EOFException` from `getNextEntry()`/`read()`. 
* **Endianness refactor:** replace `DataInputStream` with `EndianUtils`, removing several bespoke helpers and making intent explicit. * Add assertion that `getBytesRead()` equals the archive size after full consumption. * Parameterized truncation tests at key boundaries (signature, basic/fixed header sizes, end of fixed/basic header, CRC, extended-header length, file data) verifying the exception contract above. * fix: failing legacy test * fix: checkstyle error * fix: remove `EndianUtils` static import The static import makes it harder to distinguish calls that need to count bytes from those that do not. * ARJ: strict header validation and `selfExtracting` option Today `ArjArchiveInputStream` keeps scanning past invalid headers, assuming self-extracting stubs. That can hide corruption. This PR: * Introduces a `selfExtracting` ARJ archive option (default **false**). * **false:** no scanning; parse strictly from the first byte. Any invalid/truncated header fails fast. * **true:** scan only to locate the Main Archive Header (AMH), then switch to **strict mode**. All subsequent headers must be contiguous and valid. **Behavioral change** Previously, we might “skip over” bad data. Now we **only** allow a discovery scan for AMH (when opted in); everything after must validate or fail. * fix: simplify rethrowing * Fix failing test * Sort methods * Remove unused method * Sort members * Fix remove unused method * Extract `MAX_BASIC_HEADER_SIZE` constant * fix: exception message --- src/changes/changes.xml | 5 +- .../archivers/arj/ArjArchiveInputStream.java | 187 ++++++++++++------ .../arj/ArjArchiveInputStreamTest.java | 78 +++++++- 3 files changed, 204 insertions(+), 66 deletions(-) diff --git a/src/changes/changes.xml b/src/changes/changes.xml index 22b0182a5f9..b35baba53bf 100644 --- a/src/changes/changes.xml +++ b/src/changes/changes.xml @@ -57,7 +57,7 @@ The type attribute can be add,update,fix,remove. Don't loose precision while reading folders from a SevenZFile. 
Improve some exception messages in TarUtils and TarArchiveEntry. SevenZFile now enforces the same folder and coder limits as the CPP implementation. - +BZip2CompressorInputStream now throw CompressorException (a subclass of IOException) for invalid or corrupted data, providing more specific error reporting. BZip2 input streams treat Huffman codes longer than 20 bits as corrupted data, matching the behavior of the reference implementation. @@ -82,7 +82,8 @@ Thetype attribute can be add,update,fix,remove. Simplify handling of special AR records in ArArchiveInputStream. Correct byte accounting and truncation errors in ARJ input stream. - +Add strict header validation in ARJ input stream and `selfExtracting` option. +org.apache.commons.compress.harmony.unpack200 now throws Pack200Exception, IllegalArgumentException, and IllegalStateException instead of other runtime exceptions and Error. org.apache.commons.compress.harmony.pack200 now throws Pack200Exception, IllegalArgumentException, IllegalStateException, instead of other runtime exceptions and Error. diff --git a/src/main/java/org/apache/commons/compress/archivers/arj/ArjArchiveInputStream.java b/src/main/java/org/apache/commons/compress/archivers/arj/ArjArchiveInputStream.java index 01984c253ad..499bf59a319 100644 --- a/src/main/java/org/apache/commons/compress/archivers/arj/ArjArchiveInputStream.java +++ b/src/main/java/org/apache/commons/compress/archivers/arj/ArjArchiveInputStream.java @@ -63,6 +63,8 @@ public class ArjArchiveInputStream extends ArchiveInputStream{ */ public static final class Builder extends AbstractArchiveBuilder { + private boolean selfExtracting; + private Builder() { setCharset(ENCODING_NAME); } @@ -71,11 +73,43 @@ private Builder() { public ArjArchiveInputStream get() throws IOException { return new ArjArchiveInputStream(this); } + + /** + * Enables compatibility with self-extracting (SFX) ARJ files. 
+ * + * When {@code true}, the stream is scanned forward to locate the first + * valid ARJ main header. All bytes before that point are ignored, which + * allows reading ARJ data embedded in an executable stub.
+ * + *Caveat: this lenient pre-scan can mask corruption that + * would otherwise be reported at the start of a normal {@code .arj} file. + * Enable only when you expect an SFX input.
+ * + *Default: {@code false}.
+ * + * @param selfExtracting {@code true} if the input stream is for a self-extracting archive + * @return {@code this} instance + * @since 1.29.0 + */ + public Builder setSelfExtracting(final boolean selfExtracting) { + this.selfExtracting = selfExtracting; + return asThis(); + } } private static final String ENCODING_NAME = "CP437"; private static final int ARJ_MAGIC_1 = 0x60; private static final int ARJ_MAGIC_2 = 0xEA; + /** + * Maximum size of the basic header, in bytes. + * + *The value is taken from the reference implementation
+ */ + private static final int MAX_BASIC_HEADER_SIZE = 2600; + /** + * Minimum size of the first header (the fixed-size part of the basic header), in bytes. + */ + private static final int MIN_FIRST_HEADER_SIZE = 30; /** * Creates a new builder. @@ -98,21 +132,10 @@ public static boolean matches(final byte[] signature, final int length) { return length >= 2 && (0xff & signature[0]) == ARJ_MAGIC_1 && (0xff & signature[1]) == ARJ_MAGIC_2; } - private static void readExtraData(final int firstHeaderSize, final InputStream firstHeader, final LocalFileHeader localFileHeader) throws IOException { - if (firstHeaderSize >= 33) { - localFileHeader.extendedFilePosition = EndianUtils.readSwappedInteger(firstHeader); - if (firstHeaderSize >= 45) { - localFileHeader.dateTimeAccessed = EndianUtils.readSwappedInteger(firstHeader); - localFileHeader.dateTimeCreated = EndianUtils.readSwappedInteger(firstHeader); - localFileHeader.originalSizeEvenForVolumes = EndianUtils.readSwappedInteger(firstHeader); - } - } - } - private static int readUnsignedByte(InputStream in) throws IOException { final int value = in.read(); if (value == -1) { - throw new EOFException(); + throw new EOFException("Truncated ARJ archive: expected more data"); } return value & 0xff; } @@ -123,7 +146,7 @@ private static int readUnsignedByte(InputStream in) throws IOException { private ArjArchiveInputStream(final Builder builder) throws IOException { super(builder); - mainHeader = readMainHeader(); + mainHeader = readMainHeader(builder.selfExtracting); if ((mainHeader.arjFlags & MainHeader.Flags.GARBLED) != 0) { throw new ArchiveException("Encrypted ARJ files are unsupported"); } @@ -164,6 +187,55 @@ public boolean canReadEntryData(final ArchiveEntry ae) { return ae instanceof ArjArchiveEntry && ((ArjArchiveEntry) ae).getMethod() == LocalFileHeader.Methods.STORED; } + /** + * Verifies the CRC32 checksum of the given data against the next four bytes read from the input stream. 
+ * + * @param data The data to verify. + * @return true if the checksum matches, false otherwise. + * @throws EOFException If the end of the stream is reached before reading the checksum. + * @throws IOException If an I/O error occurs. + */ + @SuppressWarnings("Since15") + private boolean checkCRC32(final byte[] data) throws IOException { + final CRC32 crc32 = new CRC32(); + crc32.update(data); + final long expectedCrc32 = readSwappedUnsignedInteger(); + return crc32.getValue() == expectedCrc32; + } + + /** + * Scans for the next valid ARJ header. + * + * @return The header bytes. + * @throws EOFException If the end of the stream is reached before a valid header is found. + * @throws IOException If an I/O error occurs. + */ + private byte[] findMainHeader() throws IOException { + byte[] basicHeaderBytes; + try { + while (true) { + int first; + int second = readUnsignedByte(); + do { + first = second; + second = readUnsignedByte(); + } while (first != ARJ_MAGIC_1 && second != ARJ_MAGIC_2); + final int basicHeaderSize = readSwappedUnsignedShort(); + // At least two bytes are required for the null-terminated name and comment + if (MIN_FIRST_HEADER_SIZE + 2 <= basicHeaderSize && basicHeaderSize <= MAX_BASIC_HEADER_SIZE) { + basicHeaderBytes = org.apache.commons.io.IOUtils.toByteArray(in, basicHeaderSize); + count(basicHeaderSize); + if (checkCRC32(basicHeaderBytes)) { + return basicHeaderBytes; + } + } + // CRC32 failed, continue scanning + } + } catch (EOFException e) { + throw new ArchiveException("Corrupted ARJ archive: unable to find valid main header"); + } + } + /** * Gets the archive's comment. * @@ -263,33 +335,26 @@ private String readEntryName(final InputStream dataIn) throws IOException { * @throws IOException If an I/O error occurs. 
*/ private byte[] readHeader() throws IOException { - byte[] basicHeaderBytes; - // TODO: Explain why we are scanning for a valid ARJ header - // and don't throw, when an invalid/corrupted header is found, - // which might indicate a corrupted archive. - while (true) { - int first; - int second = readUnsignedByte(); - do { - first = second; - second = readUnsignedByte(); - } while (first != ARJ_MAGIC_1 && second != ARJ_MAGIC_2); - final int basicHeaderSize = readSwappedUnsignedShort(); - if (basicHeaderSize == 0) { - // end of archive - return null; - } else if (basicHeaderSize <= 2600) { - basicHeaderBytes = org.apache.commons.io.IOUtils.toByteArray(in, basicHeaderSize); - count(basicHeaderSize); - final long basicHeaderCrc32 = EndianUtils.readSwappedUnsignedInteger(in); - count(4); - final CRC32 crc32 = new CRC32(); - crc32.update(basicHeaderBytes); - if (basicHeaderCrc32 == crc32.getValue()) { - return basicHeaderBytes; - } - } + final int first = readUnsignedByte(); + final int second = readUnsignedByte(); + if (first != ARJ_MAGIC_1 || second != ARJ_MAGIC_2) { + throw new ArchiveException("Corrupted ARJ archive: invalid ARJ header signature 0x%02X 0x%02X", first, second); } + final int basicHeaderSize = readSwappedUnsignedShort(); + if (basicHeaderSize == 0) { + // End of archive + return null; + } + // At least two bytes are required for the null-terminated name and comment + if (basicHeaderSize < MIN_FIRST_HEADER_SIZE + 2 || basicHeaderSize > MAX_BASIC_HEADER_SIZE) { + throw new ArchiveException("Corrupted ARJ archive: invalid ARJ header size %,d", basicHeaderSize); + } + final byte[] basicHeaderBytes = org.apache.commons.io.IOUtils.toByteArray(in, basicHeaderSize); + count(basicHeaderSize); + if (!checkCRC32(basicHeaderBytes)) { + throw new ArchiveException("Corrupted ARJ archive: invalid ARJ header CRC32 checksum"); + } + return basicHeaderBytes; } private LocalFileHeader readLocalFileHeader() throws IOException { @@ -318,8 +383,18 @@ private 
LocalFileHeader readLocalFileHeader() throws IOException { localFileHeader.fileAccessMode = EndianUtils.readSwappedShort(firstHeader); localFileHeader.firstChapter = readUnsignedByte(firstHeader); localFileHeader.lastChapter = readUnsignedByte(firstHeader); - - readExtraData(firstHeaderSize, firstHeader, localFileHeader); + // Total read (including size byte): 10 + 4 * 4 + 2 * 2 = 30 bytes + + if (firstHeaderSize >= MIN_FIRST_HEADER_SIZE + 4) { + localFileHeader.extendedFilePosition = EndianUtils.readSwappedInteger(firstHeader); + // Total read (including size byte): 30 + 4 = 34 bytes + if (firstHeaderSize >= MIN_FIRST_HEADER_SIZE + 4 + 12) { + localFileHeader.dateTimeAccessed = EndianUtils.readSwappedInteger(firstHeader); + localFileHeader.dateTimeCreated = EndianUtils.readSwappedInteger(firstHeader); + localFileHeader.originalSizeEvenForVolumes = EndianUtils.readSwappedInteger(firstHeader); + // Total read (including size byte): 34 + 12 = 46 bytes + } + } } localFileHeader.name = readEntryName(basicHeader); @@ -331,12 +406,8 @@ private LocalFileHeader readLocalFileHeader() throws IOException { while ((extendedHeaderSize = readSwappedUnsignedShort()) > 0) { final byte[] extendedHeaderBytes = org.apache.commons.io.IOUtils.toByteArray(in, extendedHeaderSize); count(extendedHeaderSize); - final long extendedHeaderCrc32 = EndianUtils.readSwappedUnsignedInteger(in); - count(4); - final CRC32 crc32 = new CRC32(); - crc32.update(extendedHeaderBytes); - if (extendedHeaderCrc32 != crc32.getValue()) { - throw new ArchiveException("Extended header CRC32 verification failure"); + if (!checkCRC32(extendedHeaderBytes)) { + throw new ArchiveException("Corrupted ARJ archive: extended header CRC32 verification failure"); } extendedHeaders.add(extendedHeaderBytes); } @@ -345,8 +416,8 @@ private LocalFileHeader readLocalFileHeader() throws IOException { return localFileHeader; } - private MainHeader readMainHeader() throws IOException { - final byte[] basicHeaderBytes = 
readHeader(); + private MainHeader readMainHeader(final boolean selfExtracting) throws IOException { + final byte[] basicHeaderBytes = selfExtracting ? findMainHeader() : readHeader(); final MainHeader header = new MainHeader(); try (InputStream basicHeader = new ByteArrayInputStream(basicHeaderBytes)) { @@ -368,12 +439,14 @@ private MainHeader readMainHeader() throws IOException { header.securityEnvelopeLength = EndianUtils.readSwappedShort(firstHeader); header.encryptionVersion = readUnsignedByte(firstHeader); header.lastChapter = readUnsignedByte(firstHeader); + // Total read (including size byte): 10 + 4 * 4 + 2 * 2 = 30 bytes - if (firstHeaderSize >= 33) { + if (firstHeaderSize >= MIN_FIRST_HEADER_SIZE + 4) { header.arjProtectionFactor = readUnsignedByte(firstHeader); header.arjFlags2 = readUnsignedByte(firstHeader); readUnsignedByte(firstHeader); readUnsignedByte(firstHeader); + // Total read (including size byte): 30 + 4 = 34 bytes } } @@ -385,12 +458,8 @@ private MainHeader readMainHeader() throws IOException { if (extendedHeaderSize > 0) { header.extendedHeaderBytes = org.apache.commons.io.IOUtils.toByteArray(in, extendedHeaderSize); count(extendedHeaderSize); - final long extendedHeaderCrc32 = EndianUtils.readSwappedUnsignedInteger(in); - count(4); - final CRC32 crc32 = new CRC32(); - crc32.update(header.extendedHeaderBytes); - if (extendedHeaderCrc32 != crc32.getValue()) { - throw new ArchiveException("Extended header CRC32 verification failure"); + if (!checkCRC32(header.extendedHeaderBytes)) { + throw new ArchiveException("Corrupted ARJ archive: extended header CRC32 verification failure"); } } @@ -407,6 +476,12 @@ private ByteArrayOutputStream readString(final InputStream dataIn) throws IOExce } } + private long readSwappedUnsignedInteger() throws IOException { + final long value = EndianUtils.readSwappedUnsignedInteger(in); + count(4); + return value; + } + private int readSwappedUnsignedShort() throws IOException { final int value = 
EndianUtils.readSwappedUnsignedShort(in); count(2); diff --git a/src/test/java/org/apache/commons/compress/archivers/arj/ArjArchiveInputStreamTest.java b/src/test/java/org/apache/commons/compress/archivers/arj/ArjArchiveInputStreamTest.java index 8d32be9e15b..cb856e163f0 100644 --- a/src/test/java/org/apache/commons/compress/archivers/arj/ArjArchiveInputStreamTest.java +++ b/src/test/java/org/apache/commons/compress/archivers/arj/ArjArchiveInputStreamTest.java @@ -23,30 +23,71 @@ import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; import static org.junit.jupiter.api.Assertions.assertThrows; +import java.io.ByteArrayInputStream; import java.io.EOFException; import java.io.IOException; import java.io.InputStream; +import java.io.SequenceInputStream; import java.nio.charset.Charset; import java.nio.file.Files; import java.nio.file.Path; import java.util.Calendar; import java.util.TimeZone; +import java.util.stream.Stream; +import java.util.zip.CRC32; import org.apache.commons.compress.AbstractTest; +import org.apache.commons.compress.archivers.ArchiveException; +import org.apache.commons.io.EndianUtils; import org.apache.commons.io.IOUtils; import org.apache.commons.io.input.BoundedInputStream; import org.apache.commons.io.output.ByteArrayOutputStream; import org.junit.jupiter.api.Test; import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.MethodSource; import org.junit.jupiter.params.provider.ValueSource; +import org.junitpioneer.jupiter.cartesian.CartesianTest; /** * Tests {@link ArjArchiveInputStream}. 
 */ class ArjArchiveInputStreamTest extends AbstractTest { + private static byte[] createArjArchiveHeader(int size, boolean computeCrc) { + // Enough space for the fixed-size portion of the header plus: + // - signature (2 bytes) + // - the 2-byte basic header size field itself (2 bytes) + // - at least one byte each for the filename and comment C-strings (2 bytes) + // - the 4-byte CRC-32 that follows the basic header + final byte[] bytes = new byte[4 + size + 10]; + bytes[0] = (byte) 0x60; // ARJ signature + bytes[1] = (byte) 0xEA; + // Basic header size (little-endian) + EndianUtils.writeSwappedShort(bytes, 2, (short) (size + 2)); + // First header size + bytes[4] = (byte) size; + // Compute valid CRC-32 for the basic header + if (computeCrc) { + final CRC32 crc32 = new CRC32(); + crc32.update(bytes, 4, size + 2); + EndianUtils.writeSwappedInteger(bytes, 4 + size + 2, (int) crc32.getValue()); + } + return bytes; + } + + static Stream<byte[]> testSelfExtractingArchive() { + return Stream.of( + new byte[] { 0x10, 0x11, 0x12, 0x13, 0x14 }, + // In a normal context: an archive trailer. + new byte[] { 0x60, (byte) 0xEA, 0x00, 0x00 }, + // Header of valid size, but with an invalid CRC-32.
+ createArjArchiveHeader(30, false) + ); + } + private void assertArjArchiveEntry(final ArjArchiveEntry entry) { assertNotNull(entry.getName()); assertNotNull(entry.getLastModifiedDate()); @@ -83,12 +124,6 @@ private void assertForEach(final ArjArchiveInputStream archive) throws IOExcepti }); } - @Test - void testFirstHeaderSizeSetToZero() { - assertThrows(IOException.class, - () -> ArjArchiveInputStream.builder().setURI(getURI("org/apache/commons/compress/arj/zero_sized_headers-fail.arj")).get().close()); - } - @Test void testForEach() throws Exception { final StringBuilder expected = new StringBuilder(); @@ -264,6 +299,25 @@ void testReadingOfAttributesUnixVersion() throws Exception { } } + @ParameterizedTest + @MethodSource + void testSelfExtractingArchive(byte[] junk) throws Exception { + final Path validArj = getPath("bla.arj"); + try (InputStream first = new ByteArrayInputStream(junk); + InputStream second = Files.newInputStream(validArj); + SequenceInputStream seq = new SequenceInputStream(first, second); + ArjArchiveInputStream in = ArjArchiveInputStream.builder().setInputStream(seq).setSelfExtracting(true).get()) { + ArjArchiveEntry entry = in.getNextEntry(); + assertNotNull(entry); + assertEquals("test1.xml", entry.getName()); + entry = in.getNextEntry(); + assertNotNull(entry); + assertEquals("test2.xml", entry.getName()); + entry = in.getNextEntry(); + assertNull(entry); + } + } + @Test void testSingleArgumentConstructor() throws Exception { try (InputStream inputStream = Files.newInputStream(getPath("bla.arj")); @@ -283,6 +337,14 @@ void testSingleByteReadConsistentlyReturnsMinusOneAtEof() throws Exception { } } + @CartesianTest + void testSmallFirstHeaderSize( + // 30 is the minimum valid size + @CartesianTest.Values(ints = {0, 1, 10, 29}) int size, @CartesianTest.Values(booleans = {false, true}) boolean selfExtracting) { + final byte[] bytes = createArjArchiveHeader(size, true); + assertThrows(ArchiveException.class, () -> 
ArjArchiveInputStream.builder().setByteArray(bytes).setSelfExtracting(selfExtracting).get()); + } + /** * Verifies that reading an ARJ header record cut short at various boundaries * results in an {@link EOFException}. @@ -319,7 +381,7 @@ void testSingleByteReadConsistentlyReturnsMinusOneAtEof() throws Exception { // One byte before the first file’s data 0x95 }) - void testTruncatedLocalHeader(final long maxCount) throws Exception { + void testTruncatedLocalHeader(long maxCount) throws Exception { try (InputStream input = BoundedInputStream.builder().setURI(getURI("bla.arj")).setMaxCount(maxCount).get(); ArjArchiveInputStream archive = ArjArchiveInputStream.builder().setInputStream(input).get()) { assertThrows(EOFException.class, () -> { @@ -361,7 +423,7 @@ void testTruncatedLocalHeader(final long maxCount) throws Exception { 0x30, 0x33, // Inside the extended-header length (2 bytes) 0x34}) - void testTruncatedMainHeader(final long maxCount) throws Exception { + void testTruncatedMainHeader(long maxCount) throws Exception { try (InputStream input = BoundedInputStream.builder() .setURI(getURI("bla.arj")) .setMaxCount(maxCount) From dc57654edc30ad82525c8c4583b580c1de153c7d Mon Sep 17 00:00:00 2001 From: "Gary D. 
Gregory" Date: Sat, 18 Oct 2025 07:27:07 -0400 Subject: [PATCH 30/40] Javadoc - Normalize error messages - Use final - Reduce vertical whitespace --- .../archivers/arj/ArjArchiveInputStream.java | 63 ++++++++----------- .../arj/ArjArchiveInputStreamTest.java | 10 +-- 2 files changed, 32 insertions(+), 41 deletions(-) diff --git a/src/main/java/org/apache/commons/compress/archivers/arj/ArjArchiveInputStream.java b/src/main/java/org/apache/commons/compress/archivers/arj/ArjArchiveInputStream.java index 499bf59a319..6e6554f7f00 100644 --- a/src/main/java/org/apache/commons/compress/archivers/arj/ArjArchiveInputStream.java +++ b/src/main/java/org/apache/commons/compress/archivers/arj/ArjArchiveInputStream.java @@ -56,6 +56,7 @@ public class ArjArchiveInputStream extends ArchiveInputStream { * ArjArchiveInputStream in = ArjArchiveInputStream.builder() * .setPath(inputPath) * .setCharset(StandardCharsets.UTF_8) + * .setSelfExtracting(false) * .get(); * } * @@ -75,20 +76,20 @@ public ArjArchiveInputStream get() throws IOException { } /** - * Enables compatibility with self-extracting (SFX) ARJ files. + * Enables compatibility with self-extracting (SFX) ARJ files, default to {@code false}. * * When {@code true}, the stream is scanned forward to locate the first * valid ARJ main header. All bytes before that point are ignored, which * allows reading ARJ data embedded in an executable stub.
* - *Caveat: this lenient pre-scan can mask corruption that + *
Caveat: This lenient pre-scan can mask corruption that * would otherwise be reported at the start of a normal {@code .arj} file. * Enable only when you expect an SFX input.
* - *Default: {@code false}.
+ *Default to {@code false}.
* - * @param selfExtracting {@code true} if the input stream is for a self-extracting archive - * @return {@code this} instance + * @param selfExtracting {@code true} if the input stream is for a self-extracting archive. + * @return {@code this} instance. * @since 1.29.0 */ public Builder setSelfExtracting(final boolean selfExtracting) { @@ -100,12 +101,14 @@ public Builder setSelfExtracting(final boolean selfExtracting) { private static final String ENCODING_NAME = "CP437"; private static final int ARJ_MAGIC_1 = 0x60; private static final int ARJ_MAGIC_2 = 0xEA; + /** * Maximum size of the basic header, in bytes. * *The value is taken from the reference implementation
*/ private static final int MAX_BASIC_HEADER_SIZE = 2600; + /** * Minimum size of the first header (the fixed-size part of the basic header), in bytes. */ @@ -124,18 +127,18 @@ public static Builder builder() { /** * Checks if the signature matches what is expected for an arj file. * - * @param signature the bytes to check - * @param length the number of bytes to check - * @return true, if this stream is an arj archive stream, false otherwise + * @param signature the bytes to check. + * @param length the number of bytes to check. + * @return true, if this stream is an arj archive stream, false otherwise. */ public static boolean matches(final byte[] signature, final int length) { return length >= 2 && (0xff & signature[0]) == ARJ_MAGIC_1 && (0xff & signature[1]) == ARJ_MAGIC_2; } - private static int readUnsignedByte(InputStream in) throws IOException { + private static int readUnsignedByte(final InputStream in) throws IOException { final int value = in.read(); if (value == -1) { - throw new EOFException("Truncated ARJ archive: expected more data"); + throw new EOFException("Truncated ARJ archive: Expected more data"); } return value & 0xff; } @@ -158,10 +161,10 @@ private ArjArchiveInputStream(final Builder builder) throws IOException { /** * Constructs the ArjInputStream, taking ownership of the inputStream that is passed in, and using the CP437 character encoding. * - *Since 1.29.0: throws {@link IOException}.
+ *Since 1.29.0: Throws {@link IOException}.
* - * @param inputStream the underlying stream, whose ownership is taken - * @throws IOException if an exception occurs while reading + * @param inputStream the underlying stream, whose ownership is taken. + * @throws IOException if an exception occurs while reading. */ public ArjArchiveInputStream(final InputStream inputStream) throws IOException { this(builder().setInputStream(inputStream)); @@ -170,9 +173,9 @@ public ArjArchiveInputStream(final InputStream inputStream) throws IOException { /** * Constructs the ArjInputStream, taking ownership of the inputStream that is passed in. * - *Since 1.29.0: throws {@link IOException}.
+ *Since 1.29.0: Throws {@link IOException}.
* - * @param inputStream the underlying stream, whose ownership is taken + * @param inputStream the underlying stream, whose ownership is taken. * @param charsetName the charset used for file names and comments in the archive. May be {@code null} to use the platform default. * @throws IOException if an exception occurs while reading * @deprecated Since 1.29.0, use {@link #builder()}. @@ -231,8 +234,8 @@ private byte[] findMainHeader() throws IOException { } // CRC32 failed, continue scanning } - } catch (EOFException e) { - throw new ArchiveException("Corrupted ARJ archive: unable to find valid main header"); + } catch (final EOFException e) { + throw new ArchiveException("Corrupted ARJ archive: Unable to find valid main header"); } } @@ -276,7 +279,7 @@ public ArjArchiveEntry getNextEntry() throws IOException { .setAfterRead(read -> { if (read < 0) { throw new EOFException(String.format( - "Truncated ARJ archive: entry '%s' expected %,d bytes, but only %,d were read.", + "Truncated ARJ archive: Entry '%s' expected %,d bytes, but only %,d were read.", currentLocalFileHeader.name, currentLocalFileHeader.compressedSize, getBytesRead() - currentPosition @@ -328,7 +331,7 @@ private String readEntryName(final InputStream dataIn) throws IOException { } /** - * Scans for the next valid ARJ header. + * Reads the next valid ARJ header. * * @return The header bytes, or {@code null} if end of archive. * @throws EOFException If the end of the stream is reached before a valid header is found. 
@@ -338,7 +341,7 @@ private byte[] readHeader() throws IOException { final int first = readUnsignedByte(); final int second = readUnsignedByte(); if (first != ARJ_MAGIC_1 || second != ARJ_MAGIC_2) { - throw new ArchiveException("Corrupted ARJ archive: invalid ARJ header signature 0x%02X 0x%02X", first, second); + throw new ArchiveException("Corrupted ARJ archive: Invalid ARJ header signature 0x%02X 0x%02X", first, second); } final int basicHeaderSize = readSwappedUnsignedShort(); if (basicHeaderSize == 0) { @@ -347,12 +350,12 @@ private byte[] readHeader() throws IOException { } // At least two bytes are required for the null-terminated name and comment if (basicHeaderSize < MIN_FIRST_HEADER_SIZE + 2 || basicHeaderSize > MAX_BASIC_HEADER_SIZE) { - throw new ArchiveException("Corrupted ARJ archive: invalid ARJ header size %,d", basicHeaderSize); + throw new ArchiveException("Corrupted ARJ archive: Invalid ARJ header size %,d", basicHeaderSize); } final byte[] basicHeaderBytes = org.apache.commons.io.IOUtils.toByteArray(in, basicHeaderSize); count(basicHeaderSize); if (!checkCRC32(basicHeaderBytes)) { - throw new ArchiveException("Corrupted ARJ archive: invalid ARJ header CRC32 checksum"); + throw new ArchiveException("Corrupted ARJ archive: Invalid ARJ header CRC32 checksum"); } return basicHeaderBytes; } @@ -364,10 +367,8 @@ private LocalFileHeader readLocalFileHeader() throws IOException { } final LocalFileHeader localFileHeader = new LocalFileHeader(); try (InputStream basicHeader = new ByteArrayInputStream(basicHeaderBytes)) { - final int firstHeaderSize = readUnsignedByte(basicHeader); try (InputStream firstHeader = BoundedInputStream.builder().setInputStream(basicHeader).setMaxCount(firstHeaderSize - 1).get()) { - localFileHeader.archiverVersionNumber = readUnsignedByte(firstHeader); localFileHeader.minVersionToExtract = readUnsignedByte(firstHeader); localFileHeader.hostOS = readUnsignedByte(firstHeader); @@ -384,7 +385,6 @@ private LocalFileHeader 
readLocalFileHeader() throws IOException { localFileHeader.firstChapter = readUnsignedByte(firstHeader); localFileHeader.lastChapter = readUnsignedByte(firstHeader); // Total read (including size byte): 10 + 4 * 4 + 2 * 2 = 30 bytes - if (firstHeaderSize >= MIN_FIRST_HEADER_SIZE + 4) { localFileHeader.extendedFilePosition = EndianUtils.readSwappedInteger(firstHeader); // Total read (including size byte): 30 + 4 = 34 bytes @@ -396,23 +396,20 @@ private LocalFileHeader readLocalFileHeader() throws IOException { } } } - localFileHeader.name = readEntryName(basicHeader); localFileHeader.comment = readComment(basicHeader); } - final ArrayList<byte[]> extendedHeaders = new ArrayList<>(); int extendedHeaderSize; while ((extendedHeaderSize = readSwappedUnsignedShort()) > 0) { final byte[] extendedHeaderBytes = org.apache.commons.io.IOUtils.toByteArray(in, extendedHeaderSize); count(extendedHeaderSize); if (!checkCRC32(extendedHeaderBytes)) { - throw new ArchiveException("Corrupted ARJ archive: extended header CRC32 verification failure"); + throw new ArchiveException("Corrupted ARJ archive: Extended header CRC32 verification failure"); } extendedHeaders.add(extendedHeaderBytes); } localFileHeader.extendedHeaders = extendedHeaders.toArray(new byte[0][]); - return localFileHeader; } @@ -420,10 +417,8 @@ private MainHeader readMainHeader(final boolean selfExtracting) throws IOExcepti final byte[] basicHeaderBytes = selfExtracting ?
findMainHeader() : readHeader(); final MainHeader header = new MainHeader(); try (InputStream basicHeader = new ByteArrayInputStream(basicHeaderBytes)) { - final int firstHeaderSize = readUnsignedByte(basicHeader); try (InputStream firstHeader = BoundedInputStream.builder().setInputStream(basicHeader).setMaxCount(firstHeaderSize - 1).get()) { - header.archiverVersionNumber = readUnsignedByte(firstHeader); header.minVersionToExtract = readUnsignedByte(firstHeader); header.hostOS = readUnsignedByte(firstHeader); @@ -440,7 +435,6 @@ private MainHeader readMainHeader(final boolean selfExtracting) throws IOExcepti header.encryptionVersion = readUnsignedByte(firstHeader); header.lastChapter = readUnsignedByte(firstHeader); // Total read (including size byte): 10 + 4 * 4 + 2 * 2 = 30 bytes - if (firstHeaderSize >= MIN_FIRST_HEADER_SIZE + 4) { header.arjProtectionFactor = readUnsignedByte(firstHeader); header.arjFlags2 = readUnsignedByte(firstHeader); @@ -449,20 +443,17 @@ private MainHeader readMainHeader(final boolean selfExtracting) throws IOExcepti // Total read (including size byte): 30 + 4 = 34 bytes } } - header.name = readEntryName(basicHeader); header.comment = readComment(basicHeader); } - final int extendedHeaderSize = readSwappedUnsignedShort(); if (extendedHeaderSize > 0) { header.extendedHeaderBytes = org.apache.commons.io.IOUtils.toByteArray(in, extendedHeaderSize); count(extendedHeaderSize); if (!checkCRC32(header.extendedHeaderBytes)) { - throw new ArchiveException("Corrupted ARJ archive: extended header CRC32 verification failure"); + throw new ArchiveException("Corrupted ARJ archive: Extended header CRC32 verification failure"); } } - return header; } diff --git a/src/test/java/org/apache/commons/compress/archivers/arj/ArjArchiveInputStreamTest.java b/src/test/java/org/apache/commons/compress/archivers/arj/ArjArchiveInputStreamTest.java index cb856e163f0..7fdca647836 100644 --- 
a/src/test/java/org/apache/commons/compress/archivers/arj/ArjArchiveInputStreamTest.java +++ b/src/test/java/org/apache/commons/compress/archivers/arj/ArjArchiveInputStreamTest.java @@ -56,7 +56,7 @@ */ class ArjArchiveInputStreamTest extends AbstractTest { - private static byte[] createArjArchiveHeader(int size, boolean computeCrc) { + private static byte[] createArjArchiveHeader(final int size, final boolean computeCrc) { // Enough space for the fixed-size portion of the header plus: // - signature (2 bytes) // - the 2-byte basic header size field itself (2 bytes) @@ -301,7 +301,7 @@ void testReadingOfAttributesUnixVersion() throws Exception { @ParameterizedTest @MethodSource - void testSelfExtractingArchive(byte[] junk) throws Exception { + void testSelfExtractingArchive(final byte[] junk) throws Exception { final Path validArj = getPath("bla.arj"); try (InputStream first = new ByteArrayInputStream(junk); InputStream second = Files.newInputStream(validArj); @@ -340,7 +340,7 @@ void testSingleByteReadConsistentlyReturnsMinusOneAtEof() throws Exception { @CartesianTest void testSmallFirstHeaderSize( // 30 is the minimum valid size - @CartesianTest.Values(ints = {0, 1, 10, 29}) int size, @CartesianTest.Values(booleans = {false, true}) boolean selfExtracting) { + @CartesianTest.Values(ints = {0, 1, 10, 29}) final int size, @CartesianTest.Values(booleans = {false, true}) final boolean selfExtracting) { final byte[] bytes = createArjArchiveHeader(size, true); assertThrows(ArchiveException.class, () -> ArjArchiveInputStream.builder().setByteArray(bytes).setSelfExtracting(selfExtracting).get()); } @@ -381,7 +381,7 @@ void testSmallFirstHeaderSize( // One byte before the first file’s data 0x95 }) - void testTruncatedLocalHeader(long maxCount) throws Exception { + void testTruncatedLocalHeader(final long maxCount) throws Exception { try (InputStream input = BoundedInputStream.builder().setURI(getURI("bla.arj")).setMaxCount(maxCount).get(); ArjArchiveInputStream archive = 
ArjArchiveInputStream.builder().setInputStream(input).get()) { assertThrows(EOFException.class, () -> { @@ -423,7 +423,7 @@ void testTruncatedLocalHeader(long maxCount) throws Exception { 0x30, 0x33, // Inside the extended-header length (2 bytes) 0x34}) - void testTruncatedMainHeader(long maxCount) throws Exception { + void testTruncatedMainHeader(final long maxCount) throws Exception { try (InputStream input = BoundedInputStream.builder() .setURI(getURI("bla.arj")) .setMaxCount(maxCount) From f1830f923ffa143d4d6527a3975de782a1de3453 Mon Sep 17 00:00:00 2001 From: "Piotr P. Karwasz" Date: Sat, 18 Oct 2025 13:37:48 +0200 Subject: [PATCH 31/40] 7z: unsigned number parsing and improved header validation (#734) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * 7z: unsigned number parsing and improved header validation The 7z file format specification defines only **unsigned numbers** (`UINT64`, `REAL_UINT64`, `UINT32`). However, the current implementation allows parsing methods like `readUint64`, `getLong`, and `getInt` to return negative values and then handles those inconsistently in downstream logic. This PR introduces a safer and more specification-compliant number parsing model. ### Key changes * **Strict unsigned number parsing** * Parsing methods now *never* return negative numbers. * `readUint64`, `readUint64ToIntExact`, `readRealUint64`, and `readUint32` follow the terminology from `7zFormat.txt`. * Eliminates scattered negative-value checks that previously compensated for parsing issues. * **Improved header integrity validation** * Before large allocations, the size is now validated against the **actual available data in the header** as well as the memory limit. * Prevents unnecessary or unsafe allocations when the archive is corrupted or truncated. * **Correct numeric type usage** * Some fields represent 7z numbers as 64-bit values but are constrained internally to Java `int` limits. 
* These are now declared as `int` to signal real constraints in our implementation. * **Consistent error handling** Parsing now throws only three well-defined exception types: | Condition | Exception | | ---------------------------------------------------------------------- | -------------------------------------------- | | Declared structure exceeds `maxMemoryLimitKiB` | `MemoryLimitException` | | Missing data inside header (truncated or corrupt) | `ArchiveException("Corrupted 7z archive")` | | Unsupported numeric values (too large for implementation) | `ArchiveException("Unsupported 7z archive")` | Note: `EOFException` is no longer used: a header with missing fields is not “EOF,” it is **corrupted**. This PR lays groundwork for safer parsing and easier future maintenance by aligning number handling with the actual 7z specification and making header parsing behavior *predictable and robust*. * fix: explain 3 bytes for Folder * fix: comment memory check again * fix: remove unused import --------- Co-authored-by: Gary Gregory --- src/changes/changes.xml | 3 +- .../compress/archivers/ArchiveException.java | 18 - .../compress/archivers/sevenz/SevenZFile.java | 594 ++++++++++-------- .../archivers/sevenz/StartHeader.java | 8 +- .../archivers/sevenz/SubStreamsInfo.java | 17 +- .../archivers/sevenz/SevenZFileTest.java | 104 +++ 6 files changed, 468 insertions(+), 276 deletions(-) diff --git a/src/changes/changes.xml b/src/changes/changes.xml index b35baba53bf..6a3517de87f 100644 --- a/src/changes/changes.xml +++ b/src/changes/changes.xml @@ -57,7 +57,8 @@ The type attribute can be add,update,fix,remove. Don't loose precision while reading folders from a SevenZFile. Improve some exception messages in TarUtils and TarArchiveEntry. SevenZFile now enforces the same folder and coder limits as the CPP implementation. - +Refactor unsigned number parsing and header validation in SevenZFile. 
+BZip2CompressorInputStream now throw CompressorException (a subclass of IOException) for invalid or corrupted data, providing more specific error reporting. BZip2 input streams treat Huffman codes longer than 20 bits as corrupted data, matching the behavior of the reference implementation. diff --git a/src/main/java/org/apache/commons/compress/archivers/ArchiveException.java b/src/main/java/org/apache/commons/compress/archivers/ArchiveException.java index 505183077db..a2c4c0a31c4 100644 --- a/src/main/java/org/apache/commons/compress/archivers/ArchiveException.java +++ b/src/main/java/org/apache/commons/compress/archivers/ArchiveException.java @@ -33,24 +33,6 @@ public class ArchiveException extends CompressException { /** Serial. */ private static final long serialVersionUID = 2772690708123267100L; - /** - * Delegates to {@link Math#addExact(int, int)} wrapping its {@link ArithmeticException} in our {@link ArchiveException}. - * - * @param x the first value. - * @param y the second value. - * @return the result. - * @throws ArchiveException if the result overflows an {@code int}. - * @see Math#addExact(int, int) - * @since 1.29.0 - */ - public static int addExact(final int x, final int y) throws ArchiveException { - try { - return Math.addExact(x, y); - } catch (final ArithmeticException e) { - throw new ArchiveException(e); - } - } - /** * Delegates to {@link Math#addExact(int, int)} wrapping its {@link ArithmeticException} in our {@link ArchiveException}. 
* diff --git a/src/main/java/org/apache/commons/compress/archivers/sevenz/SevenZFile.java b/src/main/java/org/apache/commons/compress/archivers/sevenz/SevenZFile.java index 112e5fe5f7a..9f0de97efad 100644 --- a/src/main/java/org/apache/commons/compress/archivers/sevenz/SevenZFile.java +++ b/src/main/java/org/apache/commons/compress/archivers/sevenz/SevenZFile.java @@ -161,7 +161,7 @@ private long streamMapSize() { @Override public String toString() { return String.format("Archive with %,d entries in %,d folders, estimated size %,d KiB.", numberOfEntries, numberOfFolders, - kbToKiB(estimateSizeBytes())); + bytesToKiB(estimateSizeBytes())); } } @@ -226,7 +226,7 @@ public Builder setDefaultName(final String defaultName) { * @return {@code this} instance. */ public Builder setMaxMemoryLimitKb(final int maxMemoryLimitKb) { - this.maxMemoryLimitKiB = kbToKiB(maxMemoryLimitKb); + this.maxMemoryLimitKiB = maxMemoryLimitKb * 1000 / 1024; return this; } @@ -363,6 +363,51 @@ public Builder setUseDefaultNameForUnnamedEntries(final boolean useDefaultNameFo */ private static final int MAX_CODER_STREAMS_PER_FOLDER = 64; + /** Minimum number of bytes a 7z UINT64 can occupy. */ + private static final long MIN_UINT64_BYTES = 1L; + + /** Number of bytes a 7z UINT32 occupies. */ + private static final long UINT32_BYTES = 4L; + + /** Number of bytes a 7z REAL_UINT64 occupies. */ + private static final long REAL_UINT64_BYTES = 8L; + + /** + * Computes a partial count or sum of 7z objects, throwing ArchiveException if any limit is exceeded. 
+ * + * @param sum current sum + * @param y second integer + * @param description description of the value being added, for error messages + * @return the new sum + * @throws ArchiveException if the sum overflows an int + */ + private static int accumulate(final int sum, final int y, final String description) throws ArchiveException { + try { + return Math.addExact(sum, y); + } catch (final ArithmeticException e) { + throw new ArchiveException("Unsupported 7-Zip archive: cannot handle more than %,d %s, but %,d present", Integer.MAX_VALUE, description, + Long.sum(sum, y)); + } + } + + /** + * Computes a partial count or sum of 7z objects, throwing ArchiveException if any limit is exceeded. + * + * @param sum current sum + * @param y second integer + * @param description description of the value being added, for error messages + * @return the new sum + * @throws ArchiveException if the sum overflows an int + */ + private static long accumulate(final long sum, final long y, final String description) throws ArchiveException { + try { + return Math.addExact(sum, y); + } catch (final ArithmeticException e) { + throw new ArchiveException("Unsupported 7-Zip archive: cannot handle more than %,d %s, but %,d present", Integer.MAX_VALUE, description, + Long.sum(sum, y)); + } + } + /** * Creates a new Builder. * @@ -377,46 +422,47 @@ static long bytesToKiB(final long bytes) { return bytes / 1024; } - private static ByteBuffer checkEndOfFile(final ByteBuffer buf, final int expectRemaining) throws EOFException { - final int remaining = buf.remaining(); - if (remaining < expectRemaining) { - throw new EOFException(String.format("remaining %,d < expectRemaining %,d", remaining, expectRemaining)); + /** + * Checks that there are at least {@code expectRemaining} bytes remaining in the header. + * + * @param header The buffer containing the 7z header. + * @param expectRemaining The number of bytes expected to be remaining. + * @return {@code header} for easy chaining. 
+ * @throws ArchiveException if there are not enough bytes remaining, implying that the 7z header is incomplete or corrupted. + */ + private static ByteBuffer ensureRemaining(final ByteBuffer header, final long expectRemaining) throws ArchiveException { + if (expectRemaining > header.remaining()) { + throw new ArchiveException("Corrupted 7z archive: expecting %,d bytes, remaining header size %,d", expectRemaining, header.remaining()); } - return buf; + return header; } - private static void get(final ByteBuffer buf, final byte[] to) throws EOFException { - checkEndOfFile(buf, to.length).get(to); - } - - private static int getInt(final ByteBuffer buf) throws EOFException { - return checkEndOfFile(buf, Integer.BYTES).getInt(); - } - - private static long getLong(final ByteBuffer buf) throws EOFException { - return checkEndOfFile(buf, Long.BYTES).getLong(); + /** + * Wrapper of {@link ByteBuffer#get(byte[])} that checks remaining bytes first. + */ + private static void get(final ByteBuffer buf, final byte[] to) throws ArchiveException { + ensureRemaining(buf, to.length).get(to); } /** - * Gets the next unsigned byte as an int. - * - * @param buf the byte source. - * @return the next unsigned byte as an int. - * @throws EOFException Thrown if the given buffer doesn't have a remaining byte. + * Wrapper of {@link ByteBuffer#getInt()} that checks remaining bytes first. */ - private static int getUnsignedByte(final ByteBuffer buf) throws EOFException { - if (!buf.hasRemaining()) { - throw new EOFException(); - } - return buf.get() & 0xff; + private static int getInt(final ByteBuffer buf) throws ArchiveException { + return ensureRemaining(buf, Integer.BYTES).getInt(); } - private static int kbToKiB(final int kilobytes) { - return kilobytes * 1000 / 1024; + /** + * Wrapper of {@link ByteBuffer#getLong()} that checks remaining bytes first. 
+ */ + private static long getLong(final ByteBuffer buf) throws ArchiveException { + return ensureRemaining(buf, Long.BYTES).getLong(); } - static long kbToKiB(final long kilobytes) { - return kilobytes * 1000 / 1024; + /** + * Checks remaining bytes and reads one unsigned byte. + */ + private static int getUnsignedByte(final ByteBuffer header) throws ArchiveException { + return Byte.toUnsignedInt(ensureRemaining(header, Byte.BYTES).get()); } /** @@ -431,50 +477,131 @@ public static boolean matches(final byte[] buffer, final int ignored) { return ArrayUtils.startsWith(buffer, SIGNATURE); } - private static long readUint64(final ByteBuffer in) throws IOException { + /** + * Reads the size of a header field and validates that it is not larger than the remaining bytes in the header buffer. + * + * @param header the buffer containing the 7z header. + * @return a non-negative int. + * @throws ArchiveException if the value is truncated, too large, or exceeds the remaining bytes in the header buffer. + */ + static int readFieldSize(final ByteBuffer header) throws ArchiveException { + final long propertySize = readUint64(header); + ensureRemaining(header, propertySize); + // propertySize is not larger than header.remaining() which is an int + return (int) propertySize; + } + + /** + * Reads a 7z REAL_UINT64 from the stream. + * + * @param inputStream the input stream containing the 7z header. + * @return a non-negative long. + * @throws ArchiveException if the value is truncated or too large. + */ + static long readRealUint64(final DataInputStream inputStream) throws IOException { + final long value = Long.reverseBytes(inputStream.readLong()); + if (value < 0) { + throw new ArchiveException("Unsupported 7-Zip archive: cannot handle integer larger then %d, but was %s", Integer.MAX_VALUE, + Long.toUnsignedString(value)); + } + return value; + } + + /** + * Reads a 7z UINT32 from the header. + * + * @param header the buffer containing the 7z header. 
+ * @return a non-negative long. + * @throws ArchiveException if the value is truncated. + */ + static long readUint32(final ByteBuffer header) throws ArchiveException { + return Integer.toUnsignedLong(getInt(header)); + } + + + /** + * Reads a 7z UINT32 from the stream. + * + * @param inputStream the input stream containing the 7z header. + * @return a non-negative long. + * @throws ArchiveException if the value is truncated. + */ + static long readUint32(final DataInputStream inputStream) throws IOException { + return Integer.toUnsignedLong(Integer.reverseBytes(inputStream.readInt())); + } + + /** + * Reads a 7z UINT64 from the header. + * + * @param header the buffer containing the 7z header. + * @return a non-negative long. + * @throws ArchiveException if the value is truncated or too large. + */ + static long readUint64(final ByteBuffer header) throws ArchiveException { // long rather than int as it might get shifted beyond the range of an int - final long firstByte = getUnsignedByte(in); + final long firstByte = getUnsignedByte(header); int mask = 0x80; long value = 0; for (int i = 0; i < 8; i++) { if ((firstByte & mask) == 0) { - return value | (firstByte & mask - 1) << 8 * i; + value |= (firstByte & mask - 1) << 8 * i; + break; } - final long nextByte = getUnsignedByte(in); + final long nextByte = getUnsignedByte(header); value |= nextByte << 8 * i; mask >>>= 1; } + if (value < 0) { + throw new ArchiveException("Unsupported 7-Zip archive: can not handle integer values larger than %,d", Long.MAX_VALUE); + } return value; } - private static int readUint64ToIntExact(final ByteBuffer in) throws IOException { - return ArchiveException.toIntExact(readUint64(in)); + /** + * Reads a 7z UINT64 from the header. + * + *If the value is used as the length of a header field, use {@link #readFieldSize} instead, which also validates it against the number of remaining + * bytes in the header.
+ * + * @param header the buffer containing the 7z header. + * @return a non-negative int. + * @throws ArchiveException if the value is truncated or too large. + * @see #readFieldSize(ByteBuffer) + */ + private static int readUint64ToIntExact(final ByteBuffer header, final String description) throws ArchiveException { + final long value = readUint64(header); + // Values larger than Integer.MAX_VALUE are not formally forbidden, but we cannot handle them. + if (value > Integer.MAX_VALUE) { + throw new ArchiveException("Unsupported 7-Zip archive: cannot handle %s larger then %,d, but was %,d", description, Integer.MAX_VALUE, value); + } + return (int) value; } - private static long skipBytesFully(final ByteBuffer input, long bytesToSkip) { - if (bytesToSkip < 1) { - return 0; - } - final int current = input.position(); - final int maxSkip = input.remaining(); - if (maxSkip < bytesToSkip) { - bytesToSkip = maxSkip; - } - input.position(current + (int) bytesToSkip); - return bytesToSkip; + /** + * Skips the given number of bytes of an unsupported property. + * + * @param header the 7z header buffer. + * @param propertySize the number of bytes to skip. + * @throws ArchiveException if the property size exceeds the remaining bytes in the header buffer. + */ + private static void skipBytesFully(final ByteBuffer header, final long propertySize) throws ArchiveException { + // propertySize is not larger than header.remaining(), which is an int + ensureRemaining(header, propertySize).position(header.position() + (int) propertySize); } /** - * Throws IOException if the given value is not in {@code [0, Integer.MAX_VALUE]}. + * Throws ArchiveException if the given value is not in {@code [0, Integer.MAX_VALUE]}. * - * @param description A description for the IOException. - * @param value The value to check. + * @param description A description for the exception. + * @param value The value to check, interpreted as unsigned. * @return The given value as an int. 
- * @throws IOException Thrown if the given value is not in {@code [0, Integer.MAX_VALUE]}. + * @throws ArchiveException Thrown if the given value is not in {@code [0, Integer.MAX_VALUE]}. */ - private static int toNonNegativeInt(final String description, final long value) throws IOException { - if (value > Integer.MAX_VALUE || value < 0) { - throw new ArchiveException("Cannot handle %s %,d", description, value); + private static int toNonNegativeInt(final String description, final long value) throws ArchiveException { + assert value >= 0 : "value is supposed to be non-negative"; + if (value > Integer.MAX_VALUE) { + throw new ArchiveException("Unsupported 7-Zip archive: cannot handle %s larger than %d, but was %s", description, Integer.MAX_VALUE, + Long.toUnsignedString(value)); } return (int) value; } @@ -769,7 +896,7 @@ private InputStream buildDecoderStack(final Folder folder, final long folderOffs InputStream inputStreamStack = new FilterInputStream( new BufferedInputStream(new BoundedSeekableByteChannelInputStream(channel, archive.packSizes[firstPackStreamIndex]))) { private void count(final int c) throws ArchiveException { - compressedBytesReadFromCurrentEntry = ArchiveException.addExact(compressedBytesReadFromCurrentEntry, c); + compressedBytesReadFromCurrentEntry = accumulate(compressedBytesReadFromCurrentEntry, c, "compressed bytes read from current entry"); } @Override @@ -901,21 +1028,21 @@ private void buildDecodingStream(final int entryIndex, final boolean isRandomAcc private void calculateStreamMap(final Archive archive) throws IOException { int nextFolderPackStreamIndex = 0; - final int numFolders = ArrayUtils.getLength(archive.folders); - final int[] folderFirstPackStreamIndex = new int[checkIntArray(numFolders)]; + final int numFolders = archive.folders.length; + final int[] folderFirstPackStreamIndex = intArray(numFolders); for (int i = 0; i < numFolders; i++) { folderFirstPackStreamIndex[i] = nextFolderPackStreamIndex; - nextFolderPackStreamIndex 
= ArchiveException.addExact(nextFolderPackStreamIndex, archive.folders[i].packedStreams.length); + nextFolderPackStreamIndex = accumulate(nextFolderPackStreamIndex, archive.folders[i].packedStreams.length, "nextFolderPackStreamIndex"); } long nextPackStreamOffset = 0; final int numPackSizes = archive.packSizes.length; - final long[] packStreamOffsets = new long[checkLongArray(numPackSizes)]; + final long[] packStreamOffsets = longArray(numPackSizes); for (int i = 0; i < numPackSizes; i++) { packStreamOffsets[i] = nextPackStreamOffset; - nextPackStreamOffset = ArchiveException.addExact(nextPackStreamOffset, archive.packSizes[i]); + nextPackStreamOffset = accumulate(nextPackStreamOffset, archive.packSizes[i], "nextPackStreamOffset"); } - final int[] folderFirstFileIndex = new int[checkIntArray(numFolders)]; - final int[] fileFolderIndex = new int[checkIntArray(archive.files.length)]; + final int[] folderFirstFileIndex = intArray(numFolders); + final int[] fileFolderIndex = intArray(archive.files.length); int nextFolderIndex = 0; int nextFolderUnpackStreamIndex = 0; for (int i = 0; i < archive.files.length; i++) { @@ -947,26 +1074,6 @@ private void calculateStreamMap(final Archive archive) throws IOException { archive.streamMap = new StreamMap(folderFirstPackStreamIndex, packStreamOffsets, folderFirstFileIndex, fileFolderIndex); } - int checkByteArray(final int size) throws MemoryLimitException { - MemoryLimitException.checkKiB(bytesToKiB(size * Byte.BYTES), maxMemoryLimitKiB); - return size; - } - - int checkIntArray(final int size) throws MemoryLimitException { - MemoryLimitException.checkKiB(bytesToKiB(size * Integer.BYTES), maxMemoryLimitKiB); - return size; - } - - int checkLongArray(final int size) throws MemoryLimitException { - MemoryLimitException.checkKiB(bytesToKiB(size * Long.BYTES), maxMemoryLimitKiB); - return size; - } - - int checkObjectArray(final int size) throws MemoryLimitException { - MemoryLimitException.checkKiB(bytesToKiB(size * 4), 
maxMemoryLimitKiB); // assume compressed pointer - return size; - } - /** * Closes the archive. * @@ -1144,14 +1251,13 @@ private boolean hasCurrentEntryBeenRead() { } private Archive initializeArchive(final StartHeader startHeader, final byte[] password, final boolean verifyCrc) throws IOException { - final int nextHeaderSizeInt = toNonNegativeInt("startHeader.nextHeaderSize", startHeader.nextHeaderSize); - MemoryLimitException.checkKiB(bytesToKiB(nextHeaderSizeInt), Math.min(bytesToKiB(org.apache.commons.io.IOUtils.SOFT_MAX_ARRAY_LENGTH), + MemoryLimitException.checkKiB(bytesToKiB(startHeader.nextHeaderSize), Math.min(bytesToKiB(org.apache.commons.io.IOUtils.SOFT_MAX_ARRAY_LENGTH), maxMemoryLimitKiB)); channel.position(SIGNATURE_HEADER_SIZE + startHeader.nextHeaderOffset); if (verifyCrc) { final long position = channel.position(); final CheckedInputStream cis = new CheckedInputStream(Channels.newInputStream(channel), new CRC32()); - if (cis.skip(nextHeaderSizeInt) != nextHeaderSizeInt) { + if (cis.skip(startHeader.nextHeaderSize) != startHeader.nextHeaderSize) { throw new ArchiveException("Problem computing NextHeader CRC-32"); } if (startHeader.nextHeaderCrc != cis.getChecksum().getValue()) { @@ -1160,7 +1266,7 @@ private Archive initializeArchive(final StartHeader startHeader, final byte[] pa channel.position(position); } Archive archive = new Archive(); - ByteBuffer buf = ByteBuffer.allocate(nextHeaderSizeInt).order(ByteOrder.LITTLE_ENDIAN); + ByteBuffer buf = ByteBuffer.allocate(startHeader.nextHeaderSize).order(ByteOrder.LITTLE_ENDIAN); readFully(buf); int nid = getUnsignedByte(buf); if (nid == NID.kEncodedHeader) { @@ -1177,6 +1283,30 @@ private Archive initializeArchive(final StartHeader startHeader, final byte[] pa return archive; } + /** + * Creates an int array while checking memory limits. 
+ * + * @param size the size of the array + * @return the int array + * @throws MemoryLimitException if memory limit is exceeded + */ + private int[] intArray(final int size) throws MemoryLimitException { + MemoryLimitException.checkKiB(bytesToKiB((long) size * Integer.BYTES), maxMemoryLimitKiB); + return new int[size]; + } + + /** + * Creates a long array while checking memory limits. + * + * @param size the size of the array + * @return the long array + * @throws MemoryLimitException if memory limit is exceeded + */ + private long[] longArray(final int size) throws MemoryLimitException { + MemoryLimitException.checkKiB(bytesToKiB((long) size * Long.BYTES), maxMemoryLimitKiB); + return new long[size]; + } + /** * Reads a byte of data. * @@ -1219,7 +1349,7 @@ public int read(final byte[] b, final int off, final int len) throws IOException @SuppressWarnings("resource") // does not allocate final int current = getCurrentStream().read(b, off, len); if (current > 0) { - uncompressedBytesReadFromCurrentEntry = ArchiveException.addExact(uncompressedBytesReadFromCurrentEntry, current); + uncompressedBytesReadFromCurrentEntry = accumulate(uncompressedBytesReadFromCurrentEntry, current, "uncompressedBytesReadFromCurrentEntry"); } return current; } @@ -1238,18 +1368,19 @@ private BitSet readAllOrBits(final ByteBuffer header, final int size) throws IOE return bits; } - private void readArchiveProperties(final ByteBuffer input) throws IOException { + private void readArchiveProperties(final ByteBuffer header) throws IOException { // FIXME: the reference implementation just throws them away? 
- long nid = readUint64(input); + long nid = readUint64(header); while (nid != NID.kEnd) { - final int propertySize = readUint64ToIntExact(input); - final byte[] property = new byte[checkByteArray(propertySize)]; - get(input, property); - nid = readUint64(input); + // We validate the size but ignore the value + final int propertySize = readFieldSize(header); + skipBytesFully(header, propertySize); + nid = readUint64(header); } } private BitSet readBits(final ByteBuffer header, final int size) throws IOException { + ensureRemaining(header, (size + 7) / 8); final BitSet bits = new BitSet(size); int mask = 0; int cache = 0; @@ -1310,7 +1441,7 @@ private ByteBuffer readEncodedHeader(final ByteBuffer header, final Archive arch } private void readFilesInfo(final ByteBuffer header, final Archive archive) throws IOException { - final int numFilesInt = readUint64ToIntExact(header); + final int numFilesInt = readUint64ToIntExact(header, "numFiles"); final MapfileMap = new LinkedHashMap<>(); BitSet isEmptyStream = null; BitSet isEmptyFile = null; @@ -1318,14 +1449,11 @@ private void readFilesInfo(final ByteBuffer header, final Archive archive) throw final int originalLimit = header.limit(); while (true) { final int propertyType = getUnsignedByte(header); - if (propertyType == 0) { + if (propertyType == NID.kEnd) { break; } - final int size = readUint64ToIntExact(header); - if (size < 0 || size > header.remaining()) { - throw new ArchiveException("Corrupted 7z archive: property size %,d, but only %,d bytes available", size, header.remaining()); - } - // Limit the buffer to the size of the property + final int size = readFieldSize(header); + // Limit the buffer to the size of the property, so we don't read beyond it header.limit(header.position() + size); switch (propertyType) { case NID.kEmptyStream: { @@ -1426,6 +1554,10 @@ private void readFilesInfo(final ByteBuffer header, final Archive archive) throw break; } } + // We should have consumed all the bytes by now + if 
(header.remaining() > 0) { + throw new ArchiveException("Unsupported 7z archive: property 0x%02x has %d trailing bytes.", propertyType, header.remaining()); + } // Restore original limit header.limit(originalLimit); } @@ -1446,9 +1578,6 @@ private void readFilesInfo(final ByteBuffer header, final Archive archive) throw entryAtIndex.setHasCrc(archive.subStreamsInfo.hasCrc.get(nonEmptyFileCounter)); entryAtIndex.setCrcValue(archive.subStreamsInfo.crcs[nonEmptyFileCounter]); entryAtIndex.setSize(archive.subStreamsInfo.unpackSizes[nonEmptyFileCounter]); - if (entryAtIndex.getSize() < 0) { - throw new ArchiveException("Broken archive, entry with negative size"); - } ++nonEmptyFileCounter; } else { entryAtIndex.setDirectory(isEmptyFile == null || !isEmptyFile.get(emptyFileCounter)); @@ -1477,8 +1606,7 @@ Folder readFolder(final ByteBuffer header) throws IOException { final boolean isSimple = (bits & 0x10) == 0; final boolean hasAttributes = (bits & 0x20) != 0; final boolean moreAlternativeMethods = (bits & 0x80) != 0; - final byte[] decompressionMethodId = new byte[idSize]; - get(header, decompressionMethodId); + final byte[] decompressionMethodId = toByteArray(header, idSize); final long numInStreams; final long numOutStreams; if (isSimple) { @@ -1501,9 +1629,8 @@ Folder readFolder(final ByteBuffer header) throws IOException { totalOutStreams += (int) numOutStreams; byte[] properties = null; if (hasAttributes) { - final long propertiesSize = readUint64(header); - properties = new byte[checkByteArray(ArchiveException.toIntExact(propertiesSize))]; - get(header, properties); + final int propertiesSize = readFieldSize(header); + properties = toByteArray(header, propertiesSize); } // would need to keep looping as above: if (moreAlternativeMethods) { @@ -1596,7 +1723,7 @@ private Archive readHeaders(final byte[] password) throws IOException { throw new ArchiveException("Unsupported 7z version (%d,%d)", archiveVersionMajor, archiveVersionMinor); } boolean headerLooksValid = 
false; // See https://www.7-zip.org/recover.html - "There is no correct End Header at the end of archive" - final long startHeaderCrc = 0xffffFFFFL & buf.getInt(); + final long startHeaderCrc = readUint32(buf); if (startHeaderCrc == 0) { // This is an indication of a corrupt header - peek the next 20 bytes final long currentPosition = channel.position(); @@ -1626,10 +1753,11 @@ private Archive readHeaders(final byte[] password) throws IOException { private void readPackInfo(final ByteBuffer header, final Archive archive) throws IOException { archive.packPos = readUint64(header); - final int numPackStreamsInt = readUint64ToIntExact(header); + final int numPackStreamsInt = readUint64ToIntExact(header, "numPackStreams"); int nid = getUnsignedByte(header); if (nid == NID.kSize) { - archive.packSizes = new long[checkLongArray(numPackStreamsInt)]; + ensureRemaining(header, MIN_UINT64_BYTES * numPackStreamsInt); + archive.packSizes = longArray(numPackStreamsInt); for (int i = 0; i < archive.packSizes.length; i++) { archive.packSizes[i] = readUint64(header); } @@ -1637,10 +1765,11 @@ private void readPackInfo(final ByteBuffer header, final Archive archive) throws } if (nid == NID.kCRC) { archive.packCrcsDefined = readAllOrBits(header, numPackStreamsInt); - archive.packCrcs = new long[checkLongArray(numPackStreamsInt)]; + ensureRemaining(header, UINT32_BYTES * archive.packCrcsDefined.cardinality()); + archive.packCrcs = longArray(numPackStreamsInt); for (int i = 0; i < numPackStreamsInt; i++) { if (archive.packCrcsDefined.get(i)) { - archive.packCrcs[i] = 0xffffFFFFL & getInt(header); + archive.packCrcs[i] = readUint32(header); } } // read one more @@ -1658,16 +1787,15 @@ private StartHeader readStartHeader(final long startHeaderCrc) throws IOExceptio .setExpectedChecksumValue(startHeaderCrc) .get())) { // @formatter:on - final long nextHeaderOffset = Long.reverseBytes(dataInputStream.readLong()); - if (nextHeaderOffset < 0 || nextHeaderOffset + SIGNATURE_HEADER_SIZE > 
channel.size()) { + final long nextHeaderOffset = readRealUint64(dataInputStream); + if (nextHeaderOffset > channel.size() - SIGNATURE_HEADER_SIZE) { throw new ArchiveException("nextHeaderOffset is out of bounds"); } - final long nextHeaderSize = Long.reverseBytes(dataInputStream.readLong()); - final long nextHeaderEnd = nextHeaderOffset + nextHeaderSize; - if (nextHeaderEnd < nextHeaderOffset || nextHeaderEnd + SIGNATURE_HEADER_SIZE > channel.size()) { + final int nextHeaderSize = toNonNegativeInt("nextHeaderSize", readRealUint64(dataInputStream)); + if (nextHeaderSize > channel.size() - SIGNATURE_HEADER_SIZE - nextHeaderOffset) { throw new ArchiveException("nextHeaderSize is out of bounds"); } - final long nextHeaderCrc = 0xffffFFFFL & Integer.reverseBytes(dataInputStream.readInt()); + final long nextHeaderCrc = readUint32(dataInputStream); return new StartHeader(nextHeaderOffset, nextHeaderSize, nextHeaderCrc); } } @@ -1695,14 +1823,13 @@ private void readSubStreamsInfo(final ByteBuffer header, final Archive archive) for (final Folder folder : archive.folders) { folder.numUnpackSubStreams = 1; } - long unpackStreamsCount = archive.folders.length; + int unpackStreamsCount = archive.folders.length; int nid = getUnsignedByte(header); if (nid == NID.kNumUnpackStream) { unpackStreamsCount = 0; for (final Folder folder : archive.folders) { - final long numStreams = readUint64(header); - folder.numUnpackSubStreams = (int) numStreams; - unpackStreamsCount = ArchiveException.addExact(unpackStreamsCount, numStreams); + folder.numUnpackSubStreams = readUint64ToIntExact(header, "numUnpackSubStreams"); + unpackStreamsCount = accumulate(unpackStreamsCount, folder.numUnpackSubStreams, "numUnpackStreams"); } nid = getUnsignedByte(header); } @@ -1712,18 +1839,19 @@ private void readSubStreamsInfo(final ByteBuffer header, final Archive archive) if (folder.numUnpackSubStreams == 0) { continue; } - long sum = 0; + long totalUnpackSize = 0; if (nid == NID.kSize) { + 
ensureRemaining(header, MIN_UINT64_BYTES * (folder.numUnpackSubStreams - 1)); for (int i = 0; i < folder.numUnpackSubStreams - 1; i++) { final long size = readUint64(header); subStreamsInfo.unpackSizes[nextUnpackStream++] = size; - sum = ArchiveException.addExact(sum, size); + totalUnpackSize = accumulate(totalUnpackSize, size, "unpackSize"); } } - if (sum > folder.getUnpackSize()) { + if (totalUnpackSize > folder.getUnpackSize()) { throw new ArchiveException("Sum of unpack sizes of folder exceeds total unpack size"); } - subStreamsInfo.unpackSizes[nextUnpackStream++] = folder.getUnpackSize() - sum; + subStreamsInfo.unpackSizes[nextUnpackStream++] = folder.getUnpackSize() - totalUnpackSize; } if (nid == NID.kSize) { nid = getUnsignedByte(header); @@ -1731,15 +1859,16 @@ private void readSubStreamsInfo(final ByteBuffer header, final Archive archive) int numDigests = 0; for (final Folder folder : archive.folders) { if (folder.numUnpackSubStreams != 1 || !folder.hasCrc) { - numDigests = ArchiveException.addExact(numDigests, folder.numUnpackSubStreams); + numDigests = accumulate(numDigests, folder.numUnpackSubStreams, "numDigests"); } } if (nid == NID.kCRC) { final BitSet hasMissingCrc = readAllOrBits(header, numDigests); - final long[] missingCrcs = new long[checkLongArray(numDigests)]; + ensureRemaining(header, UINT32_BYTES * hasMissingCrc.cardinality()); + final long[] missingCrcs = longArray(numDigests); for (int i = 0; i < numDigests; i++) { if (hasMissingCrc.get(i)) { - missingCrcs[i] = 0xffffFFFFL & getInt(header); + missingCrcs[i] = readUint32(header); } } int nextCrc = 0; @@ -1765,16 +1894,23 @@ private void readSubStreamsInfo(final ByteBuffer header, final Archive archive) private void readUnpackInfo(final ByteBuffer header, final Archive archive) throws IOException { int nid = getUnsignedByte(header); - final int numFoldersInt = readUint64ToIntExact(header); - final Folder[] folders = new Folder[checkObjectArray(numFoldersInt)]; - archive.folders = folders; 
+ final int numFoldersInt = readUint64ToIntExact(header, "numFolders"); /* final int external = */ getUnsignedByte(header); + // Verify available header bytes and memory limit before allocating array + // A folder requires at least 3 bytes: the number of coders (1 byte), the flag byte for the coder (1 byte), + // and at least 1 byte for the method id (1 byte) + ensureRemaining(header, 3L * numFoldersInt); + // Assumes compressed pointer + MemoryLimitException.checkKiB(bytesToKiB(numFoldersInt * 4L), maxMemoryLimitKiB); + final Folder[] folders = new Folder[numFoldersInt]; + archive.folders = folders; for (int i = 0; i < numFoldersInt; i++) { folders[i] = readFolder(header); } nid = getUnsignedByte(header); for (final Folder folder : folders) { - folder.unpackSizes = new long[checkLongArray(toNonNegativeInt("totalOutputStreams", folder.totalOutputStreams))]; + ensureRemaining(header, folder.totalOutputStreams); + folder.unpackSizes = longArray(folder.totalOutputStreams); for (int i = 0; i < folder.totalOutputStreams; i++) { folder.unpackSizes[i] = readUint64(header); } @@ -1782,10 +1918,11 @@ private void readUnpackInfo(final ByteBuffer header, final Archive archive) thro nid = getUnsignedByte(header); if (nid == NID.kCRC) { final BitSet crcsDefined = readAllOrBits(header, numFoldersInt); + ensureRemaining(header, UINT32_BYTES * crcsDefined.cardinality()); for (int i = 0; i < numFoldersInt; i++) { if (crcsDefined.get(i)) { folders[i].hasCrc = true; - folders[i].crc = 0xffffFFFFL & getInt(header); + folders[i].crc = readUint32(header); } else { folders[i].hasCrc = false; } @@ -1841,27 +1978,23 @@ private ArchiveStatistics sanityCheckAndCollectStatistics(final ByteBuffer heade private void sanityCheckArchiveProperties(final ByteBuffer header) throws IOException { long nid = readUint64(header); while (nid != NID.kEnd) { - final int propertySize = toNonNegativeInt("propertySize", readUint64(header)); - if (skipBytesFully(header, propertySize) < propertySize) { - throw 
new ArchiveException("Invalid property size"); - } + // We validate the size but ignore the value + final int propertySize = readFieldSize(header); + skipBytesFully(header, propertySize); nid = readUint64(header); } } private void sanityCheckFilesInfo(final ByteBuffer header, final ArchiveStatistics stats) throws IOException { - stats.numberOfEntries = toNonNegativeInt("numFiles", readUint64(header)); + stats.numberOfEntries = readUint64ToIntExact(header, "numFiles"); int emptyStreams = -1; final int originalLimit = header.limit(); while (true) { final int propertyType = getUnsignedByte(header); - if (propertyType == 0) { + if (propertyType == NID.kEnd) { break; } - final int size = readUint64ToIntExact(header); - if (size < 0 || size > header.remaining()) { - throw new ArchiveException("Corrupted 7z archive: property size %,d, but only %,d bytes available", size, header.remaining()); - } + final int size = readFieldSize(header); // Limit the buffer to the size of the property header.limit(header.position() + size); switch (propertyType) { @@ -1873,14 +2006,14 @@ private void sanityCheckFilesInfo(final ByteBuffer header, final ArchiveStatisti if (emptyStreams == -1) { throw new ArchiveException("Header format error: kEmptyStream must appear before kEmptyFile"); } - readBits(header, emptyStreams); + skipBytesFully(header, size); break; } case NID.kAnti: { if (emptyStreams == -1) { throw new ArchiveException("Header format error: kEmptyStream must appear before kAnti"); } - readBits(header, emptyStreams); + skipBytesFully(header, size); break; } case NID.kName: { @@ -1904,48 +2037,16 @@ private void sanityCheckFilesInfo(final ByteBuffer header, final ArchiveStatisti } break; } - case NID.kCTime: { - final int timesDefined = readAllOrBits(header, stats.numberOfEntries).cardinality(); - final int external = getUnsignedByte(header); - if (external != 0) { - throw new ArchiveException("Not implemented"); - } - if (skipBytesFully(header, 8 * timesDefined) < 8 * 
timesDefined) { - throw new ArchiveException("Invalid creation dates size"); - } - break; - } - case NID.kATime: { - final int timesDefined = readAllOrBits(header, stats.numberOfEntries).cardinality(); - final int external = getUnsignedByte(header); - if (external != 0) { - throw new ArchiveException("Not implemented"); - } - if (skipBytesFully(header, 8 * timesDefined) < 8 * timesDefined) { - throw new ArchiveException("Invalid access dates size"); - } - break; - } - case NID.kMTime: { - final int timesDefined = readAllOrBits(header, stats.numberOfEntries).cardinality(); - final int external = getUnsignedByte(header); - if (external != 0) { - throw new ArchiveException("Not implemented"); - } - if (skipBytesFully(header, 8 * timesDefined) < 8 * timesDefined) { - throw new ArchiveException("Invalid modification dates size"); - } - break; - } + case NID.kCTime: + case NID.kATime: + case NID.kMTime: case NID.kWinAttributes: { - final int attributesDefined = readAllOrBits(header, stats.numberOfEntries).cardinality(); + final int definedCount = readAllOrBits(header, stats.numberOfEntries).cardinality(); final int external = getUnsignedByte(header); if (external != 0) { throw new ArchiveException("Not implemented"); } - if (skipBytesFully(header, 4 * attributesDefined) < 4 * attributesDefined) { - throw new ArchiveException("Invalid windows attributes size"); - } + skipBytesFully(header, (propertyType == NID.kWinAttributes ? 
UINT32_BYTES : REAL_UINT64_BYTES) * definedCount); break; } case NID.kStartPos: { @@ -1954,19 +2055,19 @@ private void sanityCheckFilesInfo(final ByteBuffer header, final ArchiveStatisti case NID.kDummy: { // 7z 9.20 asserts the content is all zeros and ignores the property // Compress up to 1.8.1 would throw an exception, now we ignore it (see COMPRESS-287 - if (skipBytesFully(header, size) < size) { - throw new ArchiveException("Incomplete kDummy property"); - } + skipBytesFully(header, size); break; } default: { // Compress up to 1.8.1 would throw an exception, now we ignore it (see COMPRESS-287 - if (skipBytesFully(header, size) < size) { - throw new ArchiveException("Incomplete property of type " + propertyType); - } + skipBytesFully(header, size); break; } } + // We should have consumed all the bytes by now + if (header.remaining() > 0) { + throw new ArchiveException("Unsupported 7z archive: property 0x%02x has %d trailing bytes.", propertyType, header.remaining()); + } // Restore original limit header.limit(originalLimit); } @@ -1974,17 +2075,16 @@ } private long sanityCheckFolder(final ByteBuffer header, final ArchiveStatistics stats) throws IOException { - final int numCoders = toNonNegativeInt("numCoders", readUint64(header)); - if (numCoders == 0) { - throw new ArchiveException("Folder without coders"); + final long numCoders = readUint64(header); + if (numCoders == 0 || numCoders > MAX_CODERS_PER_FOLDER) { + throw new ArchiveException("Unsupported 7z archive: %,d coders in folder.", numCoders); } - stats.numberOfCoders = ArchiveException.addExact(stats.numberOfCoders, numCoders); - long totalOutStreams = 0; - long totalInStreams = 0; + stats.numberOfCoders = accumulate(stats.numberOfCoders, numCoders, "numCoders"); + int totalInStreams = 0; for (int i = 0; i < numCoders; i++) { final int bits = getUnsignedByte(header); final int idSize = bits & 0xf; - get(header, new 
byte[idSize]); + skipBytesFully(header, idSize); final boolean isSimple = (bits & 0x10) == 0; final boolean hasAttributes = (bits & 0x20) != 0; final boolean moreAlternativeMethods = (bits & 0x80) != 0; @@ -1993,37 +2093,36 @@ private long sanityCheckFolder(final ByteBuffer header, final ArchiveStatistics } if (isSimple) { totalInStreams++; - totalOutStreams++; } else { - totalInStreams = ArchiveException.addExact(totalInStreams, toNonNegativeInt("numInStreams", readUint64(header))); - totalOutStreams = ArchiveException.addExact(totalOutStreams, toNonNegativeInt("numOutStreams", readUint64(header))); + final long numInStreams = readUint64(header); + if (numInStreams > MAX_CODER_STREAMS_PER_FOLDER) { + throw new ArchiveException("Unsupported 7z archive: %,d coder input streams in folder.", numInStreams); + } + if (readUint64(header) != 1) { + throw new ArchiveException("Unsupported 7z archive: %,d coder output streams in folder.", readUint64(header)); + } + totalInStreams += (int) numInStreams; } if (hasAttributes) { - final int propertiesSize = toNonNegativeInt("propertiesSize", readUint64(header)); - if (skipBytesFully(header, propertiesSize) < propertiesSize) { - throw new ArchiveException("Invalid propertiesSize in folder"); - } + final int propertiesSize = readFieldSize(header); + skipBytesFully(header, propertiesSize); } } - toNonNegativeInt("totalInStreams", totalInStreams); - toNonNegativeInt("totalOutStreams", totalOutStreams); - stats.numberOfOutStreams = ArchiveException.addExact(stats.numberOfOutStreams, totalOutStreams); - stats.numberOfInStreams = ArchiveException.addExact(stats.numberOfInStreams, totalInStreams); - if (totalOutStreams == 0) { - throw new ArchiveException("Total output streams can't be 0"); - } - final int numBindPairs = toNonNegativeInt("numBindPairs", totalOutStreams - 1); + final int totalOutStreams = (int) numCoders; + stats.numberOfOutStreams = accumulate(stats.numberOfOutStreams, numCoders, "numOutStreams"); + 
stats.numberOfInStreams = accumulate(stats.numberOfInStreams, totalInStreams, "numInStreams"); + final int numBindPairs = totalOutStreams - 1; if (totalInStreams < numBindPairs) { throw new ArchiveException("Total input streams can't be less than the number of bind pairs"); } - final BitSet inStreamsBound = new BitSet((int) totalInStreams); + final BitSet inStreamsBound = new BitSet(totalInStreams); for (int i = 0; i < numBindPairs; i++) { - final int inIndex = toNonNegativeInt("inIndex", readUint64(header)); + final int inIndex = readUint64ToIntExact(header, "inIndex"); if (totalInStreams <= inIndex) { throw new ArchiveException("inIndex is bigger than number of inStreams"); } inStreamsBound.set(inIndex); - final int outIndex = toNonNegativeInt("outIndex", readUint64(header)); + final int outIndex = readUint64ToIntExact(header, "outIndex"); if (totalOutStreams <= outIndex) { throw new ArchiveException("outIndex is bigger than number of outStreams"); } @@ -2035,7 +2134,7 @@ private long sanityCheckFolder(final ByteBuffer header, final ArchiveStatistics } } else { for (int i = 0; i < numPackedStreams; i++) { - final int packedStreamIndex = toNonNegativeInt("packedStreamIndex", readUint64(header)); + final int packedStreamIndex = readUint64ToIntExact(header, "packedStreamIndex"); if (packedStreamIndex >= totalInStreams) { throw new ArchiveException("packedStreamIndex is bigger than number of totalInStreams"); } @@ -2046,19 +2145,19 @@ private long sanityCheckFolder(final ByteBuffer header, final ArchiveStatistics private void sanityCheckPackInfo(final ByteBuffer header, final ArchiveStatistics stats) throws IOException { final long packPos = readUint64(header); - if (packPos < 0 || SIGNATURE_HEADER_SIZE + packPos > channel.size() || SIGNATURE_HEADER_SIZE + packPos < 0) { + if (packPos > channel.size() - SIGNATURE_HEADER_SIZE) { throw new ArchiveException("packPos (%,d) is out of range", packPos); } - final long numPackStreams = readUint64(header); - 
stats.numberOfPackedStreams = toNonNegativeInt("numPackStreams", numPackStreams); + stats.numberOfPackedStreams = readUint64ToIntExact(header, "numPackStreams"); int nid = getUnsignedByte(header); if (nid == NID.kSize) { long totalPackSizes = 0; + ensureRemaining(header, MIN_UINT64_BYTES * stats.numberOfPackedStreams); for (int i = 0; i < stats.numberOfPackedStreams; i++) { final long packSize = readUint64(header); - totalPackSizes = ArchiveException.addExact(totalPackSizes, packSize); - final long endOfPackStreams = SIGNATURE_HEADER_SIZE + packPos + totalPackSizes; - if (packSize < 0 || endOfPackStreams > channel.size() || endOfPackStreams < packPos) { + totalPackSizes = accumulate(totalPackSizes, packSize, "packSize"); + // We check the total pack size against the file size. + if (totalPackSizes > channel.size() - SIGNATURE_HEADER_SIZE - packPos) { throw new ArchiveException("packSize (%,d) is out of range", packSize); } } @@ -2066,9 +2165,7 @@ private void sanityCheckPackInfo(final ByteBuffer header, final ArchiveStatistic } if (nid == NID.kCRC) { final int crcsDefined = readAllOrBits(header, stats.numberOfPackedStreams).cardinality(); - if (skipBytesFully(header, 4 * crcsDefined) < 4 * crcsDefined) { - throw new ArchiveException("Invalid number of CRCs in PackInfo"); - } + skipBytesFully(header, 4L * crcsDefined); nid = getUnsignedByte(header); } if (nid != NID.kEnd) { @@ -2100,7 +2197,7 @@ private void sanityCheckSubStreamsInfo(final ByteBuffer header, final ArchiveSta final List numUnpackSubStreamsPerFolder = new LinkedList<>(); if (nid == NID.kNumUnpackStream) { for (int i = 0; i < stats.numberOfFolders; i++) { - numUnpackSubStreamsPerFolder.add(toNonNegativeInt("numStreams", readUint64(header))); + numUnpackSubStreamsPerFolder.add(readUint64ToIntExact(header, "numStreams")); } stats.numberOfUnpackSubStreams = numUnpackSubStreamsPerFolder.stream().mapToLong(Integer::longValue).sum(); nid = getUnsignedByte(header); @@ -2114,10 +2211,7 @@ private void 
sanityCheckSubStreamsInfo(final ByteBuffer header, final ArchiveSta continue; } for (int i = 0; i < numUnpackSubStreams - 1; i++) { - final long size = readUint64(header); - if (size < 0) { - throw new ArchiveException("Negative unpackSize"); - } + readUint64(header); } } nid = getUnsignedByte(header); @@ -2129,16 +2223,13 @@ private void sanityCheckSubStreamsInfo(final ByteBuffer header, final ArchiveSta int folderIdx = 0; for (final int numUnpackSubStreams : numUnpackSubStreamsPerFolder) { if (numUnpackSubStreams != 1 || stats.folderHasCrc == null || !stats.folderHasCrc.get(folderIdx++)) { - numDigests = ArchiveException.addExact(numDigests, numUnpackSubStreams); + numDigests = accumulate(numDigests, numUnpackSubStreams, "numDigests"); } } } if (nid == NID.kCRC) { - toNonNegativeInt("numDigests", numDigests); final int missingCrcs = readAllOrBits(header, numDigests).cardinality(); - if (skipBytesFully(header, 4 * missingCrcs) < 4 * missingCrcs) { - throw new ArchiveException("Invalid number of missing CRCs in SubStreamInfo"); - } + skipBytesFully(header, UINT32_BYTES * missingCrcs); nid = getUnsignedByte(header); } if (nid != NID.kEnd) { @@ -2151,8 +2242,7 @@ private void sanityCheckUnpackInfo(final ByteBuffer header, final ArchiveStatist if (nid != NID.kFolder) { throw new ArchiveException("Expected NID.kFolder, got %s", nid); } - final long numFolders = readUint64(header); - stats.numberOfFolders = toNonNegativeInt("numFolders", numFolders); + stats.numberOfFolders = readUint64ToIntExact(header, "numFolders"); final int external = getUnsignedByte(header); if (external != 0) { throw new ArchiveException("External unsupported"); @@ -2172,19 +2262,14 @@ private void sanityCheckUnpackInfo(final ByteBuffer header, final ArchiveStatist } for (final long numberOfOutputStreams : numberOfOutputStreamsPerFolder) { for (long i = 0; i < numberOfOutputStreams; i++) { - final long unpackSize = readUint64(header); - if (unpackSize < 0) { - throw new 
IllegalArgumentException("Negative unpackSize"); - } + readUint64(header); } } nid = getUnsignedByte(header); if (nid == NID.kCRC) { stats.folderHasCrc = readAllOrBits(header, stats.numberOfFolders); final int crcsDefined = stats.folderHasCrc.cardinality(); - if (skipBytesFully(header, 4 * crcsDefined) < 4 * crcsDefined) { - throw new ArchiveException("Invalid number of CRCs in UnpackInfo"); - } + skipBytesFully(header, UINT32_BYTES * crcsDefined); nid = getUnsignedByte(header); } if (nid != NID.kEnd) { @@ -2273,6 +2358,22 @@ public IOStream extends SevenZArchiveEntry> stream() { return IOStream.of(archive.files); } + /** + * Converts the given ByteBuffer to a byte array of the given size. + * + * @param header The buffer containing the 7z header data. + * @param size The size of the byte array to create. + * @return A byte array containing the data from the buffer. + * @throws IOException if there are insufficient resources to allocate the array or insufficient data in the buffer. + */ + private byte[] toByteArray(final ByteBuffer header, final int size) throws IOException { + // Check if we have enough resources to allocate the array + MemoryLimitException.checkKiB(bytesToKiB(size * Byte.BYTES), maxMemoryLimitKiB); + final byte[] result = new byte[size]; + get(header, result); + return result; + } + @Override public String toString() { return archive.toString(); @@ -2305,8 +2406,9 @@ private Archive tryToLocateEndHeader(final byte[] password) throws IOException { try { // Try to initialize Archive structure from here final long nextHeaderOffset = pos - previousDataSize; + // Smaller than 1 MiB, so fits in an int final long nextHeaderSize = channel.size() - pos; - final StartHeader startHeader = new StartHeader(nextHeaderOffset, nextHeaderSize, 0); + final StartHeader startHeader = new StartHeader(nextHeaderOffset, (int) nextHeaderSize, 0); final Archive result = initializeArchive(startHeader, password, false); // Sanity check: There must be some data... 
if (result.packSizes.length > 0 && result.files.length > 0) { diff --git a/src/main/java/org/apache/commons/compress/archivers/sevenz/StartHeader.java b/src/main/java/org/apache/commons/compress/archivers/sevenz/StartHeader.java index a1821dd83ac..bf7212fe102 100644 --- a/src/main/java/org/apache/commons/compress/archivers/sevenz/StartHeader.java +++ b/src/main/java/org/apache/commons/compress/archivers/sevenz/StartHeader.java @@ -21,10 +21,14 @@ final class StartHeader { final long nextHeaderOffset; - final long nextHeaderSize; + final int nextHeaderSize; final long nextHeaderCrc; - StartHeader(final long nextHeaderOffset, final long nextHeaderSize, final long nextHeaderCrc) { + StartHeader(final long nextHeaderOffset, final int nextHeaderSize, final long nextHeaderCrc) { + // The interval [SIGNATURE_HEADER_SIZE + nextHeaderOffset, SIGNATURE_HEADER_SIZE + nextHeaderOffset + nextHeaderSize) + // must be a valid range of the file, in particular must be within [0, Long.MAX_VALUE). + assert nextHeaderOffset >= 0 && nextHeaderOffset <= Long.MAX_VALUE - SevenZFile.SIGNATURE_HEADER_SIZE; + assert nextHeaderSize >= 0 && nextHeaderSize <= Long.MAX_VALUE - SevenZFile.SIGNATURE_HEADER_SIZE - nextHeaderOffset; this.nextHeaderOffset = nextHeaderOffset; this.nextHeaderSize = nextHeaderSize; this.nextHeaderCrc = nextHeaderCrc; diff --git a/src/main/java/org/apache/commons/compress/archivers/sevenz/SubStreamsInfo.java b/src/main/java/org/apache/commons/compress/archivers/sevenz/SubStreamsInfo.java index 3b5ece3e7e5..f15cba1ee74 100644 --- a/src/main/java/org/apache/commons/compress/archivers/sevenz/SubStreamsInfo.java +++ b/src/main/java/org/apache/commons/compress/archivers/sevenz/SubStreamsInfo.java @@ -41,15 +41,14 @@ final class SubStreamsInfo { */ final long[] crcs; - SubStreamsInfo(final long totalUnpackStreams, final int maxMemoryLimitKiB) throws CompressException { - final int intExactCount = Math.toIntExact(totalUnpackStreams); - int alloc; + SubStreamsInfo(final int 
totalUnpackStreams, final int maxMemoryLimitKiB) throws CompressException { + long alloc; try { // 2 long arrays, just count the longs - alloc = Math.multiplyExact(intExactCount, Long.BYTES * 2); + alloc = Math.multiplyExact(totalUnpackStreams, Long.BYTES * 2); // one BitSet [boolean, long[], int]. just count the long array - final int sizeOfBitSet = Math.multiplyExact(Long.BYTES, (intExactCount - 1 >> 6) + 1); - alloc = Math.addExact(alloc, Math.multiplyExact(intExactCount, sizeOfBitSet)); + final int sizeOfBitSet = Math.multiplyExact(Long.BYTES, (totalUnpackStreams - 1 >> 6) + 1); + alloc = Math.addExact(alloc, Math.multiplyExact(totalUnpackStreams, sizeOfBitSet)); } catch (final ArithmeticException e) { throw new CompressException("Cannot create allocation request for a SubStreamsInfo of totalUnpackStreams %,d, maxMemoryLimitKiB %,d: %s", totalUnpackStreams, maxMemoryLimitKiB, e); @@ -57,8 +56,8 @@ final class SubStreamsInfo { // Avoid false positives. // Not a reliable check in old VMs or in low memory VMs. 
// MemoryLimitException.checkKiB(SevenZFile.bytesToKiB(alloc), maxMemoryLimitKiB); - this.hasCrc = new BitSet(intExactCount); - this.crcs = new long[intExactCount]; - this.unpackSizes = new long[intExactCount]; + this.hasCrc = new BitSet(totalUnpackStreams); + this.crcs = new long[totalUnpackStreams]; + this.unpackSizes = new long[totalUnpackStreams]; } } diff --git a/src/test/java/org/apache/commons/compress/archivers/sevenz/SevenZFileTest.java b/src/test/java/org/apache/commons/compress/archivers/sevenz/SevenZFileTest.java index 772a2101f90..0c338c86ecb 100644 --- a/src/test/java/org/apache/commons/compress/archivers/sevenz/SevenZFileTest.java +++ b/src/test/java/org/apache/commons/compress/archivers/sevenz/SevenZFileTest.java @@ -29,7 +29,9 @@ import static org.junit.jupiter.api.Assertions.assertThrows; import static org.junit.jupiter.api.Assertions.assertTrue; +import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; +import java.io.DataInputStream; import java.io.File; import java.io.IOException; import java.io.InputStream; @@ -66,6 +68,7 @@ import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; import org.junit.jupiter.params.provider.MethodSource; class SevenZFileTest extends AbstractArchiveFileTest { @@ -124,6 +127,63 @@ static Stream > testReadFolder_Unsupported() { ); } + static Stream testReadRealUint64_Invalid() { + final byte m = (byte) 0xff; + return Stream.of( + new byte[] { (byte) 0b11111111, 0, 0, 0, 0, 0, 0, (byte) 0x80 }, + new byte[] { (byte) 0b11111111, m, m, m, m, m, m, m } + ); + } + + static Stream testReadRealUint64_Valid() { + final byte m = (byte) 0xff; + return Stream.of( + Arguments.of(new byte[] { 0, 1, 2, 3, 4, 5, 6, 7 }, 0x0706_0504_0302_0100L), + Arguments.of(new byte[] { m, m, m, m, m, m, m, Byte.MAX_VALUE }, 0x7FFF_FFFF_FFFF_FFFFL) + ); + } + + static Stream testReadUint32_Valid() { + final 
byte m = (byte) 0xff; + return Stream.of( + Arguments.of(new byte[] { 0, 1, 2, 3 }, 0x0302_0100L), + Arguments.of(new byte[] { m, m, m, Byte.MAX_VALUE }, 0x7FFF_FFFFL), + Arguments.of(new byte[] { m, m, m, m }, 0xFFFF_FFFFL) + ); + } + + static Stream testReadUint64_Overflow() { + final byte m = (byte) 0xff; + return Stream.of( + new byte[] { (byte) 0b11111111, 0, 0, 0, 0, 0, 0, 0, (byte) 0x80 }, + new byte[] { (byte) 0b11111111, m, m, m, m, m, m, m, m } + ); + } + + static Stream testReadUint64_Valid() { + final byte m = (byte) 0xff; + return Stream.of( + Arguments.of(new byte[] { 0 }, 0L), + Arguments.of(new byte[] { Byte.MAX_VALUE }, 0x7FL), + Arguments.of(new byte[] { (byte) 0b10_000001, 2 }, 0x0102L), + Arguments.of(new byte[] { (byte) 0b10_111111, m }, 0x3FFFL), + Arguments.of(new byte[] { (byte) 0b110_00001, 3, 2 }, 0x01_0203L), + Arguments.of(new byte[] { (byte) 0b110_11111, m, m }, 0x1F_FFFFL), + Arguments.of(new byte[] { (byte) 0b1110_0001, 4, 3, 2 }, 0x0102_0304L), + Arguments.of(new byte[] { (byte) 0b1110_1111, m, m, m }, 0x0FFF_FFFFL), + Arguments.of(new byte[] { (byte) 0b11110_001, 5, 4, 3, 2 }, 0x01_0203_0405L), + Arguments.of(new byte[] { (byte) 0b11110_111, m, m, m, m }, 0x07_FFFF_FFFFL), + Arguments.of(new byte[] { (byte) 0b111110_01, 6, 5, 4, 3, 2 }, 0x0102_0304_0506L), + Arguments.of(new byte[] { (byte) 0b111110_11, m, m, m, m, m }, 0x03FF_FFFF_FFFFL), + Arguments.of(new byte[] { (byte) 0b1111110_1, 7, 6, 5, 4, 3, 2 }, 0x01_0203_0405_0607L), + Arguments.of(new byte[] { (byte) 0b1111110_1, m, m, m, m, m, m }, 0x01_FFFF_FFFF_FFFFL), + Arguments.of(new byte[] { (byte) 0b11111110, 7, 6, 5, 4, 3, 2, 1 }, 0x01_0203_0405_0607L), + Arguments.of(new byte[] { (byte) 0b11111110, m, m, m, m, m, m, m }, 0xFF_FFFF_FFFF_FFFFL), + Arguments.of(new byte[] { (byte) 0b11111111, 8, 7, 6, 5, 4, 3, 2, 1 }, 0x0102_0304_0506_0708L), + Arguments.of(new byte[] { (byte) 0b11111111, m, m, m, m, m, m, m, Byte.MAX_VALUE }, 0x7FFF_FFFF_FFFF_FFFFL) + ); + } + private static 
void writeBindPair(final ByteBuffer buffer, final long inIndex, final long outIndex) { writeUint64(buffer, inIndex); writeUint64(buffer, outIndex); @@ -1028,6 +1088,23 @@ void testReadingBackLZMA2DictSize() throws Exception { } } + @ParameterizedTest + @MethodSource + void testReadRealUint64_Invalid(final byte[] input) throws IOException { + try (DataInputStream dis = new DataInputStream(new ByteArrayInputStream(input))) { + assertThrows(IOException.class, () -> SevenZFile.readRealUint64(dis)); + } + } + + @ParameterizedTest + @MethodSource + void testReadRealUint64_Valid(final byte[] input, final long expected) throws IOException { + try (DataInputStream dis = new DataInputStream(new ByteArrayInputStream(input))) { + final long actual = SevenZFile.readRealUint64(dis); + assertEquals(expected, actual); + } + } + @Test void testReadTimesFromFile() throws IOException { try (SevenZFile sevenZFile = getSevenZFile("times.7z")) { @@ -1054,6 +1131,33 @@ void testReadTimesFromFile() throws IOException { } } + @ParameterizedTest + @MethodSource + void testReadUint32_Valid(final byte[] input, final long expected) throws IOException { + try (DataInputStream dis = new DataInputStream(new ByteArrayInputStream(input))) { + final long actual = SevenZFile.readUint32(dis); + assertEquals(expected, actual); + } + final ByteBuffer buf = ByteBuffer.wrap(input).order(ByteOrder.LITTLE_ENDIAN); + final long actual = SevenZFile.readUint32(buf); + assertEquals(expected, actual); + } + + @ParameterizedTest + @MethodSource + void testReadUint64_Overflow(final byte[] bytes) { + final ByteBuffer buf = ByteBuffer.wrap(bytes); + final ArchiveException ex = assertThrows(ArchiveException.class, () -> SevenZFile.readUint64(buf)); + assertTrue(ex.getMessage().contains("Unsupported 7-Zip archive")); + } + + @ParameterizedTest + @MethodSource + void testReadUint64_Valid(final byte[] bytes, final long expected) throws IOException { + final ByteBuffer buf = ByteBuffer.wrap(bytes); + 
assertEquals(expected, SevenZFile.readUint64(buf)); + } + @Test void testRemainingBytesUnchangedAfterRead() throws Exception { try (SevenZFile sevenZFile = getSevenZFile("COMPRESS-256.7z")) { From ca4662d301544c1c482bbf2cffc05c6b06b6b44b Mon Sep 17 00:00:00 2001 From: "Gary D. Gregory" Date: Sat, 18 Oct 2025 08:05:17 -0400 Subject: [PATCH 32/40] Normalize some error messages --- .../archivers/ar/ArArchiveInputStream.java | 22 ++-- .../compress/archivers/sevenz/SevenZFile.java | 111 +++++++++--------- .../archivers/tar/TarArchiveInputStream.java | 4 +- .../compress/archivers/tar/TarFile.java | 4 +- .../compress/archivers/tar/TarUtils.java | 4 +- .../bytecode/forms/ByteCodeForm.java | 2 +- .../commons/compress/utils/ArchiveUtils.java | 2 +- .../archivers/sevenz/SevenZFileTest.java | 6 +- 8 files changed, 77 insertions(+), 78 deletions(-) diff --git a/src/main/java/org/apache/commons/compress/archivers/ar/ArArchiveInputStream.java b/src/main/java/org/apache/commons/compress/archivers/ar/ArArchiveInputStream.java index fdd36804071..a4c8091e5a1 100644 --- a/src/main/java/org/apache/commons/compress/archivers/ar/ArArchiveInputStream.java +++ b/src/main/java/org/apache/commons/compress/archivers/ar/ArArchiveInputStream.java @@ -217,7 +217,7 @@ private void checkTrailer() throws IOException { final byte[] actualTrailer = IOUtils.readRange(in, expectedTrailer.length); if (actualTrailer.length < expectedTrailer.length) { throw new EOFException(String.format( - "Premature end of ar archive: invalid or incomplete trailer for entry '%s'.", + "Premature end of ar archive: Invalid or incomplete trailer for entry '%s'.", ArchiveUtils.toAsciiString(metaData, NAME_OFFSET, NAME_LEN).trim())); } count(actualTrailer.length); @@ -333,7 +333,7 @@ public ArArchiveEntry getNextEntry() throws IOException { // // Reference: https://man.freebsd.org/cgi/man.cgi?query=ar&sektion=5 if (foundGNUStringTable) { - throw new EOFException("Premature end of ar archive: no regular entry after GNU string 
table."); + throw new EOFException("Premature end of ar archive: No regular entry after GNU string table."); } currentEntry = null; return null; // End of archive @@ -368,7 +368,7 @@ public ArArchiveEntry getNextEntry() throws IOException { final int nameLen = name.length(); if (nameLen > len) { throw new ArchiveException( - "Invalid BSD long name: file name length (" + nameLen + ") exceeds entry length (" + len + ")"); + "Invalid BSD long name: File name length (" + nameLen + ") exceeds entry length (" + len + ")"); } len -= nameLen; entryOffset += nameLen; @@ -396,7 +396,7 @@ private byte[] getRecord() throws IOException { } if (read < metaData.length) { throw new EOFException(String.format( - "Premature end of ar archive: incomplete entry header (expected %d bytes, got %d).", + "Premature end of ar archive: Incomplete entry header (expected %d bytes, got %d).", metaData.length, read)); } return metaData; @@ -458,7 +458,7 @@ public int read(final byte[] b, final int off, final int len) throws IOException final int ret = in.read(b, off, toRead); if (ret < 0) { throw new EOFException(String.format( - "Premature end of ar archive: entry '%s' is truncated or incomplete.", currentEntry.getName())); + "Premature end of ar archive: Entry '%s' is truncated or incomplete.", currentEntry.getName())); } count(ret); return ret; @@ -477,7 +477,7 @@ private byte[] readGNUStringTable(final ArArchiveEntry entry) throws IOException final byte[] namebuffer = IOUtils.readRange(in, size); final int read = namebuffer.length; if (read < size) { - throw new EOFException("Premature end of ar archive: truncated or incomplete GNU string table."); + throw new EOFException("Premature end of ar archive: Truncated or incomplete GNU string table."); } count(read); return namebuffer; @@ -493,13 +493,11 @@ private void skipGlobalSignature() throws IOException { final byte[] actualMagic = IOUtils.readRange(in, expectedMagic.length); count(actualMagic.length); if (expectedMagic.length != 
actualMagic.length) { - throw new EOFException(String.format( - "Premature end of ar archive: incomplete global header (expected %d bytes, got %d).", - expectedMagic.length, actualMagic.length)); + throw new EOFException(String.format("Premature end of ar archive: Incomplete global header (expected %d bytes, got %d).", expectedMagic.length, + actualMagic.length)); } if (!Arrays.equals(expectedMagic, actualMagic)) { - throw new ArchiveException( - "Invalid global ar archive header: " + ArchiveUtils.toAsciiString(actualMagic)); + throw new ArchiveException("Invalid global ar archive header: " + ArchiveUtils.toAsciiString(actualMagic)); } } @@ -519,7 +517,7 @@ private void skipRecordPadding() throws IOException { final int c = in.read(); if (c < 0) { throw new EOFException(String.format( - "Premature end of ar archive: missing padding for entry '%s'.", currentEntry.getName())); + "Premature end of ar archive: Missing padding for entry '%s'.", currentEntry.getName())); } count(1); } diff --git a/src/main/java/org/apache/commons/compress/archivers/sevenz/SevenZFile.java b/src/main/java/org/apache/commons/compress/archivers/sevenz/SevenZFile.java index 9f0de97efad..cdd6e19778c 100644 --- a/src/main/java/org/apache/commons/compress/archivers/sevenz/SevenZFile.java +++ b/src/main/java/org/apache/commons/compress/archivers/sevenz/SevenZFile.java @@ -97,10 +97,10 @@ private static final class ArchiveStatistics { */ void assertValidity(final int maxMemoryLimitKiB) throws IOException { if (numberOfEntriesWithStream > 0 && numberOfFolders == 0) { - throw new ArchiveException("Archive with entries but no folders"); + throw new ArchiveException("7z archive with entries but no folders"); } if (numberOfEntriesWithStream > numberOfUnpackSubStreams) { - throw new ArchiveException("Archive doesn't contain enough substreams for entries"); + throw new ArchiveException("7z archive doesn't contain enough substreams for entries"); } 
MemoryLimitException.checkKiB(bytesToKiB(estimateSizeBytes()), maxMemoryLimitKiB); } @@ -160,7 +160,7 @@ private long streamMapSize() { @Override public String toString() { - return String.format("Archive with %,d entries in %,d folders, estimated size %,d KiB.", numberOfEntries, numberOfFolders, + return String.format("7z archive with %,d entries in %,d folders, estimated size %,d KiB.", numberOfEntries, numberOfFolders, bytesToKiB(estimateSizeBytes())); } } @@ -385,7 +385,7 @@ private static int accumulate(final int sum, final int y, final String descripti try { return Math.addExact(sum, y); } catch (final ArithmeticException e) { - throw new ArchiveException("Unsupported 7-Zip archive: cannot handle more than %,d %s, but %,d present", Integer.MAX_VALUE, description, + throw new ArchiveException("7z archive: Unsupported, cannot handle more than %,d %s, but %,d present", Integer.MAX_VALUE, description, Long.sum(sum, y)); } } @@ -403,7 +403,7 @@ private static long accumulate(final long sum, final long y, final String descri try { return Math.addExact(sum, y); } catch (final ArithmeticException e) { - throw new ArchiveException("Unsupported 7-Zip archive: cannot handle more than %,d %s, but %,d present", Integer.MAX_VALUE, description, + throw new ArchiveException("7z archive: Unsupported, cannot handle more than %,d %s, but %,d present", Integer.MAX_VALUE, description, Long.sum(sum, y)); } } @@ -432,7 +432,7 @@ static long bytesToKiB(final long bytes) { */ private static ByteBuffer ensureRemaining(final ByteBuffer header, final long expectRemaining) throws ArchiveException { if (expectRemaining > header.remaining()) { - throw new ArchiveException("Corrupted 7z archive: expecting %,d bytes, remaining header size %,d", expectRemaining, header.remaining()); + throw new ArchiveException("7z archive: Corrupted, expecting %,d bytes, remaining header size %,d", expectRemaining, header.remaining()); } return header; } @@ -501,7 +501,7 @@ static int readFieldSize(final 
ByteBuffer header) throws ArchiveException { static long readRealUint64(final DataInputStream inputStream) throws IOException { final long value = Long.reverseBytes(inputStream.readLong()); if (value < 0) { - throw new ArchiveException("Unsupported 7-Zip archive: cannot handle integer larger then %d, but was %s", Integer.MAX_VALUE, + throw new ArchiveException("7z archive: Unsupported, cannot handle integer larger then %d, but was %s", Integer.MAX_VALUE, Long.toUnsignedString(value)); } return value; @@ -552,7 +552,7 @@ static long readUint64(final ByteBuffer header) throws ArchiveException { mask >>>= 1; } if (value < 0) { - throw new ArchiveException("Unsupported 7-Zip archive: can not handle integer values larger than %,d", Long.MAX_VALUE); + throw new ArchiveException("7z archive: Unsupported, cannot handle integer values larger than %,d", Long.MAX_VALUE); } return value; } @@ -572,7 +572,7 @@ private static int readUint64ToIntExact(final ByteBuffer header, final String de final long value = readUint64(header); // Values larger than Integer.MAX_VALUE are not formally forbidden, but we cannot handle them. 
if (value > Integer.MAX_VALUE) { - throw new ArchiveException("Unsupported 7-Zip archive: cannot handle %s larger then %,d, but was %,d", description, Integer.MAX_VALUE, value); + throw new ArchiveException("7z archive: Unsupported, cannot handle %s larger then %,d, but was %,d", description, Integer.MAX_VALUE, value); } return (int) value; } @@ -600,7 +600,7 @@ private static void skipBytesFully(final ByteBuffer header, final long propertyS private static int toNonNegativeInt(final String description, final long value) throws ArchiveException { assert value >= 0 : "value is supposed to be non-negative"; if (value > Integer.MAX_VALUE) { - throw new ArchiveException("Unsupported 7-Zip archive: cannot handle %s larger then %d, but was %s", description, Integer.MAX_VALUE, + throw new ArchiveException("7z archive: Unsupported, cannot handle %s larger then %d, but was %s", description, Integer.MAX_VALUE, Long.toUnsignedString(value)); } return (int) value; @@ -1276,7 +1276,7 @@ private Archive initializeArchive(final StartHeader startHeader, final byte[] pa nid = getUnsignedByte(buf); } if (nid != NID.kHeader) { - throw new ArchiveException("Broken or unsupported archive: no Header"); + throw new ArchiveException("7z archive: Broken or unsupported, no Header"); } readHeader(buf, archive); archive.subStreamsInfo = null; @@ -1556,7 +1556,7 @@ private void readFilesInfo(final ByteBuffer header, final Archive archive) throw } // We should have consumed all the bytes by now if (header.remaining() > 0) { - throw new ArchiveException("Unsupported 7z archive: property 0x%02d has %d trailing bytes.", propertyType, header.remaining()); + throw new ArchiveException("7z archive: Unsupported, property 0x%02d has %d trailing bytes.", propertyType, header.remaining()); } // Restore original limit header.limit(originalLimit); @@ -1571,7 +1571,7 @@ private void readFilesInfo(final ByteBuffer header, final Archive archive) throw entryAtIndex.setHasStream(isEmptyStream == null || 
!isEmptyStream.get(i)); if (entryAtIndex.hasStream()) { if (archive.subStreamsInfo == null) { - throw new ArchiveException("Archive contains file with streams but no subStreamsInfo"); + throw new ArchiveException("7z archive: Archive contains file with streams but no subStreamsInfo."); } entryAtIndex.setDirectory(false); entryAtIndex.setAntiItem(false); @@ -1595,7 +1595,7 @@ Folder readFolder(final ByteBuffer header) throws IOException { final Folder folder = new Folder(); final long numCoders = readUint64(header); if (numCoders == 0 || numCoders > MAX_CODERS_PER_FOLDER) { - throw new ArchiveException("Unsupported 7z archive: " + numCoders + " coders in folder."); + throw new ArchiveException("7z archive: Unsupported, " + numCoders + " coders in folder."); } final Coder[] coders = new Coder[(int) numCoders]; int totalInStreams = 0; @@ -1615,16 +1615,16 @@ Folder readFolder(final ByteBuffer header) throws IOException { } else { numInStreams = readUint64(header); if (numInStreams > MAX_CODER_STREAMS_PER_FOLDER) { - throw new ArchiveException("Unsupported 7z archive: %,d coder input streams in folder.", numInStreams); + throw new ArchiveException("7z archive: Unsupported, %,d coder input streams in folder.", numInStreams); } numOutStreams = readUint64(header); if (numOutStreams != 1) { - throw new ArchiveException("Unsupported 7z archive: %,d coder output streams in folder.", numOutStreams); + throw new ArchiveException("7z archive: Unsupported, %,d coder output streams in folder.", numOutStreams); } } totalInStreams += (int) numInStreams; if (totalInStreams > MAX_CODER_STREAMS_PER_FOLDER) { - throw new ArchiveException("Unsupported 7z archive: %,d coder input streams in folder.", totalInStreams); + throw new ArchiveException("7z archive: Unsupported, %,d coder input streams in folder.", totalInStreams); } totalOutStreams += (int) numOutStreams; byte[] properties = null; @@ -1634,7 +1634,7 @@ Folder readFolder(final ByteBuffer header) throws IOException { } // would 
need to keep looping as above: if (moreAlternativeMethods) { - throw new ArchiveException("Unsupported 7z archive: alternative methods are unsupported, please report. " + throw new ArchiveException("7z archive: Unsupported, Alternative methods are unsupported, please report. " + "The reference implementation doesn't support them either."); } coders[i] = new Coder(decompressionMethodId, numInStreams, numOutStreams, properties); @@ -1647,11 +1647,11 @@ Folder readFolder(final ByteBuffer header) throws IOException { for (int i = 0; i < bindPairs.length; i++) { final long inIndex = readUint64(header); if (inIndex >= totalInStreams) { - throw new ArchiveException("Unsupported 7z archive: bind pair inIndex %d out of range.", inIndex); + throw new ArchiveException("7z archive: Unsupported, Bind pair inIndex %d out of range.", inIndex); } final long outIndex = readUint64(header); if (outIndex >= totalOutStreams) { - throw new ArchiveException("Unsupported 7z archive: bind pair outIndex %d out of range.", inIndex); + throw new ArchiveException("7z archive: Unsupported, Bind pair outIndex %d out of range.", inIndex); } bindPairs[i] = new BindPair(inIndex, outIndex); } @@ -1670,7 +1670,7 @@ Folder readFolder(final ByteBuffer header) throws IOException { for (int i = 0; i < numPackedStreams; i++) { packedStreams[i] = readUint64(header); if (packedStreams[i] >= totalInStreams) { - throw new ArchiveException("Unsupported 7z archive: packed stream index %d out of range.", packedStreams[i]); + throw new ArchiveException("7z archive: Unsupported, Packed stream index %d out of range.", packedStreams[i]); } } } @@ -1695,7 +1695,7 @@ private void readHeader(final ByteBuffer header, final Archive archive) throws I nid = getUnsignedByte(header); } if (nid == NID.kAdditionalStreamsInfo) { - throw new ArchiveException("Additional streams unsupported"); + throw new ArchiveException("7z archive: Additional streams unsupported"); // nid = getUnsignedByte(header); } if (nid == 
NID.kMainStreamsInfo) { @@ -1720,7 +1720,7 @@ private Archive readHeaders(final byte[] password) throws IOException { final byte archiveVersionMajor = buf.get(); final byte archiveVersionMinor = buf.get(); if (archiveVersionMajor != 0) { - throw new ArchiveException("Unsupported 7z version (%d,%d)", archiveVersionMajor, archiveVersionMinor); + throw new ArchiveException("7z archive: Unsupported 7z version (%d,%d)", archiveVersionMajor, archiveVersionMinor); } boolean headerLooksValid = false; // See https://www.7-zip.org/recover.html - "There is no correct End Header at the end of archive" final long startHeaderCrc = readUint32(buf); @@ -1747,7 +1747,7 @@ private Archive readHeaders(final byte[] password) throws IOException { if (tryToRecoverBrokenArchives) { return tryToLocateEndHeader(password); } - throw new ArchiveException("Archive seems to be invalid. You may want to retry and enable the tryToRecoverBrokenArchives if " + throw new ArchiveException("7z archive seems to be invalid. You may want to retry and enable the tryToRecoverBrokenArchives if " + "the archive could be a multi volume archive that has been closed prematurely."); } @@ -1849,7 +1849,7 @@ private void readSubStreamsInfo(final ByteBuffer header, final Archive archive) } } if (totalUnpackSize > folder.getUnpackSize()) { - throw new ArchiveException("Sum of unpack sizes of folder exceeds total unpack size"); + throw new ArchiveException("7z archive: Sum of unpack sizes of folder exceeds total unpack size"); } subStreamsInfo.unpackSizes[nextUnpackStream++] = folder.getUnpackSize() - totalUnpackSize; } @@ -1958,7 +1958,7 @@ private ArchiveStatistics sanityCheckAndCollectStatistics(final ByteBuffer heade nid = getUnsignedByte(header); } if (nid == NID.kAdditionalStreamsInfo) { - throw new ArchiveException("Additional streams unsupported"); + throw new ArchiveException("7z archive: Additional streams unsupported"); // nid = getUnsignedByte(header); } if (nid == NID.kMainStreamsInfo) { @@ -1970,7 
+1970,7 @@ private ArchiveStatistics sanityCheckAndCollectStatistics(final ByteBuffer heade nid = getUnsignedByte(header); } if (nid != NID.kEnd) { - throw new ArchiveException("Badly terminated header, found %s", nid); + throw new ArchiveException("7z archive: Badly terminated header, found %s", nid); } return stats; } @@ -2004,14 +2004,14 @@ private void sanityCheckFilesInfo(final ByteBuffer header, final ArchiveStatisti } case NID.kEmptyFile: { if (emptyStreams == -1) { - throw new ArchiveException("Header format error: kEmptyStream must appear before kEmptyFile"); + throw new ArchiveException("7z archive: Header format error: kEmptyStream must appear before kEmptyFile"); } skipBytesFully(header, size); break; } case NID.kAnti: { if (emptyStreams == -1) { - throw new ArchiveException("Header format error: kEmptyStream must appear before kAnti"); + throw new ArchiveException("7z archive: Header format error: kEmptyStream must appear before kAnti"); } skipBytesFully(header, size); break; @@ -2019,11 +2019,11 @@ private void sanityCheckFilesInfo(final ByteBuffer header, final ArchiveStatisti case NID.kName: { // 1 byte for external and sequence of zero-terminated UTF-16 strings. 
if (size % 2 != 1) { - throw new ArchiveException("File names length invalid"); + throw new ArchiveException("7z archive: File names length invalid"); } final int external = getUnsignedByte(header); if (external != 0) { - throw new ArchiveException("Not implemented"); + throw new ArchiveException("7z archive: Not implemented"); } int filesSeen = 0; while (header.remaining() > 0) { @@ -2033,7 +2033,7 @@ private void sanityCheckFilesInfo(final ByteBuffer header, final ArchiveStatisti } } if (filesSeen != stats.numberOfEntries) { - throw new ArchiveException("Invalid number of file names (%,d instead of %,d)", filesSeen, stats.numberOfEntries); + throw new ArchiveException("7z archive: Invalid number of file names (%,d instead of %,d)", filesSeen, stats.numberOfEntries); } break; } @@ -2044,13 +2044,13 @@ private void sanityCheckFilesInfo(final ByteBuffer header, final ArchiveStatisti final int definedCount = readAllOrBits(header, stats.numberOfEntries).cardinality(); final int external = getUnsignedByte(header); if (external != 0) { - throw new ArchiveException("Not implemented"); + throw new ArchiveException("7z archive: Not implemented"); } skipBytesFully(header, (propertyType == NID.kWinAttributes ? 
UINT32_BYTES : REAL_UINT64_BYTES) * definedCount); break; } case NID.kStartPos: { - throw new ArchiveException("kStartPos is unsupported, please report"); + throw new ArchiveException("7z archive: kStartPos is unsupported, please report"); } case NID.kDummy: { // 7z 9.20 asserts the content is all zeros and ignores the property @@ -2066,7 +2066,7 @@ private void sanityCheckFilesInfo(final ByteBuffer header, final ArchiveStatisti } // We should have consumed all the bytes by now if (header.remaining() > 0) { - throw new ArchiveException("Unsupported 7z archive: property 0x%02d has %d trailing bytes.", propertyType, header.remaining()); + throw new ArchiveException("7z archive: Unsupported, property 0x%02d has %d trailing bytes.", propertyType, header.remaining()); } // Restore original limit header.limit(originalLimit); @@ -2077,7 +2077,7 @@ private void sanityCheckFilesInfo(final ByteBuffer header, final ArchiveStatisti private long sanityCheckFolder(final ByteBuffer header, final ArchiveStatistics stats) throws IOException { final long numCoders = readUint64(header); if (numCoders == 0 || numCoders > MAX_CODERS_PER_FOLDER) { - throw new ArchiveException("Unsupported 7z archive: %,d coders in folder.", numCoders); + throw new ArchiveException("7z archive: Unsupported, %,d coders in folder.", numCoders); } stats.numberOfCoders = accumulate(stats.numberOfCoders, numCoders, "numCoders"); int totalInStreams = 0; @@ -2089,17 +2089,18 @@ private long sanityCheckFolder(final ByteBuffer header, final ArchiveStatistics final boolean hasAttributes = (bits & 0x20) != 0; final boolean moreAlternativeMethods = (bits & 0x80) != 0; if (moreAlternativeMethods) { - throw new ArchiveException("Alternative methods are unsupported, please report. The reference implementation doesn't support them either."); + throw new ArchiveException( + "7z archive: Alternative methods are unsupported, please report. 
The reference implementation doesn't support them either."); } if (isSimple) { totalInStreams++; } else { final long numInStreams = readUint64(header); if (numInStreams > MAX_CODER_STREAMS_PER_FOLDER) { - throw new ArchiveException("Unsupported 7z archive: %,d coder input streams in folder.", numInStreams); + throw new ArchiveException("7z archive: Unsupported, %,d coder input streams in folder.", numInStreams); } if (readUint64(header) != 1) { - throw new ArchiveException("Unsupported 7z archive: %,d coder output streams in folder.", readUint64(header)); + throw new ArchiveException("7z archive: Unsupported, %,d coder output streams in folder.", readUint64(header)); } totalInStreams += (int) numInStreams; } @@ -2113,30 +2114,30 @@ private long sanityCheckFolder(final ByteBuffer header, final ArchiveStatistics stats.numberOfInStreams = accumulate(stats.numberOfInStreams, totalInStreams, "numInStreams"); final int numBindPairs = totalOutStreams - 1; if (totalInStreams < numBindPairs) { - throw new ArchiveException("Total input streams can't be less than the number of bind pairs"); + throw new ArchiveException("7z archive: Total input streams can't be less than the number of bind pairs"); } final BitSet inStreamsBound = new BitSet(totalInStreams); for (int i = 0; i < numBindPairs; i++) { final int inIndex = readUint64ToIntExact(header, "inIndex"); if (totalInStreams <= inIndex) { - throw new ArchiveException("inIndex is bigger than number of inStreams"); + throw new ArchiveException("7z archive: inIndex is bigger than number of inStreams"); } inStreamsBound.set(inIndex); final int outIndex = readUint64ToIntExact(header, "outIndex"); if (totalOutStreams <= outIndex) { - throw new ArchiveException("outIndex is bigger than number of outStreams"); + throw new ArchiveException("7z archive: outIndex is bigger than number of outStreams"); } } final int numPackedStreams = toNonNegativeInt("numPackedStreams", totalInStreams - numBindPairs); if (numPackedStreams == 1) { if 
(inStreamsBound.nextClearBit(0) == -1) { - throw new ArchiveException("Couldn't find stream's bind pair index"); + throw new ArchiveException("7z archive: Couldn't find stream's bind pair index"); } } else { for (int i = 0; i < numPackedStreams; i++) { final int packedStreamIndex = readUint64ToIntExact(header, "packedStreamIndex"); if (packedStreamIndex >= totalInStreams) { - throw new ArchiveException("packedStreamIndex is bigger than number of totalInStreams"); + throw new ArchiveException("7z archive: packedStreamIndex is bigger than number of totalInStreams"); } } } @@ -2146,7 +2147,7 @@ private long sanityCheckFolder(final ByteBuffer header, final ArchiveStatistics private void sanityCheckPackInfo(final ByteBuffer header, final ArchiveStatistics stats) throws IOException { final long packPos = readUint64(header); if (packPos > channel.size() - SIGNATURE_HEADER_SIZE) { - throw new ArchiveException("packPos (%,d) is out of range", packPos); + throw new ArchiveException("7z archive: packPos (%,d) is out of range", packPos); } stats.numberOfPackedStreams = readUint64ToIntExact(header, "numPackStreams"); int nid = getUnsignedByte(header); @@ -2158,7 +2159,7 @@ private void sanityCheckPackInfo(final ByteBuffer header, final ArchiveStatistic totalPackSizes = accumulate(totalPackSizes, packSize, "packSize"); // We check the total pack size against the file size. 
if (totalPackSizes > channel.size() - SIGNATURE_HEADER_SIZE - packPos) { - throw new ArchiveException("packSize (%,d) is out of range", packSize); + throw new ArchiveException("7z archive: packSize (%,d) is out of range", packSize); } } nid = getUnsignedByte(header); @@ -2169,7 +2170,7 @@ private void sanityCheckPackInfo(final ByteBuffer header, final ArchiveStatistic nid = getUnsignedByte(header); } if (nid != NID.kEnd) { - throw new ArchiveException("Badly terminated PackInfo (%s)", nid); + throw new ArchiveException("7z archive: Badly terminated PackInfo (%s)", nid); } } @@ -2188,7 +2189,7 @@ private void sanityCheckStreamsInfo(final ByteBuffer header, final ArchiveStatis nid = getUnsignedByte(header); } if (nid != NID.kEnd) { - throw new ArchiveException("Badly terminated StreamsInfo"); + throw new ArchiveException("7z archive: Badly terminated StreamsInfo"); } } @@ -2233,19 +2234,19 @@ private void sanityCheckSubStreamsInfo(final ByteBuffer header, final ArchiveSta nid = getUnsignedByte(header); } if (nid != NID.kEnd) { - throw new ArchiveException("Badly terminated SubStreamsInfo"); + throw new ArchiveException("7z archive: Badly terminated SubStreamsInfo"); } } private void sanityCheckUnpackInfo(final ByteBuffer header, final ArchiveStatistics stats) throws IOException { int nid = getUnsignedByte(header); if (nid != NID.kFolder) { - throw new ArchiveException("Expected NID.kFolder, got %s", nid); + throw new ArchiveException("7z archive: Expected NID.kFolder, got %s", nid); } stats.numberOfFolders = readUint64ToIntExact(header, "numFolders"); final int external = getUnsignedByte(header); if (external != 0) { - throw new ArchiveException("External unsupported"); + throw new ArchiveException("7z archive: External unsupported"); } final List numberOfOutputStreamsPerFolder = new LinkedList<>(); for (int i = 0; i < stats.numberOfFolders; i++) { @@ -2254,11 +2255,11 @@ private void sanityCheckUnpackInfo(final ByteBuffer header, final ArchiveStatist final long 
totalNumberOfBindPairs = stats.numberOfOutStreams - stats.numberOfFolders; final long packedStreamsRequiredByFolders = stats.numberOfInStreams - totalNumberOfBindPairs; if (packedStreamsRequiredByFolders < stats.numberOfPackedStreams) { - throw new ArchiveException("Archive doesn't contain enough packed streams"); + throw new ArchiveException("7z archive: Archive doesn't contain enough packed streams"); } nid = getUnsignedByte(header); if (nid != NID.kCodersUnpackSize) { - throw new ArchiveException("Expected kCodersUnpackSize, got %s", nid); + throw new ArchiveException("7z archive: Expected kCodersUnpackSize, got %s", nid); } for (final long numberOfOutputStreams : numberOfOutputStreamsPerFolder) { for (long i = 0; i < numberOfOutputStreams; i++) { @@ -2273,7 +2274,7 @@ private void sanityCheckUnpackInfo(final ByteBuffer header, final ArchiveStatist nid = getUnsignedByte(header); } if (nid != NID.kEnd) { - throw new ArchiveException("Badly terminated UnpackInfo"); + throw new ArchiveException("7z archive: Badly terminated UnpackInfo"); } } @@ -2419,6 +2420,6 @@ private Archive tryToLocateEndHeader(final byte[] password) throws IOException { } } } - throw new ArchiveException("Start header corrupt and unable to guess end header"); + throw new ArchiveException("7z archive: Start header corrupt and unable to guess end header"); } } diff --git a/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveInputStream.java b/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveInputStream.java index 57cd596ca7a..f45b705eeb2 100644 --- a/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveInputStream.java +++ b/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveInputStream.java @@ -322,7 +322,7 @@ private void afterRead(final int read) throws IOException { count(read); // Check for truncated entries if (read == -1 && entryOffset < currEntry.getSize()) { - throw new EOFException(String.format("Truncated TAR archive: entry 
'%s' expected %,d bytes, actual %,d", currEntry.getName(), currEntry.getSize(), + throw new EOFException(String.format("Truncated TAR archive: Entry '%s' expected %,d bytes, actual %,d", currEntry.getName(), currEntry.getSize(), entryOffset)); } entryOffset += Math.max(0, read); @@ -757,7 +757,7 @@ private void skipRecordPadding() throws IOException { final long skipped = org.apache.commons.io.IOUtils.skip(in, padding); count(skipped); if (skipped != padding) { - throw new EOFException(String.format("Truncated TAR archive: failed to skip record padding for entry '%s'", currEntry.getName())); + throw new EOFException(String.format("Truncated TAR archive: Failed to skip record padding for entry '%s'", currEntry.getName())); } } } diff --git a/src/main/java/org/apache/commons/compress/archivers/tar/TarFile.java b/src/main/java/org/apache/commons/compress/archivers/tar/TarFile.java index d9cd604a610..0612626ed48 100644 --- a/src/main/java/org/apache/commons/compress/archivers/tar/TarFile.java +++ b/src/main/java/org/apache/commons/compress/archivers/tar/TarFile.java @@ -81,7 +81,7 @@ protected int read(final long pos, final ByteBuffer buf) throws IOException { final int totalRead = channel.read(buf); if (totalRead == -1) { if (buf.remaining() > 0) { - throw new EOFException(String.format("Truncated TAR archive: expected at least %d bytes, but got only %d bytes", + throw new EOFException(String.format("Truncated TAR archive: Expected at least %d bytes, but got only %d bytes", end, channel.position())); } // Marks the TarFile as having reached EOF. 
@@ -637,7 +637,7 @@ public IOStream extends TarArchiveEntry> stream() { */ private void throwExceptionIfPositionIsNotInArchive() throws IOException { if (archive.size() < archive.position()) { - throw new EOFException("Truncated TAR archive: archive should be at least " + archive.position() + " bytes but was " + archive.size() + " bytes"); + throw new EOFException("Truncated TAR archive: Archive should be at least " + archive.position() + " bytes but was " + archive.size() + " bytes"); } } diff --git a/src/main/java/org/apache/commons/compress/archivers/tar/TarUtils.java b/src/main/java/org/apache/commons/compress/archivers/tar/TarUtils.java index c7451e81576..73bfdf90267 100644 --- a/src/main/java/org/apache/commons/compress/archivers/tar/TarUtils.java +++ b/src/main/java/org/apache/commons/compress/archivers/tar/TarUtils.java @@ -851,7 +851,7 @@ static String readLongName(final InputStream input, final ZipEncoding encoding, final byte[] name = IOUtils.readRange(input, declaredLength); int actualLength = name.length; if (actualLength != declaredLength) { - throw new EOFException(String.format("Truncated long name entry: expected %,d bytes, read %,d bytes.", declaredLength, actualLength)); + throw new EOFException(String.format("Truncated long name entry: Expected %,d bytes, read %,d bytes.", declaredLength, actualLength)); } while (actualLength > 0 && name[actualLength - 1] == 0) { actualLength--; @@ -872,7 +872,7 @@ static List readSparseStructs(final byte[] buffer, final throw new ArchiveException("Corrupted TAR archive: Sparse entry with negative offset."); } if (sparseHeader.getNumbytes() < 0) { - throw new ArchiveException("Corrupted TAR archive: sparse entry with negative numbytes."); + throw new ArchiveException("Corrupted TAR archive: Sparse entry with negative numbytes."); } sparseHeaders.add(sparseHeader); } catch (final IllegalArgumentException e) { diff --git a/src/main/java/org/apache/commons/compress/harmony/unpack200/bytecode/forms/ByteCodeForm.java 
b/src/main/java/org/apache/commons/compress/harmony/unpack200/bytecode/forms/ByteCodeForm.java index 5e5693cfda1..39a7e2338e6 100644 --- a/src/main/java/org/apache/commons/compress/harmony/unpack200/bytecode/forms/ByteCodeForm.java +++ b/src/main/java/org/apache/commons/compress/harmony/unpack200/bytecode/forms/ByteCodeForm.java @@ -378,7 +378,7 @@ protected void calculateOperandPosition() { // If last < first, something is wrong. if (difference < 0) { - throw new IllegalStateException("Logic error: not finding rewrite operands correctly"); + throw new IllegalStateException("Logic error: Not finding rewrite operands correctly"); } operandLength = difference + 1; } diff --git a/src/main/java/org/apache/commons/compress/utils/ArchiveUtils.java b/src/main/java/org/apache/commons/compress/utils/ArchiveUtils.java index 98654f962ce..711f20054bb 100644 --- a/src/main/java/org/apache/commons/compress/utils/ArchiveUtils.java +++ b/src/main/java/org/apache/commons/compress/utils/ArchiveUtils.java @@ -49,7 +49,7 @@ public class ArchiveUtils { public static int checkEntryNameLength(long length, int maxEntryNameLength, String archiveType) throws ArchiveException, MemoryLimitException { if (length > maxEntryNameLength) { - throw new ArchiveException("Invalid %s archive entry: invalid file name length %,d (must be between 1 and %,d). If the file is not corrupt, " + + throw new ArchiveException("Invalid %s archive entry: Invalid file name length %,d (must be between 1 and %,d). 
If the file is not corrupt, " + "consider increasing the `maxEntryNameLength` limit.", archiveType, length, maxEntryNameLength); } MemoryLimitException.checkBytes(length, SOFT_MAX_ARRAY_LENGTH); diff --git a/src/test/java/org/apache/commons/compress/archivers/sevenz/SevenZFileTest.java b/src/test/java/org/apache/commons/compress/archivers/sevenz/SevenZFileTest.java index 0c338c86ecb..fea72af11c0 100644 --- a/src/test/java/org/apache/commons/compress/archivers/sevenz/SevenZFileTest.java +++ b/src/test/java/org/apache/commons/compress/archivers/sevenz/SevenZFileTest.java @@ -668,7 +668,7 @@ void testNoOOMOnCorruptedHeader() throws IOException { // do nothing } }, "Expected IOException: start header corrupt and unable to guess end header"); - assertEquals("Start header corrupt and unable to guess end header", e.getMessage()); + assertEquals("7z archive: Start header corrupt and unable to guess end header", e.getMessage()); } } @@ -1023,7 +1023,7 @@ void testReadFolder_Unsupported(final Consumer folderWriter) throws final ArchiveException e = assertThrows(ArchiveException.class, () -> { file.readFolder(buffer); }); - assertTrue(e.getMessage().contains("Unsupported 7z archive")); + assertTrue(e.getMessage().contains("7z archive: Unsupported")); } } @@ -1148,7 +1148,7 @@ void testReadUint32_Valid(final byte[] input, final long expected) throws IOExce void testReadUint64_Overflow(final byte[] bytes) { final ByteBuffer buf = ByteBuffer.wrap(bytes); final ArchiveException ex = assertThrows(ArchiveException.class, () -> SevenZFile.readUint64(buf)); - assertTrue(ex.getMessage().contains("Unsupported 7-Zip archive")); + assertTrue(ex.getMessage().contains("7z archive: Unsupported"), ex.getMessage()); } @ParameterizedTest From 040b9457e2f56815a605d5844a039a497d1df29e Mon Sep 17 00:00:00 2001 From: "Piotr P. 
Karwasz" Date: Sun, 19 Oct 2025 12:38:49 +0200 Subject: [PATCH 33/40] Simplify `PackingUtils` using `IOUtils` (#737) This change simplifies `PackingUtils` by leveraging `IOUtils` from Commons IO for common I/O operations, reducing boilerplate and improving readability. --- src/changes/changes.xml | 1 + .../harmony/pack200/PackingUtils.java | 40 +++++-------------- 2 files changed, 12 insertions(+), 29 deletions(-) diff --git a/src/changes/changes.xml b/src/changes/changes.xml index 6a3517de87f..8b529516b61 100644 --- a/src/changes/changes.xml +++ b/src/changes/changes.xml @@ -89,6 +89,7 @@ The type attribute can be add,update,fix,remove. org.apache.commons.compress.harmony.pack200 now throws Pack200Exception, IllegalArgumentException, IllegalStateException, instead of other runtime exceptions and Error. Extract duplicate code in org.apache.commons.compress.harmony.pack200.IntList. +Simplify `PackingUtils` by leveraging Commons IO. CpioArchiveEntry now throws ArchiveException instead of Arithmetic exception. CpioArchiveInputStream.getNextEntry() now throws a MemoryLimitException instead of OutOfMemoryError when it can't process input greater than available memory. 
diff --git a/src/main/java/org/apache/commons/compress/harmony/pack200/PackingUtils.java b/src/main/java/org/apache/commons/compress/harmony/pack200/PackingUtils.java index 6148ee2cbf1..63d7edd0c7c 100644 --- a/src/main/java/org/apache/commons/compress/harmony/pack200/PackingUtils.java +++ b/src/main/java/org/apache/commons/compress/harmony/pack200/PackingUtils.java @@ -18,7 +18,6 @@ */ package org.apache.commons.compress.harmony.pack200; -import java.io.BufferedInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.InputStream; @@ -40,6 +39,8 @@ import java.util.logging.SimpleFormatter; import org.apache.commons.compress.harmony.pack200.Archive.PackingFile; +import org.apache.commons.io.IOUtils; +import org.apache.commons.io.function.IOConsumer; public class PackingUtils { @@ -97,16 +98,12 @@ public static void config(final PackingOptions options) throws IOException { public static void copyThroughJar(final JarFile jarFile, final OutputStream outputStream) throws IOException { try (JarOutputStream jarOutputStream = new JarOutputStream(outputStream)) { jarOutputStream.setComment("PACK200"); - final byte[] bytes = new byte[16384]; final Enumerationentries = jarFile.entries(); while (entries.hasMoreElements()) { final JarEntry jarEntry = entries.nextElement(); jarOutputStream.putNextEntry(jarEntry); try (InputStream inputStream = jarFile.getInputStream(jarEntry)) { - int bytesRead; - while ((bytesRead = inputStream.read(bytes)) != -1) { - jarOutputStream.write(bytes, 0, bytesRead); - } + IOUtils.copyLarge(inputStream, jarOutputStream); jarOutputStream.closeEntry(); log("Packed " + jarEntry.getName()); } @@ -128,14 +125,10 @@ public static void copyThroughJar(final JarInputStream jarInputStream, final Out jarOutputStream.setComment("PACK200"); log("Packed " + JarFile.MANIFEST_NAME); - final byte[] bytes = new byte[16384]; JarEntry jarEntry; - int bytesRead; while ((jarEntry = jarInputStream.getNextJarEntry()) != null) { 
jarOutputStream.putNextEntry(jarEntry); - while ((bytesRead = jarInputStream.read(bytes)) != -1) { - jarOutputStream.write(bytes, 0, bytesRead); - } + IOUtils.copyLarge(jarInputStream, jarOutputStream); log("Packed " + jarEntry.getName()); } jarInputStream.close(); @@ -144,14 +137,11 @@ public static void copyThroughJar(final JarInputStream jarInputStream, final Out public static List getPackingFileListFromJar(final JarFile jarFile, final boolean keepFileOrder) throws IOException { final List packingFileList = new ArrayList<>(); - final Enumeration jarEntries = jarFile.entries(); - while (jarEntries.hasMoreElements()) { - final JarEntry jarEntry = jarEntries.nextElement(); + IOConsumer.forEach(jarFile.stream(), jarEntry -> { try (InputStream inputStream = jarFile.getInputStream(jarEntry)) { - final byte[] bytes = readJarEntry(jarEntry, new BufferedInputStream(inputStream)); - packingFileList.add(new PackingFile(bytes, jarEntry)); + packingFileList.add(new PackingFile(readJarEntry(jarEntry, inputStream), jarEntry)); } - } + }); // check whether it need reorder packing file list if (!keepFileOrder) { @@ -173,10 +163,8 @@ public static List getPackingFileListFromJar(final JarInputStream j // add rest of entries in the jar JarEntry jarEntry; - byte[] bytes; while ((jarEntry = jarInputStream.getNextJarEntry()) != null) { - bytes = readJarEntry(jarEntry, new BufferedInputStream(jarInputStream)); - packingFileList.add(new PackingFile(bytes, jarEntry)); + packingFileList.add(new PackingFile(readJarEntry(jarEntry, jarInputStream), jarEntry)); } // check whether it need reorder packing file list @@ -191,19 +179,13 @@ public static void log(final String message) { } private static byte[] readJarEntry(final JarEntry jarEntry, final InputStream inputStream) throws IOException { - long size = jarEntry.getSize(); + final long size = jarEntry.getSize(); if (size > Integer.MAX_VALUE) { // TODO: Should probably allow this throw new IllegalArgumentException("Large Class!"); } - if 
(size < 0) { - size = 0; - } - final byte[] bytes = new byte[(int) size]; - if (inputStream.read(bytes) != size) { - throw new IllegalArgumentException("Error reading from stream"); - } - return bytes; + // Negative size means unknown size + return size < 0 ? IOUtils.toByteArray(inputStream) : IOUtils.toByteArray(inputStream, (int) size, IOUtils.DEFAULT_BUFFER_SIZE); } private static void reorderPackingFiles(final List packingFileList) { From 6bf5ee354e2c41aaa8d20ef0e1ef0b8276201935 Mon Sep 17 00:00:00 2001 From: "Gary D. Gregory" Date: Sun, 19 Oct 2025 08:56:00 -0400 Subject: [PATCH 34/40] Fix grammar --- .github/pull_request_template.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md index 7578b4da036..4cbe168c3e8 100644 --- a/.github/pull_request_template.md +++ b/.github/pull_request_template.md @@ -25,6 +25,6 @@ Before you push a pull request, review this list: - [ ] Read the [ASF Generative Tooling Guidance](https://www.apache.org/legal/generative-tooling.html) if you use Artificial Intelligence (AI). - [ ] I used AI to create any part of, or all of, this pull request. - [ ] Run a successful build using the default [Maven](https://maven.apache.org/) goal with `mvn`; that's `mvn` on the command line by itself. -- [ ] Write unit tests that match behavioral changes, where the tests fail if the changes to the runtime are not applied. This may not always be possible, but it is a best-practice. +- [ ] Write unit tests that match behavioral changes, where the tests fail if the changes to the runtime are not applied. This may not always be possible, but it is a best practice. - [ ] Write a pull request description that is detailed enough to understand what the pull request does, how, and why. - [ ] Each commit in the pull request should have a meaningful subject line and body. Note that a maintainer may squash commits during the merge process. 
From 971718abd46b64f17f3e094050216ff2d066bf88 Mon Sep 17 00:00:00 2001 From: "Piotr P. Karwasz" Date: Sun, 19 Oct 2025 15:02:08 +0200 Subject: [PATCH 35/40] 7z: optimize header loading (#735) * 7z: optimize header loading This change improves the efficiency of 7z header parsing: * Reads the **Signature Header** in a single ByteBuffer instead of multiple small reads, reducing overhead. * Uses a `MappedByteBuffer` to load the **Next Header** when the archive is backed by a `FileChannel`, improving performance for large headers by avoiding unnecessary copies. No new tests are added, as the existing test suite already exercises the affected header loading paths sufficiently. * fix: rename `computeChecksum` to `crc32` * fix: improve error messages --- .../compress/archivers/sevenz/SevenZFile.java | 166 ++++++++---------- .../archivers/sevenz/StartHeader.java | 9 + .../archivers/sevenz/SevenZFileTest.java | 18 +- 3 files changed, 83 insertions(+), 110 deletions(-) diff --git a/src/main/java/org/apache/commons/compress/archivers/sevenz/SevenZFile.java b/src/main/java/org/apache/commons/compress/archivers/sevenz/SevenZFile.java index cdd6e19778c..2d2117dca14 100644 --- a/src/main/java/org/apache/commons/compress/archivers/sevenz/SevenZFile.java +++ b/src/main/java/org/apache/commons/compress/archivers/sevenz/SevenZFile.java @@ -20,7 +20,6 @@ import java.io.BufferedInputStream; import java.io.ByteArrayInputStream; -import java.io.DataInputStream; import java.io.EOFException; import java.io.File; import java.io.FilterInputStream; @@ -28,7 +27,7 @@ import java.io.InputStream; import java.nio.ByteBuffer; import java.nio.ByteOrder; -import java.nio.channels.Channels; +import java.nio.channels.FileChannel; import java.nio.channels.SeekableByteChannel; import java.util.ArrayList; import java.util.Arrays; @@ -39,7 +38,6 @@ import java.util.Map; import java.util.Objects; import java.util.zip.CRC32; -import java.util.zip.CheckedInputStream; import 
org.apache.commons.compress.MemoryLimitException; import org.apache.commons.compress.archivers.AbstractArchiveBuilder; @@ -437,6 +435,14 @@ private static ByteBuffer ensureRemaining(final ByteBuffer header, final long ex return header; } + private static long crc32(final ByteBuffer header) { + final int currentPosition = header.position(); + final CRC32 crc = new CRC32(); + crc.update(header); + header.position(currentPosition); + return crc.getValue(); + } + /** * Wrapper of {@link ByteBuffer#get(byte[])} that checks remaining bytes first. */ @@ -492,14 +498,14 @@ static int readFieldSize(final ByteBuffer header) throws ArchiveException { } /** - * Reads a 7z REAL_UINT64 from the stream. + * Reads a 7z REAL_UINT64 from the header. * - * @param inputStream the input stream containing the 7z header. + * @param header the buffer containing the 7z header. * @return a non-negative long. * @throws ArchiveException if the value is truncated or too large. */ - static long readRealUint64(final DataInputStream inputStream) throws IOException { - final long value = Long.reverseBytes(inputStream.readLong()); + static long readRealUint64(final ByteBuffer header) throws IOException { + final long value = header.getLong(); if (value < 0) { throw new ArchiveException("7z archive: Unsupported, cannot handle integer larger then %d, but was %s", Integer.MAX_VALUE, Long.toUnsignedString(value)); @@ -518,18 +524,6 @@ static long readUint32(final ByteBuffer header) throws ArchiveException { return Integer.toUnsignedLong(getInt(header)); } - - /** - * Reads a 7z UINT32 from the stream. - * - * @param inputStream the input stream containing the 7z header. - * @return a non-negative long. - * @throws ArchiveException if the value is truncated. - */ - static long readUint32(final DataInputStream inputStream) throws IOException { - return Integer.toUnsignedLong(Integer.reverseBytes(inputStream.readInt())); - } - /** * Reads a 7z UINT64 from the header. 
* @@ -1251,34 +1245,22 @@ private boolean hasCurrentEntryBeenRead() { } private Archive initializeArchive(final StartHeader startHeader, final byte[] password, final boolean verifyCrc) throws IOException { - MemoryLimitException.checkKiB(bytesToKiB(startHeader.nextHeaderSize), Math.min(bytesToKiB(org.apache.commons.io.IOUtils.SOFT_MAX_ARRAY_LENGTH), - maxMemoryLimitKiB)); - channel.position(SIGNATURE_HEADER_SIZE + startHeader.nextHeaderOffset); - if (verifyCrc) { - final long position = channel.position(); - final CheckedInputStream cis = new CheckedInputStream(Channels.newInputStream(channel), new CRC32()); - if (cis.skip(startHeader.nextHeaderSize) != startHeader.nextHeaderSize) { - throw new ArchiveException("Problem computing NextHeader CRC-32"); - } - if (startHeader.nextHeaderCrc != cis.getChecksum().getValue()) { - throw new ArchiveException("NextHeader CRC-32 mismatch"); -} - channel.position(position); - } Archive archive = new Archive(); - ByteBuffer buf = ByteBuffer.allocate(startHeader.nextHeaderSize).order(ByteOrder.LITTLE_ENDIAN); - readFully(buf); - int nid = getUnsignedByte(buf); + ByteBuffer header = mapNextHeader(startHeader); + if (verifyCrc && startHeader.nextHeaderCrc != crc32(header)) { + throw new ArchiveException("Corrupted 7z archive: CRC error in next header"); + } + int nid = getUnsignedByte(header); if (nid == NID.kEncodedHeader) { - buf = readEncodedHeader(buf, archive, password); + header = readEncodedHeader(header, archive, password); // Archive gets rebuilt with the new header archive = new Archive(); - nid = getUnsignedByte(buf); + nid = getUnsignedByte(header); } if (nid != NID.kHeader) { throw new ArchiveException("7z archive: Broken or unsupported, no Header"); } - readHeader(buf, archive); + readHeader(header, archive); archive.subStreamsInfo = null; return archive; } @@ -1307,6 +1289,27 @@ private long[] longArray(final int size) throws MemoryLimitException { return new long[size]; } + /** + * Maps the next header into memory. 
+ * + * @param startHeader the start header + * @return the mapped ByteBuffer + * @throws IOException if an I/O error occurs + */ + private ByteBuffer mapNextHeader(final StartHeader startHeader) throws IOException { + MemoryLimitException.checkKiB(bytesToKiB(startHeader.nextHeaderSize), Math.min(bytesToKiB(org.apache.commons.io.IOUtils.SOFT_MAX_ARRAY_LENGTH), + maxMemoryLimitKiB)); + // startHeader is already within the channel's bounds + if (channel instanceof FileChannel) { + final FileChannel fileChannel = (FileChannel) channel; + return fileChannel.map(FileChannel.MapMode.READ_ONLY, startHeader.position(), startHeader.nextHeaderSize).order(ByteOrder.LITTLE_ENDIAN); + } + channel.position(startHeader.position()); + final ByteBuffer buf = ByteBuffer.allocate(startHeader.nextHeaderSize).order(ByteOrder.LITTLE_ENDIAN); + readFully(buf); + return buf; + } + /** * Reads a byte of data. * @@ -1411,7 +1414,7 @@ private ByteBuffer readEncodedHeader(final ByteBuffer header, final Archive arch // FIXME: merge with buildDecodingStream()/buildDecoderStack() at some stage? 
final Folder folder = archive.folders[0]; final int firstPackStreamIndex = 0; - final long folderOffset = SIGNATURE_HEADER_SIZE + archive.packPos + 0; + final long folderOffset = SIGNATURE_HEADER_SIZE + archive.packPos; channel.position(folderOffset); InputStream inputStreamStack = new BoundedSeekableByteChannelInputStream(channel, archive.packSizes[firstPackStreamIndex]); for (final Coder coder : folder.getOrderedCoders()) { @@ -1431,7 +1434,7 @@ private ByteBuffer readEncodedHeader(final ByteBuffer header, final Archive arch .get(); // @formatter:on } - final int unpackSize = toNonNegativeInt("unpackSize", folder.getUnpackSize()); + final int unpackSize = toNonNegativeInt("header", folder.getUnpackSize()); final byte[] nextHeader = IOUtils.readRange(inputStreamStack, unpackSize); if (nextHeader.length < unpackSize) { throw new ArchiveException("Premature end of stream"); @@ -1680,7 +1683,7 @@ Folder readFolder(final ByteBuffer header) throws IOException { private void readFully(final ByteBuffer buf) throws IOException { buf.rewind(); - IOUtils.readFully(channel, buf); + org.apache.commons.io.IOUtils.read(channel, buf); buf.flip(); } @@ -1709,40 +1712,24 @@ private void readHeader(final ByteBuffer header, final Archive archive) throws I } private Archive readHeaders(final byte[] password) throws IOException { - final ByteBuffer buf = ByteBuffer.allocate(12 /* signature + 2 bytes version + 4 bytes CRC */).order(ByteOrder.LITTLE_ENDIAN); - readFully(buf); - final byte[] signature = new byte[6]; - buf.get(signature); + final ByteBuffer startHeader = ByteBuffer.allocate(SIGNATURE_HEADER_SIZE).order(ByteOrder.LITTLE_ENDIAN); + readFully(startHeader); + final byte[] signature = new byte[SIGNATURE.length]; + startHeader.get(signature); if (!Arrays.equals(signature, SIGNATURE)) { throw new ArchiveException("Bad 7z signature"); } // 7zFormat.txt has it wrong - it's first major then minor - final byte archiveVersionMajor = buf.get(); - final byte archiveVersionMinor = 
buf.get(); + final byte archiveVersionMajor = startHeader.get(); + final byte archiveVersionMinor = startHeader.get(); if (archiveVersionMajor != 0) { throw new ArchiveException("7z archive: Unsupported 7z version (%d,%d)", archiveVersionMajor, archiveVersionMinor); } - boolean headerLooksValid = false; // See https://www.7-zip.org/recover.html - "There is no correct End Header at the end of archive" - final long startHeaderCrc = readUint32(buf); - if (startHeaderCrc == 0) { - // This is an indication of a corrupt header - peek the next 20 bytes - final long currentPosition = channel.position(); - final ByteBuffer peekBuf = ByteBuffer.allocate(20); - readFully(peekBuf); - channel.position(currentPosition); - // Header invalid if all data is 0 - while (peekBuf.hasRemaining()) { - if (peekBuf.get() != 0) { - headerLooksValid = true; - break; - } - } - } else { - headerLooksValid = true; - } - if (headerLooksValid) { - return initializeArchive(readStartHeader(startHeaderCrc), password, true); + final long startHeaderCrc = readUint32(startHeader); + if (startHeaderCrc == crc32(startHeader)) { + return initializeArchive(readStartHeader(startHeader), password, true); } + // See https://www.7-zip.org/recover.html - "There is no correct End Header at the end of archive" // No valid header found - probably first file of multipart archive was removed too early. Scan for end header. 
if (tryToRecoverBrokenArchives) { return tryToLocateEndHeader(password); @@ -1777,27 +1764,19 @@ private void readPackInfo(final ByteBuffer header, final Archive archive) throws } } - private StartHeader readStartHeader(final long startHeaderCrc) throws IOException { - // using Stream rather than ByteBuffer for the benefit of the built-in CRC check - try (DataInputStream dataInputStream = new DataInputStream(ChecksumInputStream.builder() - // @formatter:off - .setChecksum(new CRC32()) - .setInputStream(new BoundedSeekableByteChannelInputStream(channel, 20)) - .setCountThreshold(20L) - .setExpectedChecksumValue(startHeaderCrc) - .get())) { - // @formatter:on - final long nextHeaderOffset = readRealUint64(dataInputStream); - if (nextHeaderOffset > channel.size() - SIGNATURE_HEADER_SIZE) { - throw new ArchiveException("nextHeaderOffset is out of bounds"); - } - final int nextHeaderSize = toNonNegativeInt("nextHeaderSize", readRealUint64(dataInputStream)); - if (nextHeaderSize > channel.size() - SIGNATURE_HEADER_SIZE - nextHeaderOffset) { - throw new ArchiveException("nextHeaderSize is out of bounds"); - } - final long nextHeaderCrc = readUint32(dataInputStream); - return new StartHeader(nextHeaderOffset, nextHeaderSize, nextHeaderCrc); + private StartHeader readStartHeader(final ByteBuffer startHeader) throws IOException { + final long nextHeaderOffset = readRealUint64(startHeader); + if (nextHeaderOffset > channel.size() - SIGNATURE_HEADER_SIZE) { + throw new ArchiveException("Truncated 7z archive: next header offset %,d exceeds file size (%,d bytes).", + nextHeaderOffset + SIGNATURE_HEADER_SIZE, channel.size()); + } + final int nextHeaderSize = toNonNegativeInt("header", readRealUint64(startHeader)); + if (nextHeaderSize > channel.size() - SIGNATURE_HEADER_SIZE - nextHeaderOffset) { + throw new ArchiveException("Truncated 7z archive: next header size %,d at offset %,d exceeds file size (%,d bytes).", nextHeaderSize, + nextHeaderOffset + SIGNATURE_HEADER_SIZE, 
channel.size()); } + final long nextHeaderCrc = readUint32(startHeader); + return new StartHeader(nextHeaderOffset, nextHeaderSize, nextHeaderCrc); } private void readStreamsInfo(final ByteBuffer header, final Archive archive) throws IOException { @@ -2383,15 +2362,8 @@ public String toString() { private Archive tryToLocateEndHeader(final byte[] password) throws IOException { final ByteBuffer nidBuf = ByteBuffer.allocate(1); final long searchLimit = 1024L * 1024 * 1; - // Main header, plus bytes that readStartHeader would read - final long previousDataSize = channel.position() + 20; - final long minPos; // Determine minimal position - can't start before current position - if (channel.position() + searchLimit > channel.size()) { - minPos = channel.position(); - } else { - minPos = channel.size() - searchLimit; - } + final long minPos = Math.max(channel.position(), channel.size() - searchLimit); long pos = channel.size() - 1; // Loop: Try from end of archive while (pos > minPos) { @@ -2406,7 +2378,7 @@ private Archive tryToLocateEndHeader(final byte[] password) throws IOException { if (nid == NID.kEncodedHeader || nid == NID.kHeader) { try { // Try to initialize Archive structure from here - final long nextHeaderOffset = pos - previousDataSize; + final long nextHeaderOffset = pos - SIGNATURE_HEADER_SIZE; // Smaller than 1 MiB, so fits in an int final long nextHeaderSize = channel.size() - pos; final StartHeader startHeader = new StartHeader(nextHeaderOffset, (int) nextHeaderSize, 0); diff --git a/src/main/java/org/apache/commons/compress/archivers/sevenz/StartHeader.java b/src/main/java/org/apache/commons/compress/archivers/sevenz/StartHeader.java index bf7212fe102..c1049e85e38 100644 --- a/src/main/java/org/apache/commons/compress/archivers/sevenz/StartHeader.java +++ b/src/main/java/org/apache/commons/compress/archivers/sevenz/StartHeader.java @@ -33,4 +33,13 @@ final class StartHeader { this.nextHeaderSize = nextHeaderSize; this.nextHeaderCrc = nextHeaderCrc; } + 
+ /** + * Gets the position of the next header in the file. + * + * @return the position of the next header + */ + long position() { + return SevenZFile.SIGNATURE_HEADER_SIZE + nextHeaderOffset; + } } diff --git a/src/test/java/org/apache/commons/compress/archivers/sevenz/SevenZFileTest.java b/src/test/java/org/apache/commons/compress/archivers/sevenz/SevenZFileTest.java index fea72af11c0..15349a29c49 100644 --- a/src/test/java/org/apache/commons/compress/archivers/sevenz/SevenZFileTest.java +++ b/src/test/java/org/apache/commons/compress/archivers/sevenz/SevenZFileTest.java @@ -29,9 +29,7 @@ import static org.junit.jupiter.api.Assertions.assertThrows; import static org.junit.jupiter.api.Assertions.assertTrue; -import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; -import java.io.DataInputStream; import java.io.File; import java.io.IOException; import java.io.InputStream; @@ -1091,18 +1089,16 @@ void testReadingBackLZMA2DictSize() throws Exception { @ParameterizedTest @MethodSource void testReadRealUint64_Invalid(final byte[] input) throws IOException { - try (DataInputStream dis = new DataInputStream(new ByteArrayInputStream(input))) { - assertThrows(IOException.class, () -> SevenZFile.readRealUint64(dis)); - } + final ByteBuffer buf = ByteBuffer.wrap(input).order(ByteOrder.LITTLE_ENDIAN); + assertThrows(IOException.class, () -> SevenZFile.readRealUint64(buf)); } @ParameterizedTest @MethodSource void testReadRealUint64_Valid(final byte[] input, final long expected) throws IOException { - try (DataInputStream dis = new DataInputStream(new ByteArrayInputStream(input))) { - final long actual = SevenZFile.readRealUint64(dis); - assertEquals(expected, actual); - } + final ByteBuffer buf = ByteBuffer.wrap(input).order(ByteOrder.LITTLE_ENDIAN); + final long actual = SevenZFile.readRealUint64(buf); + assertEquals(expected, actual); } @Test @@ -1134,10 +1130,6 @@ void testReadTimesFromFile() throws IOException { @ParameterizedTest @MethodSource void 
testReadUint32_Valid(final byte[] input, final long expected) throws IOException { - try (DataInputStream dis = new DataInputStream(new ByteArrayInputStream(input))) { - final long actual = SevenZFile.readUint32(dis); - assertEquals(expected, actual); - } final ByteBuffer buf = ByteBuffer.wrap(input).order(ByteOrder.LITTLE_ENDIAN); final long actual = SevenZFile.readUint32(buf); assertEquals(expected, actual); From 13c2d7b48d5acfc91403fe600510f422a3261a4c Mon Sep 17 00:00:00 2001 From: "Piotr P. Karwasz" Date: Sun, 19 Oct 2025 20:45:13 +0200 Subject: [PATCH 36/40] Deprecate `IOUtils.readFully` and `IOUtils.skip` (#736) * Deprecate `IOUtils.readFully` and `IOUtils.skip` This change deprecates `readFully` and `skip` in `o.a.c.compress.utils.IOUtils` in favor of their Commons IO counterparts. Beyond code reuse, this offers two benefits: * `readFully` had several overloads with inconsistent semantics. All read as many bytes as possible (like `read`), but only one threw on EOF (as `readFully` should). * `skip` previously used a per-call byte buffer, unlike the Commons IO version. Since apache/commons-io#801 upstreamed the concurrency fix, this workaround is no longer needed. **Note**: As `o.a.c.compress.utils.IOUtils` is now rarely used, it is always referenced by FQCN to avoid confusion. 
* fix: unnecessary qualifier * fix: remove qualified name --- src/changes/changes.xml | 1 + .../archivers/ArchiveStreamFactory.java | 8 +++---- .../archivers/ar/ArArchiveInputStream.java | 14 +++++------ .../archivers/arj/ArjArchiveInputStream.java | 13 +++++----- .../cpio/CpioArchiveInputStream.java | 8 +++---- .../dump/DumpArchiveInputStream.java | 6 ++--- .../archivers/dump/TapeInputStream.java | 8 +++---- .../compress/archivers/examples/Archiver.java | 3 +-- .../compress/archivers/sevenz/SevenZFile.java | 24 ++++++++++--------- .../archivers/tar/TarArchiveEntry.java | 7 +++--- .../archivers/tar/TarArchiveInputStream.java | 10 ++++---- .../compress/archivers/tar/TarUtils.java | 7 +++--- .../compress/archivers/zip/BinaryTree.java | 3 +-- .../archivers/zip/ZipArchiveInputStream.java | 8 +++---- .../compress/archivers/zip/ZipFile.java | 16 ++++++------- .../compress/changes/ChangeSetPerformer.java | 3 ++- .../compressors/CompressorStreamFactory.java | 4 ++-- .../deflate/DeflateCompressorInputStream.java | 3 ++- .../lz4/FramedLZ4CompressorInputStream.java | 16 ++++++------- .../AbstractLZ77CompressorInputStream.java | 4 ++-- .../lzma/LZMACompressorInputStream.java | 3 ++- .../FramedSnappyCompressorInputStream.java | 12 +++++----- .../xz/XZCompressorInputStream.java | 3 ++- .../zstandard/ZstdCompressorInputStream.java | 3 ++- .../compress/harmony/unpack200/FileBands.java | 3 +-- .../harmony/unpack200/SegmentHeader.java | 3 +-- .../commons/compress/utils/IOUtils.java | 23 +++++++++--------- .../commons/compress/archivers/ZipTest.java | 4 ++-- .../archivers/zip/ScatterSampleTest.java | 4 ++-- .../archivers/zip/Zip64SupportIT.java | 3 +-- .../compress/archivers/zip/ZipFileTest.java | 6 ++--- .../gzip/GzipCompressorOutputStreamTest.java | 3 +-- 32 files changed, 119 insertions(+), 117 deletions(-) diff --git a/src/changes/changes.xml b/src/changes/changes.xml index 8b529516b61..2b4c5850584 100644 --- a/src/changes/changes.xml +++ b/src/changes/changes.xml @@ -140,6 
+140,7 @@ The type attribute can be add,update,fix,remove. Bump org.apache.commons:commons-parent from 85 to 88 #707. Bump org.apache.commons:commons-lang3 from 3.18.0 to 3.19.0. +Deprecate IOUtils.readFully and IOUtils.skip. diff --git a/src/main/java/org/apache/commons/compress/archivers/ArchiveStreamFactory.java b/src/main/java/org/apache/commons/compress/archivers/ArchiveStreamFactory.java index 2c33d769d91..4a0e35176b0 100644 --- a/src/main/java/org/apache/commons/compress/archivers/ArchiveStreamFactory.java +++ b/src/main/java/org/apache/commons/compress/archivers/ArchiveStreamFactory.java @@ -43,8 +43,8 @@ import org.apache.commons.compress.archivers.tar.TarArchiveOutputStream; import org.apache.commons.compress.archivers.zip.ZipArchiveInputStream; import org.apache.commons.compress.archivers.zip.ZipArchiveOutputStream; -import org.apache.commons.compress.utils.IOUtils; import org.apache.commons.compress.utils.Sets; +import org.apache.commons.io.IOUtils; import org.apache.commons.lang3.StringUtils; /** @@ -218,7 +218,7 @@ public static String detect(final InputStream in) throws ArchiveException { in.mark(signature.length); int signatureLength = -1; try { - signatureLength = IOUtils.readFully(in, signature); + signatureLength = IOUtils.read(in, signature); in.reset(); } catch (final IOException e) { throw new ArchiveException("Failure reading signature.", (Throwable) e); @@ -247,7 +247,7 @@ public static String detect(final InputStream in) throws ArchiveException { final byte[] dumpsig = new byte[DUMP_SIGNATURE_SIZE]; in.mark(dumpsig.length); try { - signatureLength = IOUtils.readFully(in, dumpsig); + signatureLength = IOUtils.read(in, dumpsig); in.reset(); } catch (final IOException e) { throw new ArchiveException("IOException while reading dump signature", (Throwable) e); @@ -259,7 +259,7 @@ public static String detect(final InputStream in) throws ArchiveException { final byte[] tarHeader = new byte[TAR_HEADER_SIZE]; in.mark(tarHeader.length); try { - 
signatureLength = IOUtils.readFully(in, tarHeader); + signatureLength = IOUtils.read(in, tarHeader); in.reset(); } catch (final IOException e) { throw new ArchiveException("IOException while reading tar signature", (Throwable) e); diff --git a/src/main/java/org/apache/commons/compress/archivers/ar/ArArchiveInputStream.java b/src/main/java/org/apache/commons/compress/archivers/ar/ArArchiveInputStream.java index a4c8091e5a1..cc85ba1c33b 100644 --- a/src/main/java/org/apache/commons/compress/archivers/ar/ArArchiveInputStream.java +++ b/src/main/java/org/apache/commons/compress/archivers/ar/ArArchiveInputStream.java @@ -30,8 +30,8 @@ import org.apache.commons.compress.archivers.ArchiveException; import org.apache.commons.compress.archivers.ArchiveInputStream; import org.apache.commons.compress.utils.ArchiveUtils; -import org.apache.commons.compress.utils.IOUtils; import org.apache.commons.compress.utils.ParsingUtils; +import org.apache.commons.io.IOUtils; import org.apache.commons.lang3.ArrayUtils; /** @@ -214,7 +214,7 @@ private long asLong(final byte[] byteArray, final int offset, final int len) thr private void checkTrailer() throws IOException { // Check and skip the record trailer final byte[] expectedTrailer = ArchiveUtils.toAsciiBytes(ArArchiveEntry.TRAILER); - final byte[] actualTrailer = IOUtils.readRange(in, expectedTrailer.length); + final byte[] actualTrailer = org.apache.commons.compress.utils.IOUtils.readRange(in, expectedTrailer.length); if (actualTrailer.length < expectedTrailer.length) { throw new EOFException(String.format( "Premature end of ar archive: Invalid or incomplete trailer for entry '%s'.", @@ -248,7 +248,7 @@ public void close() throws IOException { */ private String getBSDLongName(final String bsdLongName) throws IOException { final int nameLen = checkEntryNameLength(ParsingUtils.parseIntValue(bsdLongName.substring(BSD_LONGNAME_PREFIX_LEN))); - final byte[] name = IOUtils.readRange(in, nameLen); + final byte[] name = 
org.apache.commons.compress.utils.IOUtils.readRange(in, nameLen); final int read = name.length; count(read); if (read != nameLen) { @@ -389,7 +389,7 @@ public ArArchiveEntry getNextEntry() throws IOException { * @throws IOException if an I/O error occurs while reading the stream or if the record is malformed. */ private byte[] getRecord() throws IOException { - final int read = IOUtils.readFully(in, metaData); + final int read = IOUtils.read(in, metaData); count(read); if (read == 0) { return null; @@ -442,7 +442,7 @@ private ArArchiveEntry parseEntry(final byte[] headerBuf) throws IOException { @Override public int read(final byte[] b, final int off, final int len) throws IOException { - org.apache.commons.io.IOUtils.checkFromIndexSize(b, off, len); + IOUtils.checkFromIndexSize(b, off, len); if (len == 0) { return 0; } @@ -474,7 +474,7 @@ private byte[] readGNUStringTable(final ArArchiveEntry entry) throws IOException throw new ArchiveException("Invalid GNU string table entry size: " + entry.getLength()); } final int size = (int) entry.getLength(); - final byte[] namebuffer = IOUtils.readRange(in, size); + final byte[] namebuffer = org.apache.commons.compress.utils.IOUtils.readRange(in, size); final int read = namebuffer.length; if (read < size) { throw new EOFException("Premature end of ar archive: Truncated or incomplete GNU string table."); @@ -490,7 +490,7 @@ private byte[] readGNUStringTable(final ArArchiveEntry entry) throws IOException */ private void skipGlobalSignature() throws IOException { final byte[] expectedMagic = ArArchiveEntry.HEADER_BYTES; - final byte[] actualMagic = IOUtils.readRange(in, expectedMagic.length); + final byte[] actualMagic = org.apache.commons.compress.utils.IOUtils.readRange(in, expectedMagic.length); count(actualMagic.length); if (expectedMagic.length != actualMagic.length) { throw new EOFException(String.format("Premature end of ar archive: Incomplete global header (expected %d bytes, got %d).", expectedMagic.length, diff --git 
a/src/main/java/org/apache/commons/compress/archivers/arj/ArjArchiveInputStream.java b/src/main/java/org/apache/commons/compress/archivers/arj/ArjArchiveInputStream.java index 6e6554f7f00..a26b4a88234 100644 --- a/src/main/java/org/apache/commons/compress/archivers/arj/ArjArchiveInputStream.java +++ b/src/main/java/org/apache/commons/compress/archivers/arj/ArjArchiveInputStream.java @@ -32,6 +32,7 @@ import org.apache.commons.compress.archivers.ArchiveInputStream; import org.apache.commons.compress.utils.ArchiveUtils; import org.apache.commons.io.EndianUtils; +import org.apache.commons.io.IOUtils; import org.apache.commons.io.input.BoundedInputStream; import org.apache.commons.io.input.ChecksumInputStream; @@ -226,7 +227,7 @@ private byte[] findMainHeader() throws IOException { final int basicHeaderSize = readSwappedUnsignedShort(); // At least two bytes are required for the null-terminated name and comment if (MIN_FIRST_HEADER_SIZE + 2 <= basicHeaderSize && basicHeaderSize <= MAX_BASIC_HEADER_SIZE) { - basicHeaderBytes = org.apache.commons.io.IOUtils.toByteArray(in, basicHeaderSize); + basicHeaderBytes = IOUtils.toByteArray(in, basicHeaderSize); count(basicHeaderSize); if (checkCRC32(basicHeaderBytes)) { return basicHeaderBytes; @@ -262,7 +263,7 @@ public ArjArchiveEntry getNextEntry() throws IOException { if (currentInputStream != null) { // return value ignored as IOUtils.skip ensures the stream is drained completely final InputStream input = currentInputStream; - org.apache.commons.io.IOUtils.skip(input, Long.MAX_VALUE); + IOUtils.skip(input, Long.MAX_VALUE); currentInputStream.close(); currentLocalFileHeader = null; currentInputStream = null; @@ -307,7 +308,7 @@ public ArjArchiveEntry getNextEntry() throws IOException { @Override public int read(final byte[] b, final int off, final int len) throws IOException { - org.apache.commons.io.IOUtils.checkFromIndexSize(b, off, len); + IOUtils.checkFromIndexSize(b, off, len); if (len == 0) { return 0; } @@ -352,7 
+353,7 @@ private byte[] readHeader() throws IOException { if (basicHeaderSize < MIN_FIRST_HEADER_SIZE + 2 || basicHeaderSize > MAX_BASIC_HEADER_SIZE) { throw new ArchiveException("Corrupted ARJ archive: Invalid ARJ header size %,d", basicHeaderSize); } - final byte[] basicHeaderBytes = org.apache.commons.io.IOUtils.toByteArray(in, basicHeaderSize); + final byte[] basicHeaderBytes = IOUtils.toByteArray(in, basicHeaderSize); count(basicHeaderSize); if (!checkCRC32(basicHeaderBytes)) { throw new ArchiveException("Corrupted ARJ archive: Invalid ARJ header CRC32 checksum"); @@ -402,7 +403,7 @@ private LocalFileHeader readLocalFileHeader() throws IOException { final ArrayList extendedHeaders = new ArrayList<>(); int extendedHeaderSize; while ((extendedHeaderSize = readSwappedUnsignedShort()) > 0) { - final byte[] extendedHeaderBytes = org.apache.commons.io.IOUtils.toByteArray(in, extendedHeaderSize); + final byte[] extendedHeaderBytes = IOUtils.toByteArray(in, extendedHeaderSize); count(extendedHeaderSize); if (!checkCRC32(extendedHeaderBytes)) { throw new ArchiveException("Corrupted ARJ archive: Extended header CRC32 verification failure"); @@ -448,7 +449,7 @@ private MainHeader readMainHeader(final boolean selfExtracting) throws IOExcepti } final int extendedHeaderSize = readSwappedUnsignedShort(); if (extendedHeaderSize > 0) { - header.extendedHeaderBytes = org.apache.commons.io.IOUtils.toByteArray(in, extendedHeaderSize); + header.extendedHeaderBytes = IOUtils.toByteArray(in, extendedHeaderSize); count(extendedHeaderSize); if (!checkCRC32(header.extendedHeaderBytes)) { throw new ArchiveException("Corrupted ARJ archive: Extended header CRC32 verification failure"); diff --git a/src/main/java/org/apache/commons/compress/archivers/cpio/CpioArchiveInputStream.java b/src/main/java/org/apache/commons/compress/archivers/cpio/CpioArchiveInputStream.java index 33fdb6946b1..317755eba77 100644 --- 
a/src/main/java/org/apache/commons/compress/archivers/cpio/CpioArchiveInputStream.java +++ b/src/main/java/org/apache/commons/compress/archivers/cpio/CpioArchiveInputStream.java @@ -28,8 +28,8 @@ import org.apache.commons.compress.archivers.zip.ZipEncoding; import org.apache.commons.compress.archivers.zip.ZipEncodingHelper; import org.apache.commons.compress.utils.ArchiveUtils; -import org.apache.commons.compress.utils.IOUtils; import org.apache.commons.compress.utils.ParsingUtils; +import org.apache.commons.io.IOUtils; /** * CpioArchiveInputStream is a stream for reading cpio streams. All formats of cpio are supported (old ASCII, old binary, new portable format and the new @@ -386,7 +386,7 @@ public CpioArchiveEntry getNextEntry() throws IOException { */ @Override public int read(final byte[] b, final int off, final int len) throws IOException { - org.apache.commons.io.IOUtils.checkFromIndexSize(b, off, len); + IOUtils.checkFromIndexSize(b, off, len); if (len == 0) { return 0; } @@ -443,7 +443,7 @@ private String readEntryName(int lengthWithNull) throws IOException { } private int readFully(final byte[] b, final int off, final int len) throws IOException { - final int count = IOUtils.readFully(in, b, off, len); + final int count = IOUtils.read(in, b, off, len); count(count); if (count < len) { throw new EOFException(); @@ -558,7 +558,7 @@ private CpioArchiveEntry readOldBinaryEntry(final boolean swapHalfWord) throws I } private byte[] readRange(final int len) throws IOException { - final byte[] b = IOUtils.readRange(in, len); + final byte[] b = org.apache.commons.compress.utils.IOUtils.readRange(in, len); count(b.length); if (b.length < len) { throw new EOFException(); diff --git a/src/main/java/org/apache/commons/compress/archivers/dump/DumpArchiveInputStream.java b/src/main/java/org/apache/commons/compress/archivers/dump/DumpArchiveInputStream.java index f14c50643ed..cb4d2fbd916 100644 --- 
a/src/main/java/org/apache/commons/compress/archivers/dump/DumpArchiveInputStream.java +++ b/src/main/java/org/apache/commons/compress/archivers/dump/DumpArchiveInputStream.java @@ -36,7 +36,7 @@ import org.apache.commons.compress.archivers.zip.ZipEncoding; import org.apache.commons.compress.archivers.zip.ZipEncodingHelper; import org.apache.commons.compress.utils.ArchiveUtils; -import org.apache.commons.compress.utils.IOUtils; +import org.apache.commons.io.IOUtils; /** * The DumpArchiveInputStream reads a Unix dump archive as an InputStream. Methods are provided to position at each successive entry in the archive, and the @@ -400,7 +400,7 @@ public DumpArchiveSummary getSummary() { */ @Override public int read(final byte[] buf, int off, int len) throws IOException { - org.apache.commons.io.IOUtils.checkFromIndexSize(buf, off, len); + IOUtils.checkFromIndexSize(buf, off, len); if (len == 0) { return 0; } @@ -527,7 +527,7 @@ private void readDirectoryEntry(DumpArchiveEntry entry) throws IOException { final int datalen = DumpArchiveConstants.TP_SIZE * entry.getHeaderCount(); if (blockBuffer.length < datalen) { - blockBuffer = IOUtils.readRange(raw, datalen); + blockBuffer = org.apache.commons.compress.utils.IOUtils.readRange(raw, datalen); if (blockBuffer.length != datalen) { throw new EOFException(); } diff --git a/src/main/java/org/apache/commons/compress/archivers/dump/TapeInputStream.java b/src/main/java/org/apache/commons/compress/archivers/dump/TapeInputStream.java index 48ed69d59e8..ace547a96f7 100644 --- a/src/main/java/org/apache/commons/compress/archivers/dump/TapeInputStream.java +++ b/src/main/java/org/apache/commons/compress/archivers/dump/TapeInputStream.java @@ -29,7 +29,7 @@ import java.util.zip.Inflater; import org.apache.commons.compress.archivers.ArchiveException; -import org.apache.commons.compress.utils.IOUtils; +import org.apache.commons.io.IOUtils; /** * Filter stream that mimics a physical tape drive capable of compressing the data stream. 
@@ -130,7 +130,7 @@ public int read() throws IOException { */ @Override public int read(final byte[] b, int off, final int len) throws IOException { - org.apache.commons.io.IOUtils.checkFromIndexSize(b, off, len); + IOUtils.checkFromIndexSize(b, off, len); if (len == 0) { return 0; } @@ -245,14 +245,14 @@ private void readBlock(final boolean decompress) throws IOException { * @throws IOException Thrown if an I/O error occurs. */ private void readFully(final byte[] b, final int off, final int len) throws IOException { - final int count = IOUtils.readFully(in, b, off, len); + final int count = IOUtils.read(in, b, off, len); if (count < len) { throw new ShortFileException(); } } private byte[] readRange(final int len) throws IOException { - final byte[] ret = IOUtils.readRange(in, len); + final byte[] ret = org.apache.commons.compress.utils.IOUtils.readRange(in, len); if (ret.length < len) { throw new ShortFileException(); } diff --git a/src/main/java/org/apache/commons/compress/archivers/examples/Archiver.java b/src/main/java/org/apache/commons/compress/archivers/examples/Archiver.java index df06fa3cd07..25ef04f61e4 100644 --- a/src/main/java/org/apache/commons/compress/archivers/examples/Archiver.java +++ b/src/main/java/org/apache/commons/compress/archivers/examples/Archiver.java @@ -42,7 +42,6 @@ import org.apache.commons.compress.archivers.sevenz.SevenZArchiveEntry; import org.apache.commons.compress.archivers.sevenz.SevenZOutputFile; import org.apache.commons.compress.archivers.zip.ZipArchiveOutputStream; -import org.apache.commons.compress.utils.IOUtils; /** * Provides a high level API for creating archives. @@ -61,7 +60,7 @@ private static class ArchiverFileVisitor , E ext private ArchiverFileVisitor(final O target, final Path directory, final LinkOption... linkOptions) { this.outputStream = target; this.directory = directory; - this.linkOptions = linkOptions == null ? IOUtils.EMPTY_LINK_OPTIONS : linkOptions.clone(); + this.linkOptions = linkOptions == null ? 
org.apache.commons.compress.utils.IOUtils.EMPTY_LINK_OPTIONS : linkOptions.clone(); } @Override diff --git a/src/main/java/org/apache/commons/compress/archivers/sevenz/SevenZFile.java b/src/main/java/org/apache/commons/compress/archivers/sevenz/SevenZFile.java index 2d2117dca14..05ad45c0af4 100644 --- a/src/main/java/org/apache/commons/compress/archivers/sevenz/SevenZFile.java +++ b/src/main/java/org/apache/commons/compress/archivers/sevenz/SevenZFile.java @@ -44,8 +44,8 @@ import org.apache.commons.compress.archivers.ArchiveException; import org.apache.commons.compress.archivers.ArchiveFile; import org.apache.commons.compress.utils.ArchiveUtils; -import org.apache.commons.compress.utils.IOUtils; import org.apache.commons.compress.utils.InputStreamStatistics; +import org.apache.commons.io.IOUtils; import org.apache.commons.io.function.IOStream; import org.apache.commons.io.input.BoundedInputStream; import org.apache.commons.io.input.ChecksumInputStream; @@ -1104,7 +1104,7 @@ private InputStream getCurrentStream() throws IOException { // streams to get access to an entry. We defer this until really needed // so that entire blocks can be skipped without wasting time for decompression. 
try (InputStream stream = deferredBlockStreams.remove(0)) { - org.apache.commons.io.IOUtils.skip(stream, Long.MAX_VALUE, org.apache.commons.io.IOUtils::byteArray); + IOUtils.skip(stream, Long.MAX_VALUE); } compressedBytesReadFromCurrentEntry = 0; } @@ -1297,8 +1297,7 @@ private long[] longArray(final int size) throws MemoryLimitException { * @throws IOException if an I/O error occurs */ private ByteBuffer mapNextHeader(final StartHeader startHeader) throws IOException { - MemoryLimitException.checkKiB(bytesToKiB(startHeader.nextHeaderSize), Math.min(bytesToKiB(org.apache.commons.io.IOUtils.SOFT_MAX_ARRAY_LENGTH), - maxMemoryLimitKiB)); + MemoryLimitException.checkKiB(bytesToKiB(startHeader.nextHeaderSize), Math.min(bytesToKiB(IOUtils.SOFT_MAX_ARRAY_LENGTH), maxMemoryLimitKiB)); // startHeader is already within the channel's bounds if (channel instanceof FileChannel) { final FileChannel fileChannel = (FileChannel) channel; @@ -1306,7 +1305,7 @@ private ByteBuffer mapNextHeader(final StartHeader startHeader) throws IOExcepti } channel.position(startHeader.position()); final ByteBuffer buf = ByteBuffer.allocate(startHeader.nextHeaderSize).order(ByteOrder.LITTLE_ENDIAN); - readFully(buf); + readFully(buf, "next header"); return buf; } @@ -1435,7 +1434,7 @@ private ByteBuffer readEncodedHeader(final ByteBuffer header, final Archive arch // @formatter:on } final int unpackSize = toNonNegativeInt("header", folder.getUnpackSize()); - final byte[] nextHeader = IOUtils.readRange(inputStreamStack, unpackSize); + final byte[] nextHeader = org.apache.commons.compress.utils.IOUtils.readRange(inputStreamStack, unpackSize); if (nextHeader.length < unpackSize) { throw new ArchiveException("Premature end of stream"); } @@ -1681,10 +1680,13 @@ Folder readFolder(final ByteBuffer header) throws IOException { return folder; } - private void readFully(final ByteBuffer buf) throws IOException { - buf.rewind(); - org.apache.commons.io.IOUtils.read(channel, buf); - buf.flip(); + private 
void readFully(final ByteBuffer buf, final String description) throws IOException { + try { + IOUtils.readFully(channel, buf); + buf.flip(); + } catch (final EOFException e) { + throw new ArchiveException("Truncated 7z archive: end of file reached while reading %s.", description); + } } private void readHeader(final ByteBuffer header, final Archive archive) throws IOException { @@ -1713,7 +1715,7 @@ private void readHeader(final ByteBuffer header, final Archive archive) throws I private Archive readHeaders(final byte[] password) throws IOException { final ByteBuffer startHeader = ByteBuffer.allocate(SIGNATURE_HEADER_SIZE).order(ByteOrder.LITTLE_ENDIAN); - readFully(startHeader); + readFully(startHeader, "signature header"); final byte[] signature = new byte[SIGNATURE.length]; startHeader.get(signature); if (!Arrays.equals(signature, SIGNATURE)) { diff --git a/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveEntry.java b/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveEntry.java index 980b9f85fe1..e0720114eca 100644 --- a/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveEntry.java +++ b/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveEntry.java @@ -50,7 +50,6 @@ import org.apache.commons.compress.archivers.EntryStreamOffsets; import org.apache.commons.compress.archivers.zip.ZipEncoding; import org.apache.commons.compress.utils.ArchiveUtils; -import org.apache.commons.compress.utils.IOUtils; import org.apache.commons.compress.utils.ParsingUtils; import org.apache.commons.io.file.attribute.FileTimes; import org.apache.commons.lang3.StringUtils; @@ -407,7 +406,7 @@ private TarArchiveEntry(final boolean preserveAbsolutePath) { } this.userName = user; this.file = null; - this.linkOptions = IOUtils.EMPTY_LINK_OPTIONS; + this.linkOptions = org.apache.commons.compress.utils.IOUtils.EMPTY_LINK_OPTIONS; this.preserveAbsolutePath = preserveAbsolutePath; } @@ -502,7 +501,7 @@ public TarArchiveEntry(final File 
file) { public TarArchiveEntry(final File file, final String fileName) { final String normalizedName = normalizeFileName(fileName, false); this.file = file.toPath(); - this.linkOptions = IOUtils.EMPTY_LINK_OPTIONS; + this.linkOptions = org.apache.commons.compress.utils.IOUtils.EMPTY_LINK_OPTIONS; try { readFileMode(this.file, normalizedName); } catch (final IOException e) { @@ -592,7 +591,7 @@ public TarArchiveEntry(final Path file) throws IOException { public TarArchiveEntry(final Path file, final String fileName, final LinkOption... linkOptions) throws IOException { final String normalizedName = normalizeFileName(fileName, false); this.file = file; - this.linkOptions = linkOptions == null ? IOUtils.EMPTY_LINK_OPTIONS : linkOptions; + this.linkOptions = linkOptions == null ? org.apache.commons.compress.utils.IOUtils.EMPTY_LINK_OPTIONS : linkOptions; readFileMode(file, normalizedName, linkOptions); this.userName = ""; readOsSpecificProperties(file); diff --git a/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveInputStream.java b/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveInputStream.java index f45b705eeb2..fa93dea545d 100644 --- a/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveInputStream.java +++ b/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveInputStream.java @@ -43,7 +43,7 @@ import org.apache.commons.compress.archivers.zip.ZipEncodingHelper; import org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream; import org.apache.commons.compress.utils.ArchiveUtils; -import org.apache.commons.compress.utils.IOUtils; +import org.apache.commons.io.IOUtils; import org.apache.commons.io.input.BoundedInputStream; /** @@ -651,7 +651,7 @@ public boolean markSupported() { */ @Override public int read(final byte[] buf, final int offset, int numToRead) throws IOException { - org.apache.commons.io.IOUtils.checkFromIndexSize(buf, offset, numToRead); + IOUtils.checkFromIndexSize(buf, 
offset, numToRead); if (numToRead == 0) { return 0; } @@ -690,7 +690,7 @@ private void readOldGNUSparse() throws IOException { * @throws IOException on error. */ protected byte[] readRecord() throws IOException { - final int readCount = IOUtils.readFully(in, recordBuffer); + final int readCount = IOUtils.read(in, recordBuffer); count(readCount); if (readCount != getRecordSize()) { return null; @@ -742,7 +742,7 @@ public long skip(final long n) throws IOException { throw new IllegalStateException("No current tar entry"); } // Use Apache Commons IO to skip as it handles skipping fully - return org.apache.commons.io.IOUtils.skip(currentInputStream, n); + return IOUtils.skip(currentInputStream, n); } /** @@ -754,7 +754,7 @@ private void skipRecordPadding() throws IOException { final long entrySize = currEntry != null ? currEntry.getSize() : 0; if (!isDirectory() && entrySize > 0 && entrySize % getRecordSize() != 0) { final long padding = getRecordSize() - (entrySize % getRecordSize()); - final long skipped = org.apache.commons.io.IOUtils.skip(in, padding); + final long skipped = IOUtils.skip(in, padding); count(skipped); if (skipped != padding) { throw new EOFException(String.format("Truncated TAR archive: Failed to skip record padding for entry '%s'", currEntry.getName())); diff --git a/src/main/java/org/apache/commons/compress/archivers/tar/TarUtils.java b/src/main/java/org/apache/commons/compress/archivers/tar/TarUtils.java index 73bfdf90267..0dcf31bec12 100644 --- a/src/main/java/org/apache/commons/compress/archivers/tar/TarUtils.java +++ b/src/main/java/org/apache/commons/compress/archivers/tar/TarUtils.java @@ -39,8 +39,8 @@ import org.apache.commons.compress.archivers.zip.ZipEncoding; import org.apache.commons.compress.archivers.zip.ZipEncodingHelper; import org.apache.commons.compress.utils.ArchiveUtils; -import org.apache.commons.compress.utils.IOUtils; import org.apache.commons.compress.utils.ParsingUtils; +import org.apache.commons.io.IOUtils; import 
org.apache.commons.io.output.ByteArrayOutputStream; /** @@ -726,8 +726,7 @@ static Map parsePaxHeaders(final InputStream inputStream, final if (TarArchiveEntry.PAX_NAME_KEY.equals(keyword) || TarArchiveEntry.PAX_LINK_NAME_KEY.equals(keyword)) { ArchiveUtils.checkEntryNameLength(restLen - 1, maxEntryPathLength, "TAR"); } - final byte[] rest = org.apache.commons.io.IOUtils.toByteArray(inputStream, restLen, - org.apache.commons.io.IOUtils.DEFAULT_BUFFER_SIZE); + final byte[] rest = IOUtils.toByteArray(inputStream, restLen, IOUtils.DEFAULT_BUFFER_SIZE); totalRead += restLen; // Drop trailing NL if (rest[restLen - 1] != '\n') { @@ -848,7 +847,7 @@ private static long[] readLineOfNumberForPax1x(final InputStream inputStream) th static String readLongName(final InputStream input, final ZipEncoding encoding, final int maxEntryNameLength, final TarArchiveEntry entry) throws IOException { final int declaredLength = ArchiveUtils.checkEntryNameLength(entry.getSize(), maxEntryNameLength, "TAR"); - final byte[] name = IOUtils.readRange(input, declaredLength); + final byte[] name = org.apache.commons.compress.utils.IOUtils.readRange(input, declaredLength); int actualLength = name.length; if (actualLength != declaredLength) { throw new EOFException(String.format("Truncated long name entry: Expected %,d bytes, read %,d bytes.", declaredLength, actualLength)); diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/BinaryTree.java b/src/main/java/org/apache/commons/compress/archivers/zip/BinaryTree.java index 0df1cebd1d5..b87d1670745 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/BinaryTree.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/BinaryTree.java @@ -24,7 +24,6 @@ import java.io.InputStream; import org.apache.commons.compress.archivers.ArchiveException; -import org.apache.commons.compress.utils.IOUtils; import org.apache.commons.lang3.ArrayFill; /** @@ -53,7 +52,7 @@ static BinaryTree decode(final InputStream inputStream, 
final int totalNumberOfV throw new ArchiveException("Cannot read the size of the encoded tree, unexpected end of stream"); } - final byte[] encodedTree = IOUtils.readRange(inputStream, size); + final byte[] encodedTree = org.apache.commons.compress.utils.IOUtils.readRange(inputStream, size); if (encodedTree.length != size) { throw new EOFException(); } diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveInputStream.java b/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveInputStream.java index bc61281517d..7c77fefcc44 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveInputStream.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveInputStream.java @@ -50,8 +50,8 @@ import org.apache.commons.compress.compressors.deflate64.Deflate64CompressorInputStream; import org.apache.commons.compress.compressors.zstandard.ZstdCompressorInputStream; import org.apache.commons.compress.utils.ArchiveUtils; -import org.apache.commons.compress.utils.IOUtils; import org.apache.commons.compress.utils.InputStreamStatistics; +import org.apache.commons.io.IOUtils; import org.apache.commons.io.input.BoundedInputStream; import org.apache.commons.lang3.ArrayUtils; @@ -1047,7 +1047,7 @@ private void pushback(final byte[] buf, final int offset, final int length) thro @Override public int read(final byte[] buffer, final int offset, final int length) throws IOException { - org.apache.commons.io.IOUtils.checkFromIndexSize(buffer, offset, length); + IOUtils.checkFromIndexSize(buffer, offset, length); if (length == 0) { return 0; } @@ -1231,7 +1231,7 @@ private void readFully(final byte[] b) throws IOException { private void readFully(final byte[] b, final int off) throws IOException { final int len = b.length - off; - final int count = IOUtils.readFully(in, b, off, len); + final int count = IOUtils.read(in, b, off, len); count(count); if (count < len) { throw new EOFException(); @@ -1268,7 +1268,7 @@ 
private int readOneByte() throws IOException { } private byte[] readRange(final int len) throws IOException { - final byte[] ret = IOUtils.readRange(in, len); + final byte[] ret = org.apache.commons.compress.utils.IOUtils.readRange(in, len); count(ret.length); if (ret.length < len) { throw new EOFException(); diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/ZipFile.java b/src/main/java/org/apache/commons/compress/archivers/zip/ZipFile.java index 900cd3e14dd..e0cd181e8e4 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/ZipFile.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/ZipFile.java @@ -60,9 +60,9 @@ import org.apache.commons.compress.utils.ArchiveUtils; import org.apache.commons.compress.utils.BoundedArchiveInputStream; import org.apache.commons.compress.utils.BoundedSeekableByteChannelInputStream; -import org.apache.commons.compress.utils.IOUtils; import org.apache.commons.compress.utils.InputStreamStatistics; import org.apache.commons.io.FilenameUtils; +import org.apache.commons.io.IOUtils; import org.apache.commons.io.function.IOFunction; import org.apache.commons.io.function.IOStream; import org.apache.commons.io.input.BoundedInputStream; @@ -500,7 +500,7 @@ public static Builder builder() { * @param zipFile file to close, can be null */ public static void closeQuietly(final ZipFile zipFile) { - org.apache.commons.io.IOUtils.closeQuietly(zipFile); + IOUtils.closeQuietly(zipFile); } private static SeekableByteChannel openZipChannel(final Path path, final long maxNumberOfDisks, final OpenOption[] openOptions) throws IOException { @@ -551,7 +551,7 @@ private static SeekableByteChannel openZipChannel(final Path path, final long ma return lowercase; }).collect(Collectors.toList()), openOptions); } catch (final Throwable ex) { - org.apache.commons.io.IOUtils.closeQuietly(channel); + IOUtils.closeQuietly(channel); throw ex; } } @@ -1323,7 +1323,7 @@ public InputStream getRawInputStream(final 
ZipArchiveEntry entry) throws IOExcep public String getUnixSymlink(final ZipArchiveEntry entry) throws IOException { if (entry != null && entry.isUnixSymlink()) { try (InputStream in = getInputStream(entry)) { - return zipEncoding.decode(org.apache.commons.io.IOUtils.toByteArray(in)); + return zipEncoding.decode(IOUtils.toByteArray(in)); } } return null; @@ -1544,7 +1544,7 @@ private void readCentralDirectoryEntry(final Map < fileNameLen) { throw new EOFException(); } @@ -1555,7 +1555,7 @@ private void readCentralDirectoryEntry(final Map < extraLen) { throw new EOFException(); } @@ -1568,7 +1568,7 @@ private void readCentralDirectoryEntry(final Map < commentLen) { throw new EOFException(); } @@ -1595,7 +1595,7 @@ private void resolveLocalFileHeaderData(final Map < extraFieldLen) { throw new EOFException(); } diff --git a/src/main/java/org/apache/commons/compress/changes/ChangeSetPerformer.java b/src/main/java/org/apache/commons/compress/changes/ChangeSetPerformer.java index 871094bcf53..47b3f710f04 100644 --- a/src/main/java/org/apache/commons/compress/changes/ChangeSetPerformer.java +++ b/src/main/java/org/apache/commons/compress/changes/ChangeSetPerformer.java @@ -31,6 +31,7 @@ import org.apache.commons.compress.archivers.zip.ZipArchiveEntry; import org.apache.commons.compress.archivers.zip.ZipFile; import org.apache.commons.compress.changes.Change.ChangeType; +import org.apache.commons.io.IOUtils; /** * Performs ChangeSet operations on a stream. This class is thread safe and can be used multiple times. It operates on a copy of the ChangeSet. 
If the ChangeSet @@ -134,7 +135,7 @@ public ChangeSetPerformer(final ChangeSet changeSet) { */ private void copyStream(final InputStream inputStream, final O outputStream, final E archiveEntry) throws IOException { outputStream.putArchiveEntry(archiveEntry); - org.apache.commons.io.IOUtils.copy(inputStream, outputStream); + IOUtils.copy(inputStream, outputStream); outputStream.closeArchiveEntry(); } diff --git a/src/main/java/org/apache/commons/compress/compressors/CompressorStreamFactory.java b/src/main/java/org/apache/commons/compress/compressors/CompressorStreamFactory.java index ccc097c7d36..7b93ca40ca5 100644 --- a/src/main/java/org/apache/commons/compress/compressors/CompressorStreamFactory.java +++ b/src/main/java/org/apache/commons/compress/compressors/CompressorStreamFactory.java @@ -57,8 +57,8 @@ import org.apache.commons.compress.compressors.zstandard.ZstdCompressorInputStream; import org.apache.commons.compress.compressors.zstandard.ZstdCompressorOutputStream; import org.apache.commons.compress.compressors.zstandard.ZstdUtils; -import org.apache.commons.compress.utils.IOUtils; import org.apache.commons.compress.utils.Sets; +import org.apache.commons.io.IOUtils; import org.apache.commons.lang3.StringUtils; /** @@ -236,7 +236,7 @@ static String detect(final InputStream inputStream, final Set compressor inputStream.mark(signature.length); int signatureLength = -1; try { - signatureLength = IOUtils.readFully(inputStream, signature); + signatureLength = IOUtils.read(inputStream, signature); inputStream.reset(); } catch (final IOException e) { throw new CompressorException("Failed to read signature.", e); diff --git a/src/main/java/org/apache/commons/compress/compressors/deflate/DeflateCompressorInputStream.java b/src/main/java/org/apache/commons/compress/compressors/deflate/DeflateCompressorInputStream.java index 274c8ef4437..5a8b265dbad 100644 --- a/src/main/java/org/apache/commons/compress/compressors/deflate/DeflateCompressorInputStream.java +++ 
b/src/main/java/org/apache/commons/compress/compressors/deflate/DeflateCompressorInputStream.java @@ -25,6 +25,7 @@ import org.apache.commons.compress.compressors.CompressorInputStream; import org.apache.commons.compress.utils.InputStreamStatistics; +import org.apache.commons.io.IOUtils; import org.apache.commons.io.input.BoundedInputStream; /** @@ -120,6 +121,6 @@ public int read(final byte[] buf, final int off, final int len) throws IOExcepti /** {@inheritDoc} */ @Override public long skip(final long n) throws IOException { - return org.apache.commons.io.IOUtils.skip(in, n); + return IOUtils.skip(in, n); } } diff --git a/src/main/java/org/apache/commons/compress/compressors/lz4/FramedLZ4CompressorInputStream.java b/src/main/java/org/apache/commons/compress/compressors/lz4/FramedLZ4CompressorInputStream.java index 243fc5d2e4c..3cd88e4b648 100644 --- a/src/main/java/org/apache/commons/compress/compressors/lz4/FramedLZ4CompressorInputStream.java +++ b/src/main/java/org/apache/commons/compress/compressors/lz4/FramedLZ4CompressorInputStream.java @@ -26,8 +26,8 @@ import org.apache.commons.compress.compressors.CompressorException; import org.apache.commons.compress.compressors.CompressorInputStream; import org.apache.commons.compress.utils.ByteUtils; -import org.apache.commons.compress.utils.IOUtils; import org.apache.commons.compress.utils.InputStreamStatistics; +import org.apache.commons.io.IOUtils; import org.apache.commons.io.input.BoundedInputStream; /** @@ -159,7 +159,7 @@ private void appendToBlockDependencyBuffer(final byte[] b, final int off, int le @Override public void close() throws IOException { try { - org.apache.commons.io.IOUtils.close(currentBlock); + IOUtils.close(currentBlock); currentBlock = null; } finally { inputStream.close(); @@ -237,7 +237,7 @@ public int read() throws IOException { @Override public int read(final byte[] b, final int off, final int len) throws IOException { - org.apache.commons.io.IOUtils.checkFromIndexSize(b, off, len); + 
IOUtils.checkFromIndexSize(b, off, len); if (len == 0) { return 0; } @@ -289,7 +289,7 @@ private void readFrameDescriptor() throws IOException { contentHash.update(bdByte); if (expectContentSize) { // for now, we don't care, contains the uncompressed size final byte[] contentSize = new byte[8]; - final int skipped = IOUtils.readFully(inputStream, contentSize); + final int skipped = IOUtils.read(inputStream, contentSize); count(skipped); if (8 != skipped) { throw new CompressorException("Premature end of stream while reading content size"); @@ -332,7 +332,7 @@ private int readOneByte() throws IOException { private boolean readSignature(final boolean firstFrame) throws IOException { final String garbageMessage = firstFrame ? "Not a LZ4 frame stream" : "LZ4 frame stream followed by garbage"; final byte[] b = new byte[4]; - int read = IOUtils.readFully(inputStream, b); + int read = IOUtils.read(inputStream, b); count(read); if (0 == read && !firstFrame) { // good LZ4 frame and nothing after it @@ -369,12 +369,12 @@ private int skipSkippableFrame(final byte[] b) throws IOException { if (len < 0) { throw new CompressorException("Found illegal skippable frame with negative size"); } - final long skipped = org.apache.commons.io.IOUtils.skip(inputStream, len); + final long skipped = IOUtils.skip(inputStream, len); count(skipped); if (len != skipped) { throw new CompressorException("Premature end of stream while skipping frame"); } - read = IOUtils.readFully(inputStream, b); + read = IOUtils.read(inputStream, b); count(read); } return read; @@ -382,7 +382,7 @@ private int skipSkippableFrame(final byte[] b) throws IOException { private void verifyChecksum(final org.apache.commons.codec.digest.XXHash32 hash, final String kind) throws IOException { final byte[] checksum = new byte[4]; - final int read = IOUtils.readFully(inputStream, checksum); + final int read = IOUtils.read(inputStream, checksum); count(read); if (4 != read) { throw new CompressorException("Premature end of 
stream while reading %s checksum", kind); diff --git a/src/main/java/org/apache/commons/compress/compressors/lz77support/AbstractLZ77CompressorInputStream.java b/src/main/java/org/apache/commons/compress/compressors/lz77support/AbstractLZ77CompressorInputStream.java index 16a37181819..9502ed6479a 100644 --- a/src/main/java/org/apache/commons/compress/compressors/lz77support/AbstractLZ77CompressorInputStream.java +++ b/src/main/java/org/apache/commons/compress/compressors/lz77support/AbstractLZ77CompressorInputStream.java @@ -25,8 +25,8 @@ import org.apache.commons.compress.compressors.CompressorException; import org.apache.commons.compress.compressors.CompressorInputStream; import org.apache.commons.compress.utils.ByteUtils; -import org.apache.commons.compress.utils.IOUtils; import org.apache.commons.compress.utils.InputStreamStatistics; +import org.apache.commons.io.IOUtils; import org.apache.commons.io.input.BoundedInputStream; /** @@ -332,7 +332,7 @@ private void tryToCopy(final int bytesToCopy) { private void tryToReadLiteral(final int bytesToRead) throws IOException { // min of "what is still inside the literal", "what does the user want" and "how much can fit into the buffer" final int reallyTryToRead = Math.min((int) Math.min(bytesToRead, bytesRemaining), buf.length - writeIndex); - final int bytesRead = reallyTryToRead > 0 ? IOUtils.readFully(in, buf, writeIndex, reallyTryToRead) : 0 /* happens for bytesRemaining == 0 */; + final int bytesRead = reallyTryToRead > 0 ? 
IOUtils.read(in, buf, writeIndex, reallyTryToRead) : 0 /* happens for bytesRemaining == 0 */; count(bytesRead); if (reallyTryToRead != bytesRead) { throw new CompressorException("Premature end of stream reading literal"); diff --git a/src/main/java/org/apache/commons/compress/compressors/lzma/LZMACompressorInputStream.java b/src/main/java/org/apache/commons/compress/compressors/lzma/LZMACompressorInputStream.java index 3743f4ecbeb..0df33b06905 100644 --- a/src/main/java/org/apache/commons/compress/compressors/lzma/LZMACompressorInputStream.java +++ b/src/main/java/org/apache/commons/compress/compressors/lzma/LZMACompressorInputStream.java @@ -24,6 +24,7 @@ import org.apache.commons.compress.MemoryLimitException; import org.apache.commons.compress.compressors.CompressorInputStream; import org.apache.commons.compress.utils.InputStreamStatistics; +import org.apache.commons.io.IOUtils; import org.apache.commons.io.build.AbstractStreamBuilder; import org.apache.commons.io.input.BoundedInputStream; import org.tukaani.xz.LZMA2Options; @@ -179,6 +180,6 @@ public int read(final byte[] buf, final int off, final int len) throws IOExcepti /** {@inheritDoc} */ @Override public long skip(final long n) throws IOException { - return org.apache.commons.io.IOUtils.skip(in, n); + return IOUtils.skip(in, n); } } diff --git a/src/main/java/org/apache/commons/compress/compressors/snappy/FramedSnappyCompressorInputStream.java b/src/main/java/org/apache/commons/compress/compressors/snappy/FramedSnappyCompressorInputStream.java index 733be9582c2..000c113b37c 100644 --- a/src/main/java/org/apache/commons/compress/compressors/snappy/FramedSnappyCompressorInputStream.java +++ b/src/main/java/org/apache/commons/compress/compressors/snappy/FramedSnappyCompressorInputStream.java @@ -27,8 +27,8 @@ import org.apache.commons.compress.compressors.CompressorException; import org.apache.commons.compress.compressors.CompressorInputStream; import org.apache.commons.compress.utils.ByteUtils; -import 
org.apache.commons.compress.utils.IOUtils; import org.apache.commons.compress.utils.InputStreamStatistics; +import org.apache.commons.io.IOUtils; import org.apache.commons.io.input.BoundedInputStream; /** @@ -177,7 +177,7 @@ public int available() throws IOException { @Override public void close() throws IOException { try { - org.apache.commons.io.IOUtils.close(currentCompressedChunk); + IOUtils.close(currentCompressedChunk); currentCompressedChunk = null; } finally { inputStream.close(); @@ -200,7 +200,7 @@ public int read() throws IOException { @Override public int read(final byte[] b, final int off, final int len) throws IOException { - org.apache.commons.io.IOUtils.checkFromIndexSize(b, off, len); + IOUtils.checkFromIndexSize(b, off, len); if (len == 0) { return 0; } @@ -217,7 +217,7 @@ public int read(final byte[] b, final int off, final int len) throws IOException private long readCrc() throws IOException { final byte[] b = new byte[4]; - final int read = IOUtils.readFully(inputStream, b); + final int read = IOUtils.read(inputStream, b); count(read); if (read != 4) { throw new CompressorException("Premature end of stream"); @@ -324,7 +324,7 @@ private int readSize() throws IOException { private void readStreamIdentifier() throws IOException { final byte[] b = new byte[10]; - final int read = IOUtils.readFully(inputStream, b); + final int read = IOUtils.read(inputStream, b); count(read); if (10 != read || !matches(b, 10)) { throw new CompressorException("Not a framed Snappy stream"); @@ -336,7 +336,7 @@ private void skipBlock() throws IOException { if (size < 0) { throw new CompressorException("Found illegal chunk with negative size"); } - final long read = org.apache.commons.io.IOUtils.skip(inputStream, size); + final long read = IOUtils.skip(inputStream, size); count(read); if (read != size) { throw new CompressorException("Premature end of stream"); diff --git a/src/main/java/org/apache/commons/compress/compressors/xz/XZCompressorInputStream.java 
b/src/main/java/org/apache/commons/compress/compressors/xz/XZCompressorInputStream.java index bc97d0f2d7a..a4623c751dc 100644 --- a/src/main/java/org/apache/commons/compress/compressors/xz/XZCompressorInputStream.java +++ b/src/main/java/org/apache/commons/compress/compressors/xz/XZCompressorInputStream.java @@ -26,6 +26,7 @@ import org.apache.commons.compress.compressors.lzma.LZMACompressorInputStream; import org.apache.commons.compress.compressors.lzma.LZMACompressorOutputStream; import org.apache.commons.compress.utils.InputStreamStatistics; +import org.apache.commons.io.IOUtils; import org.apache.commons.io.build.AbstractStreamBuilder; import org.apache.commons.io.input.BoundedInputStream; import org.tukaani.xz.LZMA2Options; @@ -253,7 +254,7 @@ public int read(final byte[] buf, final int off, final int len) throws IOExcepti @Override public long skip(final long n) throws IOException { try { - return org.apache.commons.io.IOUtils.skip(in, n); + return IOUtils.skip(in, n); } catch (final org.tukaani.xz.MemoryLimitException e) { // Convert to Commons Compress MemoryLimtException throw newMemoryLimitException(e); diff --git a/src/main/java/org/apache/commons/compress/compressors/zstandard/ZstdCompressorInputStream.java b/src/main/java/org/apache/commons/compress/compressors/zstandard/ZstdCompressorInputStream.java index 32b934568f4..6f36f64c6f0 100644 --- a/src/main/java/org/apache/commons/compress/compressors/zstandard/ZstdCompressorInputStream.java +++ b/src/main/java/org/apache/commons/compress/compressors/zstandard/ZstdCompressorInputStream.java @@ -24,6 +24,7 @@ import org.apache.commons.compress.compressors.CompressorInputStream; import org.apache.commons.compress.utils.InputStreamStatistics; +import org.apache.commons.io.IOUtils; import org.apache.commons.io.input.BoundedInputStream; import com.github.luben.zstd.BufferPool; @@ -125,7 +126,7 @@ public synchronized void reset() throws IOException { @Override public long skip(final long n) throws IOException { 
- return org.apache.commons.io.IOUtils.skip(decIS, n); + return IOUtils.skip(decIS, n); } @Override diff --git a/src/main/java/org/apache/commons/compress/harmony/unpack200/FileBands.java b/src/main/java/org/apache/commons/compress/harmony/unpack200/FileBands.java index 5575d4b05d1..ae01a821b2d 100644 --- a/src/main/java/org/apache/commons/compress/harmony/unpack200/FileBands.java +++ b/src/main/java/org/apache/commons/compress/harmony/unpack200/FileBands.java @@ -23,7 +23,6 @@ import org.apache.commons.compress.harmony.pack200.Codec; import org.apache.commons.compress.harmony.pack200.Pack200Exception; -import org.apache.commons.compress.utils.IOUtils; /** * Parses the file band headers (not including the actual bits themselves). At the end of this parse call, the input stream will be positioned at the start of @@ -83,7 +82,7 @@ public void processFileBits() throws IOException { fileBits = new byte[numberOfFiles][]; for (int i = 0; i < numberOfFiles; i++) { final int size = (int) fileSize[i]; - fileBits[i] = IOUtils.readRange(in, size); + fileBits[i] = org.apache.commons.compress.utils.IOUtils.readRange(in, size); final int read = fileBits[i].length; if (size != 0 && read < size) { throw new Pack200Exception("Expected to read " + size + " bytes but read " + read); diff --git a/src/main/java/org/apache/commons/compress/harmony/unpack200/SegmentHeader.java b/src/main/java/org/apache/commons/compress/harmony/unpack200/SegmentHeader.java index 69830e63fc4..23a74d3edb7 100644 --- a/src/main/java/org/apache/commons/compress/harmony/unpack200/SegmentHeader.java +++ b/src/main/java/org/apache/commons/compress/harmony/unpack200/SegmentHeader.java @@ -25,7 +25,6 @@ import org.apache.commons.compress.harmony.pack200.BHSDCodec; import org.apache.commons.compress.harmony.pack200.Codec; import org.apache.commons.compress.harmony.pack200.Pack200Exception; -import org.apache.commons.compress.utils.IOUtils; /** * SegmentHeader is the header band of a {@link Segment}. 
@@ -301,7 +300,7 @@ public void read(final InputStream in) throws IOException, Error, Pack200Excepti parseCpCounts(in); parseClassCounts(in); if (getBandHeadersSize() > 0) { - setBandHeadersData(IOUtils.readRange(in, getBandHeadersSize())); + setBandHeadersData(org.apache.commons.compress.utils.IOUtils.readRange(in, getBandHeadersSize())); } archiveSizeOffset -= in.available(); } diff --git a/src/main/java/org/apache/commons/compress/utils/IOUtils.java b/src/main/java/org/apache/commons/compress/utils/IOUtils.java index 0bdd1a36bfc..2c9da310744 100644 --- a/src/main/java/org/apache/commons/compress/utils/IOUtils.java +++ b/src/main/java/org/apache/commons/compress/utils/IOUtils.java @@ -159,7 +159,7 @@ public static long copyRange(final InputStream input, final long length, final O @Deprecated public static int read(final File file, final byte[] array) throws IOException { try (InputStream inputStream = Files.newInputStream(file.toPath())) { - return readFully(inputStream, array, 0, array.length); + return org.apache.commons.io.IOUtils.read(inputStream, array, 0, array.length); } } @@ -173,9 +173,11 @@ public static int read(final File file, final byte[] array) throws IOException { * @param array buffer to fill. * @return the number of bytes actually read. * @throws IOException Thrown if an I/O error occurs. + * @deprecated Since 1.29.0, use {@link org.apache.commons.io.IOUtils#read(InputStream, byte[])} instead. */ + @Deprecated public static int readFully(final InputStream input, final byte[] array) throws IOException { - return readFully(input, array, 0, array.length); + return org.apache.commons.io.IOUtils.read(input, array); } /** @@ -190,11 +192,10 @@ public static int readFully(final InputStream input, final byte[] array) throws * @param length of bytes to read. * @return the number of bytes actually read. * @throws IOException Thrown if an I/O error occurs. 
+ * @deprecated Since 1.29.0, use {@link org.apache.commons.io.IOUtils#read(InputStream, byte[], int, int)} instead. */ + @Deprecated public static int readFully(final InputStream input, final byte[] array, final int offset, final int length) throws IOException { - if (length < 0 || offset < 0 || length + offset > array.length || length + offset < 0) { - throw new IndexOutOfBoundsException(); - } return org.apache.commons.io.IOUtils.read(input, array, offset, length); } @@ -209,13 +210,11 @@ public static int readFully(final InputStream input, final byte[] array, final i * @param byteBuffer the buffer into which the data is read. * @throws IOException Thrown if an I/O error occurs. * @throws EOFException if the channel reaches the end before reading all the bytes. + * @deprecated Since 1.29.0, use {@link org.apache.commons.io.IOUtils#readFully(ReadableByteChannel, ByteBuffer)} instead. */ + @Deprecated public static void readFully(final ReadableByteChannel channel, final ByteBuffer byteBuffer) throws IOException { - final int expectedLength = byteBuffer.remaining(); - final int read = org.apache.commons.io.IOUtils.read(channel, byteBuffer); - if (read < expectedLength) { - throw new EOFException(); - } + org.apache.commons.io.IOUtils.readFully(channel, byteBuffer); } /** @@ -272,9 +271,11 @@ public static byte[] readRange(final ReadableByteChannel input, final int length * @param toSkip the number of bytes to skip. * @return the number of bytes actually skipped. * @throws IOException Thrown if an I/O error occurs. + * @deprecated Since 1.29.0, use {@link org.apache.commons.io.IOUtils#skip(InputStream, long)} instead. 
*/ + @Deprecated public static long skip(final InputStream input, final long toSkip) throws IOException { - return org.apache.commons.io.IOUtils.skip(input, toSkip, org.apache.commons.io.IOUtils::byteArray); + return org.apache.commons.io.IOUtils.skip(input, toSkip); } /** diff --git a/src/test/java/org/apache/commons/compress/archivers/ZipTest.java b/src/test/java/org/apache/commons/compress/archivers/ZipTest.java index d1cebc36f25..9e39f930578 100644 --- a/src/test/java/org/apache/commons/compress/archivers/ZipTest.java +++ b/src/test/java/org/apache/commons/compress/archivers/ZipTest.java @@ -113,8 +113,8 @@ private void assertSameFileContents(final File expectedFile, final File actualFi try (InputStream actualIs = actual.getInputStream(actualElement); InputStream expectedIs = expected.getInputStream(expectedElement)) { - org.apache.commons.compress.utils.IOUtils.readFully(expectedIs, expectedBuf); - org.apache.commons.compress.utils.IOUtils.readFully(actualIs, actualBuf); + IOUtils.read(expectedIs, expectedBuf); + IOUtils.read(actualIs, actualBuf); } assertArrayEquals(expectedBuf, actualBuf); // Buffers are larger than payload. 
don't care } diff --git a/src/test/java/org/apache/commons/compress/archivers/zip/ScatterSampleTest.java b/src/test/java/org/apache/commons/compress/archivers/zip/ScatterSampleTest.java index 04533ee9846..d24df192367 100644 --- a/src/test/java/org/apache/commons/compress/archivers/zip/ScatterSampleTest.java +++ b/src/test/java/org/apache/commons/compress/archivers/zip/ScatterSampleTest.java @@ -30,7 +30,7 @@ import org.apache.commons.compress.AbstractTempDirTest; import org.apache.commons.compress.parallel.InputStreamSupplier; -import org.apache.commons.compress.utils.IOUtils; +import org.apache.commons.io.IOUtils; import org.junit.jupiter.api.Test; class ScatterSampleTest extends AbstractTempDirTest { @@ -41,7 +41,7 @@ private void checkFile(final File result) throws IOException { assertEquals("test1.xml", archiveEntry1.getName()); try (InputStream inputStream = zipFile.getInputStream(archiveEntry1)) { final byte[] b = new byte[6]; - final int i = IOUtils.readFully(inputStream, b); + final int i = IOUtils.read(inputStream, b); assertEquals(5, i); assertEquals('H', b[0]); assertEquals('o', b[4]); diff --git a/src/test/java/org/apache/commons/compress/archivers/zip/Zip64SupportIT.java b/src/test/java/org/apache/commons/compress/archivers/zip/Zip64SupportIT.java index 9911b6b5296..3f6db849979 100644 --- a/src/test/java/org/apache/commons/compress/archivers/zip/Zip64SupportIT.java +++ b/src/test/java/org/apache/commons/compress/archivers/zip/Zip64SupportIT.java @@ -42,12 +42,11 @@ import java.util.zip.ZipEntry; import org.apache.commons.compress.AbstractTest; +import org.apache.commons.io.IOUtils; import org.apache.commons.io.RandomAccessFileMode; import org.junit.jupiter.api.Disabled; import org.junit.jupiter.api.Test; -import shaded.org.apache.commons.io.IOUtils; - /** * Tests {@link ZipFile} Zip64 support. 
*/ diff --git a/src/test/java/org/apache/commons/compress/archivers/zip/ZipFileTest.java b/src/test/java/org/apache/commons/compress/archivers/zip/ZipFileTest.java index 6dc78d31581..7f297c3b084 100644 --- a/src/test/java/org/apache/commons/compress/archivers/zip/ZipFileTest.java +++ b/src/test/java/org/apache/commons/compress/archivers/zip/ZipFileTest.java @@ -258,7 +258,7 @@ void testAlternativeZstdInputStream() throws Exception { try (InputStream inputStream = zf.getInputStream(ze)) { assertNotNull(inputStream); assertFalse(zf.isUsed()); - final int bytesRead = org.apache.commons.compress.utils.IOUtils.readFully(inputStream, buffer); + final int bytesRead = IOUtils.read(inputStream, buffer); assertEquals(6066, bytesRead); assertTrue(zf.isUsed()); } @@ -271,7 +271,7 @@ void testAlternativeZstdInputStream() throws Exception { try (InputStream inputStream = builtZipFile.getInputStream(ze)) { assertTrue(inputStream instanceof ZstdInputStream); assertNotNull(inputStream); - final int bytesRead = org.apache.commons.compress.utils.IOUtils.readFully(inputStream, buffer); + final int bytesRead = IOUtils.read(inputStream, buffer); assertEquals(6066, bytesRead); } } @@ -905,7 +905,7 @@ void testSelfExtractingZipUsingUnzipsfx() throws IOException, InterruptedExcepti } try (InputStream inputStream = Files.newInputStream(extractedFile.toPath())) { - bytesRead = org.apache.commons.compress.utils.IOUtils.readFully(inputStream, buffer); + bytesRead = IOUtils.read(inputStream, buffer); assertEquals(testData.length, bytesRead); assertArrayEquals(testData, Arrays.copyOfRange(buffer, 0, bytesRead)); } diff --git a/src/test/java/org/apache/commons/compress/compressors/gzip/GzipCompressorOutputStreamTest.java b/src/test/java/org/apache/commons/compress/compressors/gzip/GzipCompressorOutputStreamTest.java index 1d3595f5f68..f00a44bee31 100644 --- a/src/test/java/org/apache/commons/compress/compressors/gzip/GzipCompressorOutputStreamTest.java +++ 
b/src/test/java/org/apache/commons/compress/compressors/gzip/GzipCompressorOutputStreamTest.java @@ -45,13 +45,12 @@ import org.apache.commons.codec.binary.Hex; import org.apache.commons.compress.compressors.gzip.ExtraField.SubField; import org.apache.commons.compress.compressors.gzip.GzipParameters.OS; +import org.apache.commons.io.IOUtils; import org.apache.commons.lang3.ArrayFill; import org.junit.jupiter.api.Test; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.CsvSource; -import shaded.org.apache.commons.io.IOUtils; - /** * Tests {@link GzipCompressorOutputStream}. */ From 86e7021f696ef020a50ea9ccdfcd733dff9bb44d Mon Sep 17 00:00:00 2001 From: "Piotr P. Karwasz" Date: Mon, 20 Oct 2025 13:39:31 +0200 Subject: [PATCH 37/40] Use `consume` instead of `skip` (#738) Replace `IOUtils.skip(in, Long.MAX_VALUE)` with `IOUtils.consume(in)` for clarity and intent, removing the need for a magic constant. --- .../archivers/ar/ArArchiveInputStream.java | 2 +- .../archivers/arj/ArjArchiveInputStream.java | 2 +- .../archivers/cpio/CpioArchiveInputStream.java | 15 +-------------- .../compress/archivers/sevenz/SevenZFile.java | 2 +- .../archivers/tar/TarArchiveInputStream.java | 2 +- 5 files changed, 5 insertions(+), 18 deletions(-) diff --git a/src/main/java/org/apache/commons/compress/archivers/ar/ArArchiveInputStream.java b/src/main/java/org/apache/commons/compress/archivers/ar/ArArchiveInputStream.java index cc85ba1c33b..d8b74e639f1 100644 --- a/src/main/java/org/apache/commons/compress/archivers/ar/ArArchiveInputStream.java +++ b/src/main/java/org/apache/commons/compress/archivers/ar/ArArchiveInputStream.java @@ -318,7 +318,7 @@ public ArArchiveEntry getNextEntry() throws IOException { do { // If there is a current entry, skip any unread data and padding if (currentEntry != null) { - IOUtils.skip(this, Long.MAX_VALUE); // Skip to end of current entry + IOUtils.consume(this); // Skip to end of current entry skipRecordPadding();
// Skip padding to align to the next record } diff --git a/src/main/java/org/apache/commons/compress/archivers/arj/ArjArchiveInputStream.java b/src/main/java/org/apache/commons/compress/archivers/arj/ArjArchiveInputStream.java index a26b4a88234..60d8d39d9e3 100644 --- a/src/main/java/org/apache/commons/compress/archivers/arj/ArjArchiveInputStream.java +++ b/src/main/java/org/apache/commons/compress/archivers/arj/ArjArchiveInputStream.java @@ -263,7 +263,7 @@ public ArjArchiveEntry getNextEntry() throws IOException { if (currentInputStream != null) { // return value ignored as IOUtils.skip ensures the stream is drained completely final InputStream input = currentInputStream; - IOUtils.skip(input, Long.MAX_VALUE); + IOUtils.consume(input); currentInputStream.close(); currentLocalFileHeader = null; currentInputStream = null; diff --git a/src/main/java/org/apache/commons/compress/archivers/cpio/CpioArchiveInputStream.java b/src/main/java/org/apache/commons/compress/archivers/cpio/CpioArchiveInputStream.java index 317755eba77..1d580dc6b70 100644 --- a/src/main/java/org/apache/commons/compress/archivers/cpio/CpioArchiveInputStream.java +++ b/src/main/java/org/apache/commons/compress/archivers/cpio/CpioArchiveInputStream.java @@ -308,19 +308,6 @@ public void close() throws IOException { } } - /** - * Closes the current CPIO entry and positions the stream for reading the next entry. - * - * @throws IOException if an I/O error has occurred or if a CPIO file error has occurred - */ - private void closeEntry() throws IOException { - // the skip implementation of this class will not skip more - // than Integer.MAX_VALUE bytes - while (skip((long) Integer.MAX_VALUE) == Integer.MAX_VALUE) { // NOPMD NOSONAR - // do nothing - } - } - /** * Reads the next CPIO file entry and positions stream at the beginning of the entry data. 
* @@ -332,7 +319,7 @@ private void closeEntry() throws IOException { public CpioArchiveEntry getNextCPIOEntry() throws IOException { checkOpen(); if (entry != null) { - closeEntry(); + IOUtils.consume(this); } readFully(buffer2, 0, buffer2.length); if (CpioUtil.byteArray2long(buffer2, false) == MAGIC_OLD_BINARY) { diff --git a/src/main/java/org/apache/commons/compress/archivers/sevenz/SevenZFile.java b/src/main/java/org/apache/commons/compress/archivers/sevenz/SevenZFile.java index 05ad45c0af4..11b3cf2d0d3 100644 --- a/src/main/java/org/apache/commons/compress/archivers/sevenz/SevenZFile.java +++ b/src/main/java/org/apache/commons/compress/archivers/sevenz/SevenZFile.java @@ -1104,7 +1104,7 @@ private InputStream getCurrentStream() throws IOException { // streams to get access to an entry. We defer this until really needed // so that entire blocks can be skipped without wasting time for decompression. try (InputStream stream = deferredBlockStreams.remove(0)) { - IOUtils.skip(stream, Long.MAX_VALUE); + IOUtils.consume(stream); } compressedBytesReadFromCurrentEntry = 0; } diff --git a/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveInputStream.java b/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveInputStream.java index fa93dea545d..dee3f08f03d 100644 --- a/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveInputStream.java +++ b/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveInputStream.java @@ -494,7 +494,7 @@ public TarArchiveEntry getNextEntry() throws IOException { do { // If there is a current entry, skip any unread data and padding if (currentInputStream != null) { - IOUtils.skip(currentInputStream, Long.MAX_VALUE); // Skip to end of current entry + IOUtils.consume(currentInputStream); // Skip to end of current entry skipRecordPadding(); // Skip padding to align to the next record } // Read the next header record From eef0abd7fc89b80a3e425466f8efcf93c4033fe5 Mon Sep 17 00:00:00 2001 From: 
Gary Gregory Date: Sun, 2 Nov 2025 11:46:07 -0500 Subject: [PATCH 38/40] Use HTTPS in URL --- .../compress/archivers/arj/ArjArchiveInputStream.java | 2 +- .../commons/compress/archivers/cpio/CpioConstants.java | 2 +- .../commons/compress/archivers/zip/ZipArchiveEntry.java | 2 +- .../commons/compress/compressors/bzip2/BlockSort.java | 6 +++--- src/site/xdoc/conventions.xml | 2 +- src/site/xdoc/examples.xml | 2 +- src/site/xdoc/index.xml | 4 ++-- src/site/xdoc/tar.xml | 6 +++--- 8 files changed, 13 insertions(+), 13 deletions(-) diff --git a/src/main/java/org/apache/commons/compress/archivers/arj/ArjArchiveInputStream.java b/src/main/java/org/apache/commons/compress/archivers/arj/ArjArchiveInputStream.java index 60d8d39d9e3..7257e5cd6c5 100644 --- a/src/main/java/org/apache/commons/compress/archivers/arj/ArjArchiveInputStream.java +++ b/src/main/java/org/apache/commons/compress/archivers/arj/ArjArchiveInputStream.java @@ -40,7 +40,7 @@ * Implements the "arj" archive format as an InputStream. * *
* * @NotThreadSafe diff --git a/src/main/java/org/apache/commons/compress/archivers/cpio/CpioConstants.java b/src/main/java/org/apache/commons/compress/archivers/cpio/CpioConstants.java index 608ef5d4d59..f9c37d8156b 100644 --- a/src/main/java/org/apache/commons/compress/archivers/cpio/CpioConstants.java +++ b/src/main/java/org/apache/commons/compress/archivers/cpio/CpioConstants.java @@ -24,7 +24,7 @@ * Based on code from the jRPM project. * *- Reference 1
- *- Reference 2
+ *- Reference 2
*- * A list of the {@code C_xxx} constants is here. + * A list of the {@code C_xxx} constants is here. *
** TODO Next major version: Update to a class. diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveEntry.java b/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveEntry.java index 59879f562d4..c47650c5685 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveEntry.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveEntry.java @@ -45,7 +45,7 @@ * Extension that adds better handling of extra fields and provides access to the internal and external file attributes. * *
- * The extra data is expected to follow the recommendation of APPNOTE.TXT: + * The extra data is expected to follow the recommendation of APPNOTE.TXT: *
**
- the extra byte array consists of a sequence of extra fields
diff --git a/src/main/java/org/apache/commons/compress/compressors/bzip2/BlockSort.java b/src/main/java/org/apache/commons/compress/compressors/bzip2/BlockSort.java index b4e77ab0866..2a6117d292c 100644 --- a/src/main/java/org/apache/commons/compress/compressors/bzip2/BlockSort.java +++ b/src/main/java/org/apache/commons/compress/compressors/bzip2/BlockSort.java @@ -62,10 +62,10 @@ * For more information see for example: * *- *
* diff --git a/src/site/xdoc/conventions.xml b/src/site/xdoc/conventions.xml index e18561230b5..4457a8639b6 100644 --- a/src/site/xdoc/conventions.xml +++ b/src/site/xdoc/conventions.xml @@ -36,7 +36,7 @@- Burrows, M. and Wheeler, D.: A Block-sorting Lossless Data Compression + *
- Burrows, M. and Wheeler, D.: A Block-sorting Lossless Data Compression * Algorithm
- *- Manber, U. and Myers, G.: Suffix arrays: A new method for on-line string searches
- *- Bentley, J.L. and Sedgewick, R.: Fast Algorithms for Sorting and + *
- Manber, U. and Myers, G.: Suffix arrays: A new method for on-line string searches
+ *- Bentley, J.L. and Sedgewick, R.: Fast Algorithms for Sorting and * Searching Strings
*We use some of the annotations from - JCIP + JCIP as Javadoc tags. The used tags are:
diff --git a/src/site/xdoc/examples.xml b/src/site/xdoc/examples.xml index 4b781074c45..83a9c99d42e 100644 --- a/src/site/xdoc/examples.xml +++ b/src/site/xdoc/examples.xml @@ -946,7 +946,7 @@ in.close();
From 520a57476ff780c73c4ec056f7c5e7404c62add6 Mon Sep 17 00:00:00 2001 From: "Piotr P. Karwasz"There are two different "formats" used for lz4. The format called + href="https://lz4.github.io/lz4/">lz4. The format called "block format" only contains the raw compressed data while the other provides a higher level "frame format" - Commons Compress offers two different stream classes for reading or diff --git a/src/site/xdoc/index.xml b/src/site/xdoc/index.xml index 7fc5569a616..8c44f57a4d9 100644 --- a/src/site/xdoc/index.xml +++ b/src/site/xdoc/index.xml @@ -41,13 +41,13 @@ Apache goes. The tar package is originally Tim Endres' public domain package. The bzip2 package is based on the work done by Keiron Liddle as well as Julian Seward's - libbzip2. + libbzip2. It has migrated via:
Ant -> Avalon-Excalibur -> Commons-IO -> Commons-Compress.- The cpio package has been contributed by Michael Kuss and - the jRPM + the jRPM project.
- The pack200 code has originally been part of the now retired Apache diff --git a/src/site/xdoc/tar.xml b/src/site/xdoc/tar.xml index 38f02f16517..7cbf603cb36 100644 --- a/src/site/xdoc/tar.xml +++ b/src/site/xdoc/tar.xml @@ -63,7 +63,7 @@ many other tar implementations like the ones of OpenBSD, Solaris or MacOS X.
- @@ -88,11 +88,11 @@ entry is added. This is the default.
LONGFILE_POSIX: use a PAX extended + href="https://pubs.opengroup.org/onlinepubs/009695399/utilities/pax.html#tag_04_100_13_03">extended header as defined by POSIX 1003.1. Most modern tar implementations are able to extract such archives. since Commons Compress 1.4BIGNUMBER_STAR: use a variant first introduced by Jörg Schilling's star + href="https://developer.berlios.de/projects/star">star and later adopted by GNU and BSD tar. This method is not supported by all implementations.BIGNUMBER_POSIX: use a PAX extended + href="https://pubs.opengroup.org/onlinepubs/009695399/utilities/pax.html#tag_04_100_13_03">extended header as defined by POSIX 1003.1. Most modern tar implementations are able to extract such archives.Date: Tue, 4 Nov 2025 22:27:32 +0100 Subject: [PATCH 39/40] Test with `commons-io` 2.21.0 RC1 --- pom.xml | 19 ++++++++++++------- 1 file changed, 12 insertions(+), 7 deletions(-) diff --git a/pom.xml b/pom.xml index 0648fa85a58..6cfc20884aa 100644 --- a/pom.xml +++ b/pom.xml @@ -91,14 +91,19 @@ Brotli, Zstandard and ar, cpio, jar, tar, zip, dump, 7z, arj. LICENSE.txt, NOTICE.txt, **/maven-archiver/pom.properties + + + -central +https://repo.maven.apache.org/maven2/ +- apache.snapshots -Apache Snapshot Repository -https://repository.apache.org/content/groups/snapshots -+ ++ apache.staging +Apache Staging Repository +https://repository.apache.org/content/repositories/orgapachecommons-1871/ +false -@@ -214,7 +219,7 @@ Brotli, Zstandard and ar, cpio, jar, tar, zip, dump, 7z, arj. commons-io commons-io -2.21.0-SNAPSHOT +2.21.0 org.apache.commons From fac0d66be018f5d937a6efd1dde59883b01b0ea7 Mon Sep 17 00:00:00 2001 From: "Piotr P. 
Karwasz"Date: Fri, 7 Nov 2025 09:42:31 +0100 Subject: [PATCH 40/40] fix: remove staging repository --- pom.xml | 16 ---------------- 1 file changed, 16 deletions(-) diff --git a/pom.xml b/pom.xml index 6cfc20884aa..418b0a96234 100644 --- a/pom.xml +++ b/pom.xml @@ -90,22 +90,6 @@ Brotli, Zstandard and ar, cpio, jar, tar, zip, dump, 7z, arj. ${basedir}/src/conf/checkstyle/checkstyle-suppressions.xml LICENSE.txt, NOTICE.txt, **/maven-archiver/pom.properties -- - - - -central -https://repo.maven.apache.org/maven2/ -- -apache.staging -Apache Staging Repository -https://repository.apache.org/content/repositories/orgapachecommons-1871/ -- -false -jira https://issues.apache.org/jira/browse/COMPRESS