This is an automated email from the ASF dual-hosted git repository.

ggregory pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/commons-compress.git

commit 92d382e3cd6f1199340121ee8ad3bdf95f2154d0
Author: Gary Gregory <garydgreg...@gmail.com>
AuthorDate: Wed Jan 17 09:22:49 2024 -0500

    Reduce duplication by having ArchiveInputStream extend FilterInputStream
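    
    The change follows the pattern sketched below. This is a simplified, hypothetical
    illustration (SketchArchiveInputStream is not part of the shipped code): instead of
    each subclass keeping its own private input/inputStream field, the wrapped stream is
    handed to the superclass and read through the protected "in" field that
    java.io.FilterInputStream provides.
    
        // Minimal sketch of the pattern, not the actual Commons Compress source.
        import java.io.FilterInputStream;
        import java.io.IOException;
        import java.io.InputStream;
        
        abstract class SketchArchiveInputStream extends FilterInputStream {
        
            protected SketchArchiveInputStream(final InputStream inputStream) {
                // FilterInputStream stores the wrapped stream in its protected "in" field.
                super(inputStream);
            }
        
            @Override
            public int read(final byte[] b, final int off, final int len) throws IOException {
                // Subclasses read from the inherited "in" instead of a duplicated field.
                return in.read(b, off, len);
            }
        }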
---
 src/changes/changes.xml                            |  1 +
 .../compress/archivers/ArchiveInputStream.java     | 13 +++++---
 .../archivers/ar/ArArchiveInputStream.java         | 23 ++++++-------
 .../archivers/arj/ArjArchiveInputStream.java       | 32 +++++++++---------
 .../archivers/cpio/CpioArchiveInputStream.java     |  3 +-
 .../archivers/dump/DumpArchiveInputStream.java     |  2 +-
 .../archivers/tar/TarArchiveInputStream.java       | 38 ++++++++++------------
 .../archivers/zip/ZipArchiveInputStream.java       | 31 ++++++++----------
 8 files changed, 68 insertions(+), 75 deletions(-)

diff --git a/src/changes/changes.xml b/src/changes/changes.xml
index c0819d2e4..8aefbcba8 100644
--- a/src/changes/changes.xml
+++ b/src/changes/changes.xml
@@ -74,6 +74,7 @@ The <action> type attribute can be add,update,fix,remove.
       <action type="fix" dev="ggregory" due-to="Gary Gregory">Deprecate 
ByteUtils.InputStreamByteSupplier.</action>
       <action type="fix" dev="ggregory" due-to="Gary Gregory">Deprecate 
ByteUtils.fromLittleEndian(InputStream, int).</action>
       <action type="fix" dev="ggregory" due-to="Gary Gregory">Deprecate 
ByteUtils.toLittleEndian(DataOutput, long, int).</action>
+      <action type="fix" dev="ggregory" due-to="Gary Gregory">Reduce 
duplication by having ArchiveInputStream extend FilterInputStream.</action>
       <!-- ADD -->
       <action type="add" dev="ggregory" due-to="Gary Gregory">Add and use 
ZipFile.builder(), ZipFile.Builder, and deprecate constructors.</action>
       <action type="add" dev="ggregory" due-to="Gary Gregory">Add and use 
SevenZFile.builder(), SevenZFile.Builder, and deprecate constructors.</action>
diff --git a/src/main/java/org/apache/commons/compress/archivers/ArchiveInputStream.java b/src/main/java/org/apache/commons/compress/archivers/ArchiveInputStream.java
index 22cd2e98a..8787cceee 100644
--- a/src/main/java/org/apache/commons/compress/archivers/ArchiveInputStream.java
+++ b/src/main/java/org/apache/commons/compress/archivers/ArchiveInputStream.java
@@ -18,11 +18,13 @@
  */
 package org.apache.commons.compress.archivers;
 
+import java.io.FilterInputStream;
 import java.io.IOException;
 import java.io.InputStream;
 import java.nio.charset.Charset;
 
 import org.apache.commons.io.Charsets;
+import org.apache.commons.io.input.NullInputStream;
 
 /**
  * Archive input streams <b>MUST</b> override the {@link #read(byte[], int, int)} - or {@link #read()} - method so that reading from the stream generates EOF
@@ -42,7 +44,7 @@ import org.apache.commons.io.Charsets;
  *
  * @param <E> The type of {@link ArchiveEntry} produced.
  */
-public abstract class ArchiveInputStream<E extends ArchiveEntry> extends InputStream {
+public abstract class ArchiveInputStream<E extends ArchiveEntry> extends FilterInputStream {
 
     private static final int BYTE_MASK = 0xFF;
 
@@ -57,7 +59,7 @@ public abstract class ArchiveInputStream<E extends ArchiveEntry> extends InputSt
      * Constructs a new instance.
      */
     public ArchiveInputStream() {
-        this(Charset.defaultCharset());
+        this(NullInputStream.INSTANCE, Charset.defaultCharset());
     }
 
     /**
@@ -67,7 +69,8 @@ public abstract class ArchiveInputStream<E extends ArchiveEntry> extends InputSt
      * @since 1.26.0
      */
     // This will be protected once subclasses use builders.
-    private ArchiveInputStream(final Charset charset) {
+    private ArchiveInputStream(final InputStream inputStream, final Charset charset) {
+        super(inputStream);
         this.charset = Charsets.toCharset(charset);
     }
 
@@ -77,8 +80,8 @@ public abstract class ArchiveInputStream<E extends ArchiveEntry> extends InputSt
      * @param charsetName charset name.
      * @since 1.26.0
      */
-    protected ArchiveInputStream(final String charsetName) {
-        this(Charsets.toCharset(charsetName));
+    protected ArchiveInputStream(final InputStream inputStream, final String charsetName) {
+        this(inputStream, Charsets.toCharset(charsetName));
     }
 
     /**
diff --git a/src/main/java/org/apache/commons/compress/archivers/ar/ArArchiveInputStream.java b/src/main/java/org/apache/commons/compress/archivers/ar/ArArchiveInputStream.java
index 940408fc9..2ccae9bf9 100644
--- a/src/main/java/org/apache/commons/compress/archivers/ar/ArArchiveInputStream.java
+++ b/src/main/java/org/apache/commons/compress/archivers/ar/ArArchiveInputStream.java
@@ -118,8 +118,6 @@ public class ArArchiveInputStream extends ArchiveInputStream<ArArchiveEntry> {
                 && signature[5] == 0x68 && signature[6] == 0x3e && signature[7] == 0x0a;
     }
 
-    private final InputStream input;
-
     private long offset;
 
     private boolean closed;
@@ -146,8 +144,7 @@ public class ArArchiveInputStream extends ArchiveInputStream<ArArchiveEntry> {
      * @param inputStream the ar input stream
      */
     public ArArchiveInputStream(final InputStream inputStream) {
-        super(StandardCharsets.US_ASCII.name());
-        this.input = inputStream;
+        super(inputStream, StandardCharsets.US_ASCII.name());
     }
 
     private int asInt(final byte[] byteArray, final int offset, final int len) throws IOException {
@@ -183,7 +180,7 @@ public class ArArchiveInputStream extends ArchiveInputStream<ArArchiveEntry> {
     public void close() throws IOException {
         if (!closed) {
             closed = true;
-            input.close();
+            in.close();
         }
         currentEntry = null;
     }
@@ -197,7 +194,7 @@ public class ArArchiveInputStream extends ArchiveInputStream<ArArchiveEntry> {
      */
     private String getBSDLongName(final String bsdLongName) throws IOException {
         final int nameLen = ParsingUtils.parseIntValue(bsdLongName.substring(BSD_LONGNAME_PREFIX_LEN));
-        final byte[] name = IOUtils.readRange(input, nameLen);
+        final byte[] name = IOUtils.readRange(in, nameLen);
         final int read = name.length;
         trackReadBytes(read);
         if (read != nameLen) {
@@ -248,14 +245,14 @@ public class ArArchiveInputStream extends ArchiveInputStream<ArArchiveEntry> {
     public ArArchiveEntry getNextArEntry() throws IOException {
         if (currentEntry != null) {
             final long entryEnd = entryOffset + currentEntry.getLength();
-            final long skipped = org.apache.commons.io.IOUtils.skip(input, entryEnd - offset);
+            final long skipped = org.apache.commons.io.IOUtils.skip(in, entryEnd - offset);
             trackReadBytes(skipped);
             currentEntry = null;
         }
 
         if (offset == 0) {
             final byte[] expected = ArchiveUtils.toAsciiBytes(ArArchiveEntry.HEADER);
-            final byte[] realized = IOUtils.readRange(input, expected.length);
+            final byte[] realized = IOUtils.readRange(in, expected.length);
             final int read = realized.length;
             trackReadBytes(read);
             if (read != expected.length) {
@@ -267,7 +264,7 @@ public class ArArchiveInputStream extends ArchiveInputStream<ArArchiveEntry> {
         }
 
         if (offset % 2 != 0) {
-            if (input.read() < 0) {
+            if (in.read() < 0) {
                 // hit eof
                 return null;
             }
@@ -275,7 +272,7 @@ public class ArArchiveInputStream extends ArchiveInputStream<ArArchiveEntry> {
         }
 
         {
-            final int read = IOUtils.readFully(input, metaData);
+            final int read = IOUtils.readFully(in, metaData);
             trackReadBytes(read);
             if (read == 0) {
                 return null;
@@ -287,7 +284,7 @@ public class ArArchiveInputStream extends ArchiveInputStream<ArArchiveEntry> {
 
         {
             final byte[] expected = ArchiveUtils.toAsciiBytes(ArArchiveEntry.TRAILER);
-            final byte[] realized = IOUtils.readRange(input, expected.length);
+            final byte[] realized = IOUtils.readRange(in, expected.length);
             final int read = realized.length;
             trackReadBytes(read);
             if (read != expected.length) {
@@ -381,7 +378,7 @@ public class ArArchiveInputStream extends ArchiveInputStream<ArArchiveEntry> {
             return -1;
         }
         final int toRead = (int) Math.min(len, entryEnd - offset);
-        final int ret = this.input.read(b, off, toRead);
+        final int ret = this.in.read(b, off, toRead);
         trackReadBytes(ret);
         return ret;
     }
@@ -399,7 +396,7 @@ public class ArArchiveInputStream extends ArchiveInputStream<ArArchiveEntry> {
             throw new IOException("Broken archive, unable to parse GNU string table length field as a number", ex);
         }
 
-        namebuffer = IOUtils.readRange(input, bufflen);
+        namebuffer = IOUtils.readRange(in, bufflen);
         final int read = namebuffer.length;
         trackReadBytes(read);
         if (read != bufflen) {
diff --git a/src/main/java/org/apache/commons/compress/archivers/arj/ArjArchiveInputStream.java b/src/main/java/org/apache/commons/compress/archivers/arj/ArjArchiveInputStream.java
index a7b30744a..951e946c0 100644
--- a/src/main/java/org/apache/commons/compress/archivers/arj/ArjArchiveInputStream.java
+++ b/src/main/java/org/apache/commons/compress/archivers/arj/ArjArchiveInputStream.java
@@ -58,7 +58,7 @@ public class ArjArchiveInputStream extends ArchiveInputStream<ArjArchiveEntry> {
         return length >= 2 && (0xff & signature[0]) == ARJ_MAGIC_1 && (0xff & signature[1]) == ARJ_MAGIC_2;
     }
 
-    private final DataInputStream in;
+    private final DataInputStream dis;
     private final MainHeader mainHeader;
     private LocalFileHeader currentLocalFileHeader;
     private InputStream currentInputStream;
@@ -81,8 +81,8 @@ public class ArjArchiveInputStream extends ArchiveInputStream<ArjArchiveEntry> {
      * @throws ArchiveException if an exception occurs while reading
      */
     public ArjArchiveInputStream(final InputStream inputStream, final String charsetName) throws ArchiveException {
-        super(charsetName);
-        in = new DataInputStream(inputStream);
+        super(inputStream, charsetName);
+        in = dis = new DataInputStream(inputStream);
         try {
             mainHeader = readMainHeader();
             if ((mainHeader.arjFlags & MainHeader.Flags.GARBLED) != 0) {
@@ -103,7 +103,7 @@ public class ArjArchiveInputStream extends ArchiveInputStream<ArjArchiveEntry> {
 
     @Override
     public void close() throws IOException {
-        in.close();
+        dis.close();
     }
 
     /**
@@ -137,7 +137,7 @@ public class ArjArchiveInputStream extends ArchiveInputStream<ArjArchiveEntry> {
 
         currentLocalFileHeader = readLocalFileHeader();
         if (currentLocalFileHeader != null) {
-            currentInputStream = new BoundedInputStream(in, currentLocalFileHeader.compressedSize);
+            currentInputStream = new BoundedInputStream(dis, currentLocalFileHeader.compressedSize);
             if (currentLocalFileHeader.method == LocalFileHeader.Methods.STORED) {
                 currentInputStream = new CRC32VerifyingInputStream(currentInputStream, currentLocalFileHeader.originalSize,
                         currentLocalFileHeader.originalCrc32);
@@ -198,19 +198,19 @@ public class ArjArchiveInputStream extends ArchiveInputStream<ArjArchiveEntry> {
         byte[] basicHeaderBytes = null;
         do {
             int first;
-            int second = read8(in);
+            int second = read8(dis);
             do {
                 first = second;
-                second = read8(in);
+                second = read8(dis);
             } while (first != ARJ_MAGIC_1 && second != ARJ_MAGIC_2);
-            final int basicHeaderSize = read16(in);
+            final int basicHeaderSize = read16(dis);
             if (basicHeaderSize == 0) {
                 // end of archive
                 return null;
             }
             if (basicHeaderSize <= 2600) {
-                basicHeaderBytes = readRange(in, basicHeaderSize);
-                final long basicHeaderCrc32 = read32(in) & 0xFFFFFFFFL;
+                basicHeaderBytes = readRange(dis, basicHeaderSize);
+                final long basicHeaderCrc32 = read32(dis) & 0xFFFFFFFFL;
                 final CRC32 crc32 = new CRC32();
                 crc32.update(basicHeaderBytes);
                 if (basicHeaderCrc32 == crc32.getValue()) {
@@ -258,9 +258,9 @@ public class ArjArchiveInputStream extends ArchiveInputStream<ArjArchiveEntry> {
 
                 final ArrayList<byte[]> extendedHeaders = new ArrayList<>();
                 int extendedHeaderSize;
-                while ((extendedHeaderSize = read16(in)) > 0) {
-                    final byte[] extendedHeaderBytes = readRange(in, extendedHeaderSize);
-                    final long extendedHeaderCrc32 = 0xffffFFFFL & read32(in);
+                while ((extendedHeaderSize = read16(dis)) > 0) {
+                    final byte[] extendedHeaderBytes = readRange(dis, extendedHeaderSize);
+                    final long extendedHeaderCrc32 = 0xffffFFFFL & read32(dis);
                     final CRC32 crc32 = new CRC32();
                     crc32.update(extendedHeaderBytes);
                     if (extendedHeaderCrc32 != crc32.getValue()) {
@@ -316,10 +316,10 @@ public class ArjArchiveInputStream extends ArchiveInputStream<ArjArchiveEntry> {
         hdr.name = readString(basicHeader);
         hdr.comment = readString(basicHeader);
 
-        final int extendedHeaderSize = read16(in);
+        final int extendedHeaderSize = read16(dis);
         if (extendedHeaderSize > 0) {
-            hdr.extendedHeaderBytes = readRange(in, extendedHeaderSize);
-            final long extendedHeaderCrc32 = 0xffffFFFFL & read32(in);
+            hdr.extendedHeaderBytes = readRange(dis, extendedHeaderSize);
+            final long extendedHeaderCrc32 = 0xffffFFFFL & read32(dis);
             final CRC32 crc32 = new CRC32();
             crc32.update(hdr.extendedHeaderBytes);
             if (extendedHeaderCrc32 != crc32.getValue()) {
diff --git a/src/main/java/org/apache/commons/compress/archivers/cpio/CpioArchiveInputStream.java b/src/main/java/org/apache/commons/compress/archivers/cpio/CpioArchiveInputStream.java
index 0b5d7439d..620f9dca5 100644
--- a/src/main/java/org/apache/commons/compress/archivers/cpio/CpioArchiveInputStream.java
+++ b/src/main/java/org/apache/commons/compress/archivers/cpio/CpioArchiveInputStream.java
@@ -137,7 +137,6 @@ public class CpioArchiveInputStream extends ArchiveInputStream<CpioArchiveEntry>
 
     private long crc;
 
-    private final InputStream in;
     // cached buffers - must only be used locally in the class (COMPRESS-172 - reduce garbage collection)
     private final byte[] twoBytesBuf = new byte[2];
     private final byte[] fourBytesBuf = new byte[4];
@@ -181,7 +180,7 @@ public class CpioArchiveInputStream extends ArchiveInputStream<CpioArchiveEntry>
      * @since 1.6
      */
     public CpioArchiveInputStream(final InputStream in, final int blockSize, final String encoding) {
-        super(encoding);
+        super(in, encoding);
         this.in = in;
         if (blockSize <= 0) {
             throw new IllegalArgumentException("blockSize must be bigger than 0");
diff --git a/src/main/java/org/apache/commons/compress/archivers/dump/DumpArchiveInputStream.java b/src/main/java/org/apache/commons/compress/archivers/dump/DumpArchiveInputStream.java
index d4cd84a91..82454c6e4 100644
--- a/src/main/java/org/apache/commons/compress/archivers/dump/DumpArchiveInputStream.java
+++ b/src/main/java/org/apache/commons/compress/archivers/dump/DumpArchiveInputStream.java
@@ -118,7 +118,7 @@ public class DumpArchiveInputStream extends ArchiveInputStream<DumpArchiveEntry>
      * @throws ArchiveException on error
      */
     public DumpArchiveInputStream(final InputStream is, final String encoding) throws ArchiveException {
-        super(encoding);
+        super(is, encoding);
         this.raw = new TapeInputStream(is);
         this.hasHitEOF = false;
         this.zipEncoding = ZipEncodingHelper.getZipEncoding(encoding);
diff --git a/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveInputStream.java b/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveInputStream.java
index b7491de0b..0899d179d 100644
--- a/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveInputStream.java
+++ b/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveInputStream.java
@@ -96,9 +96,6 @@ public class TarArchiveInputStream extends ArchiveInputStream<TarArchiveEntry> {
     /** How far into the entry the stream is at. */
     private long entryOffset;
 
-    /** An input stream to read from. */
-    private final InputStream inputStream;
-
     /** Input streams for reading sparse entries. **/
     private List<InputStream> sparseInputStreams;
 
@@ -186,8 +183,7 @@ public class TarArchiveInputStream extends ArchiveInputStream<TarArchiveEntry> {
      * @since 1.19
      */
     public TarArchiveInputStream(final InputStream inputStream, final int blockSize, final int recordSize, final String encoding, final boolean lenient) {
-        super(encoding);
-        this.inputStream = inputStream;
+        super(inputStream, encoding);
         this.atEof = false;
         this.zipEncoding = ZipEncodingHelper.getZipEncoding(encoding);
         this.recordSize = recordSize;
@@ -273,7 +269,7 @@ public class TarArchiveInputStream extends ArchiveInputStream<TarArchiveEntry> {
 
             // only store the input streams with non-zero size
             if (sparseHeader.getNumbytes() > 0) {
-                sparseInputStreams.add(new BoundedInputStream(inputStream, sparseHeader.getNumbytes()));
+                sparseInputStreams.add(new BoundedInputStream(in, sparseHeader.getNumbytes()));
             }
 
             offset = sparseHeader.getOffset() + sparseHeader.getNumbytes();
@@ -308,7 +304,7 @@ public class TarArchiveInputStream extends ArchiveInputStream<TarArchiveEntry> {
             }
         }
 
-        inputStream.close();
+        in.close();
     }
 
     /**
@@ -318,7 +314,7 @@ public class TarArchiveInputStream extends ArchiveInputStream<TarArchiveEntry> {
     private void consumeRemainderOfLastBlock() throws IOException {
         final long bytesReadOfLastBlock = getBytesRead() % blockSize;
         if (bytesReadOfLastBlock > 0) {
-            final long skipped = org.apache.commons.io.IOUtils.skip(inputStream, blockSize - bytesReadOfLastBlock);
+            final long skipped = org.apache.commons.io.IOUtils.skip(in, blockSize - bytesReadOfLastBlock);
             count(skipped);
         }
     }
@@ -334,7 +330,7 @@ public class TarArchiveInputStream extends ArchiveInputStream<TarArchiveEntry> {
      */
     private long getActuallySkipped(final long available, final long skipped, final long expected) throws IOException {
         long actuallySkipped = skipped;
-        if (inputStream instanceof FileInputStream) {
+        if (in instanceof FileInputStream) {
             actuallySkipped = Math.min(skipped, available);
         }
 
@@ -593,7 +589,7 @@ public class TarArchiveInputStream extends ArchiveInputStream<TarArchiveEntry> {
 
         // for 1.0 PAX Format, the sparse map is stored in the file data block
         if (currEntry.isPaxGNU1XSparse()) {
-            sparseHeaders = TarUtils.parsePAX1XSparseHeaders(inputStream, recordSize);
+            sparseHeaders = TarUtils.parsePAX1XSparseHeaders(in, recordSize);
             currEntry.setSparseHeaders(sparseHeaders);
         }
 
@@ -638,7 +634,7 @@ public class TarArchiveInputStream extends ArchiveInputStream<TarArchiveEntry> {
             // for sparse entries, we need to read them in another way
             totalRead = readSparse(buf, offset, numToRead);
         } else {
-            totalRead = inputStream.read(buf, offset, numToRead);
+            totalRead = in.read(buf, offset, numToRead);
         }
 
         if (totalRead == -1) {
@@ -693,7 +689,7 @@ public class TarArchiveInputStream extends ArchiveInputStream<TarArchiveEntry> {
      * @throws IOException on error
      */
     protected byte[] readRecord() throws IOException {
-        final int readNow = IOUtils.readFully(inputStream, recordBuffer);
+        final int readNow = IOUtils.readFully(in, recordBuffer);
         count(readNow);
         if (readNow != recordSize) {
             return null;
@@ -717,7 +713,7 @@ public class TarArchiveInputStream extends ArchiveInputStream<TarArchiveEntry> {
     private int readSparse(final byte[] buf, final int offset, final int numToRead) throws IOException {
         // if there are no actual input streams, just read from the original input stream
         if (sparseInputStreams == null || sparseInputStreams.isEmpty()) {
-            return inputStream.read(buf, offset, numToRead);
+            return in.read(buf, offset, numToRead);
         }
 
         if (currentSparseInputStreamIndex >= sparseInputStreams.size()) {
@@ -786,13 +782,13 @@ public class TarArchiveInputStream extends ArchiveInputStream<TarArchiveEntry> {
             return 0;
         }
 
-        final long availableOfInputStream = inputStream.available();
+        final long availableOfInputStream = in.available();
         final long available = currEntry.getRealSize() - entryOffset;
         final long numToSkip = Math.min(n, available);
         long skipped;
 
         if (!currEntry.isSparse()) {
-            skipped = org.apache.commons.io.IOUtils.skip(inputStream, numToSkip);
+            skipped = org.apache.commons.io.IOUtils.skip(in, numToSkip);
             // for non-sparse entry, we should get the bytes actually skipped bytes along with
             // inputStream.available() if inputStream is instance of FileInputStream
             skipped = getActuallySkipped(availableOfInputStream, skipped, numToSkip);
@@ -812,10 +808,10 @@ public class TarArchiveInputStream extends ArchiveInputStream<TarArchiveEntry> {
      */
     private void skipRecordPadding() throws IOException {
         if (!isDirectory() && this.entrySize > 0 && this.entrySize % this.recordSize != 0) {
-            final long available = inputStream.available();
+            final long available = in.available();
             final long numRecords = this.entrySize / this.recordSize + 1;
             final long padding = numRecords * this.recordSize - this.entrySize;
-            long skipped = org.apache.commons.io.IOUtils.skip(inputStream, padding);
+            long skipped = org.apache.commons.io.IOUtils.skip(in, padding);
 
             skipped = getActuallySkipped(available, skipped, padding);
 
@@ -833,7 +829,7 @@ public class TarArchiveInputStream extends ArchiveInputStream<TarArchiveEntry> {
      */
     private long skipSparse(final long n) throws IOException {
         if (sparseInputStreams == null || sparseInputStreams.isEmpty()) {
-            return inputStream.skip(n);
+            return in.skip(n);
         }
 
         long bytesSkipped = 0;
@@ -861,16 +857,16 @@ public class TarArchiveInputStream extends ArchiveInputStream<TarArchiveEntry> {
      */
     private void tryToConsumeSecondEOFRecord() throws IOException {
         boolean shouldReset = true;
-        final boolean marked = inputStream.markSupported();
+        final boolean marked = in.markSupported();
         if (marked) {
-            inputStream.mark(recordSize);
+            in.mark(recordSize);
         }
         try {
             shouldReset = !isEOFRecord(readRecord());
         } finally {
             if (shouldReset && marked) {
                 pushedBackBytes(recordSize);
-                inputStream.reset();
+                in.reset();
             }
         }
     }
diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveInputStream.java b/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveInputStream.java
index 84c43a852..078ad68cf 100644
--- a/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveInputStream.java
+++ b/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveInputStream.java
@@ -240,9 +240,6 @@ public class ZipArchiveInputStream extends ArchiveInputStream<ZipArchiveEntry> i
     /** Whether to look for and use Unicode extra fields. */
     private final boolean useUnicodeExtraFields;
 
-    /** Wrapped stream, will always be a PushbackInputStream. */
-    private final InputStream inputStream;
-
     /** Inflater used for all deflated entries. */
     private final Inflater inf = new Inflater(true);
 
@@ -351,10 +348,10 @@ public class ZipArchiveInputStream extends ArchiveInputStream<ZipArchiveEntry> i
      */
     public ZipArchiveInputStream(final InputStream inputStream, final String encoding, final boolean useUnicodeExtraFields,
             final boolean allowStoredEntriesWithDataDescriptor, final boolean skipSplitSig) {
-        super(encoding);
+        super(inputStream, encoding);
+        this.in = new PushbackInputStream(inputStream, buf.capacity());
         this.zipEncoding = ZipEncodingHelper.getZipEncoding(encoding);
         this.useUnicodeExtraFields = useUnicodeExtraFields;
-        this.inputStream = new PushbackInputStream(inputStream, buf.capacity());
         this.allowStoredEntriesWithDataDescriptor = allowStoredEntriesWithDataDescriptor;
         this.skipSplitSig = skipSplitSig;
         // haven't read anything so far
@@ -438,7 +435,7 @@ public class ZipArchiveInputStream extends ArchiveInputStream<ZipArchiveEntry> i
         if (!closed) {
             closed = true;
             try {
-                inputStream.close();
+                in.close();
             } finally {
                 inf.end();
             }
@@ -518,7 +515,7 @@ public class ZipArchiveInputStream extends ArchiveInputStream<ZipArchiveEntry> i
     private void drainCurrentEntryData() throws IOException {
         long remaining = current.entry.getCompressedSize() - current.bytesReadFromStream;
         while (remaining > 0) {
-            final long n = inputStream.read(buf.array(), 0, (int) Math.min(buf.capacity(), remaining));
+            final long n = in.read(buf.array(), 0, (int) Math.min(buf.capacity(), remaining));
             if (n < 0) {
                 throw new EOFException("Truncated ZIP entry: " + ArchiveUtils.sanitize(current.entry.getName()));
             }
@@ -531,7 +528,7 @@ public class ZipArchiveInputStream extends ArchiveInputStream<ZipArchiveEntry> i
         if (closed) {
             throw new IOException("The stream is closed");
         }
-        final int length = inputStream.read(buf.array());
+        final int length = in.read(buf.array());
         if (length > 0) {
             buf.limit(length);
             count(buf.limit());
@@ -741,7 +738,7 @@ public class ZipArchiveInputStream extends ArchiveInputStream<ZipArchiveEntry> i
         final ZipMethod m = ZipMethod.getMethodByCode(current.entry.getMethod());
         if (current.entry.getCompressedSize() != ArchiveEntry.SIZE_UNKNOWN) {
             if (ZipUtil.canHandleEntryData(current.entry) && m != ZipMethod.STORED && m != ZipMethod.DEFLATED) {
-                final InputStream bis = new BoundCountInputStream(inputStream, current.entry.getCompressedSize());
+                final InputStream bis = new BoundCountInputStream(in, current.entry.getCompressedSize());
                 switch (m) {
                 case UNSHRINKING:
                     current.inputStream = new UnshrinkingInputStream(bis);
@@ -768,7 +765,7 @@ public class ZipArchiveInputStream extends ArchiveInputStream<ZipArchiveEntry> i
                 }
             }
         } else if (m == ZipMethod.ENHANCED_DEFLATED) {
-            current.inputStream = new Deflate64CompressorInputStream(inputStream);
+            current.inputStream = new Deflate64CompressorInputStream(in);
         }
 
         entriesRead++;
@@ -881,7 +878,7 @@ public class ZipArchiveInputStream extends ArchiveInputStream<ZipArchiveEntry> i
             // Instead of ArrayIndexOutOfBoundsException
             throw new IOException(String.format("Negative offset %,d into buffer", offset));
         }
-        ((PushbackInputStream) inputStream).unread(buf, offset, length);
+        ((PushbackInputStream) in).unread(buf, offset, length);
         pushedBackBytes(length);
     }
 
@@ -1068,7 +1065,7 @@ public class ZipArchiveInputStream extends ArchiveInputStream<ZipArchiveEntry> i
 
     private void readFully(final byte[] b, final int off) throws IOException {
         final int len = b.length - off;
-        final int count = IOUtils.readFully(inputStream, b, off, len);
+        final int count = IOUtils.readFully(in, b, off, len);
         count(count);
         if (count < len) {
             throw new EOFException();
@@ -1081,7 +1078,7 @@ public class ZipArchiveInputStream extends ArchiveInputStream<ZipArchiveEntry> i
      * Also updates bytes-read counter.
      */
     private int readOneByte() throws IOException {
-        final int b = inputStream.read();
+        final int b = in.read();
         if (b != -1) {
             count(1);
         }
@@ -1089,7 +1086,7 @@ public class ZipArchiveInputStream extends ArchiveInputStream<ZipArchiveEntry> i
     }
 
     private byte[] readRange(final int len) throws IOException {
-        final byte[] ret = IOUtils.readRange(inputStream, len);
+        final byte[] ret = IOUtils.readRange(in, len);
         count(ret.length);
         if (ret.length < len) {
             throw new EOFException();
@@ -1116,7 +1113,7 @@ public class ZipArchiveInputStream extends ArchiveInputStream<ZipArchiveEntry> i
 
         if (buf.position() >= buf.limit()) {
             buf.position(0);
-            final int l = inputStream.read(buf.array());
+            final int l = in.read(buf.array());
             if (l == -1) {
                 buf.limit(0);
                 throw new IOException("Truncated ZIP file");
@@ -1160,7 +1157,7 @@ public class ZipArchiveInputStream extends ArchiveInputStream<ZipArchiveEntry> i
         final int ddLen = current.usesZip64 ? WORD + 2 * DWORD : 3 * WORD;
 
         while (!done) {
-            final int r = inputStream.read(buf.array(), off, ZipArchiveOutputStream.BUFFER_SIZE - off);
+            final int r = in.read(buf.array(), off, ZipArchiveOutputStream.BUFFER_SIZE - off);
             if (r <= 0) {
                 // read the whole archive without ever finding a
                 // central directory
@@ -1197,7 +1194,7 @@ public class ZipArchiveInputStream extends ArchiveInputStream<ZipArchiveEntry> i
             long skipped = 0;
             while (skipped < value) {
                 final long rem = value - skipped;
-                final int x = inputStream.read(skipBuf, 0, (int) (skipBuf.length > rem ? rem : skipBuf.length));
+                final int x = in.read(skipBuf, 0, (int) (skipBuf.length > rem ? rem : skipBuf.length));
                 if (x == -1) {
                     return;
                 }
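
For context, a minimal usage sketch (hypothetical file name "example.zip"; this assumes, as the change suggests, that the public ArchiveInputStream API is unchanged and only the internal storage of the wrapped stream moved to FilterInputStream.in):

    import java.io.IOException;
    import java.io.InputStream;
    import java.nio.file.Files;
    import java.nio.file.Paths;

    import org.apache.commons.compress.archivers.ArchiveEntry;
    import org.apache.commons.compress.archivers.zip.ZipArchiveInputStream;

    public class ZipReadSketch {
        public static void main(final String[] args) throws IOException {
            // Callers construct and read the stream exactly as before; the
            // FilterInputStream change only affects internal field handling.
            try (InputStream fileIn = Files.newInputStream(Paths.get("example.zip"));
                    ZipArchiveInputStream zis = new ZipArchiveInputStream(fileIn)) {
                ArchiveEntry entry;
                while ((entry = zis.getNextEntry()) != null) {
                    System.out.println(entry.getName() + " " + entry.getSize());
                }
            }
        }
    }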
