This is an automated email from the ASF dual-hosted git repository.

ggregory pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/commons-compress.git


The following commit(s) were added to refs/heads/master by this push:
     new 16e5bd7a9 feat: Align DUMP archive block size with Linux `dump` 
utility (#697)
16e5bd7a9 is described below

commit 16e5bd7a9073d538a062f37b053d0ea2ac198912
Author: Piotr P. Karwasz <pi...@github.copernik.eu>
AuthorDate: Mon Aug 25 18:40:16 2025 +0200

    feat: Align DUMP archive block size with Linux `dump` utility (#697)
    
    * feat: Align DUMP archive block size with Linux `dump` utility
    
    This update standardizes the block size limit in `DumpArchiveInputStream` 
to match the value used by the Linux `dump` utility for ext2/3/4 filesystems.
    
    By using the same limit as the reference implementation, we ensure 
consistent parsing behavior and improve compatibility with archives generated 
by standard `dump` tools.
    
    * fix: test archive generation
    
    * fix: add Javadoc to new constants
    
    * fix: checkstyle
    
    * fix: Javadoc fix
    
    * fix: inline Javadoc constants due to Javadoc 8 bug
---
 src/changes/changes.xml                            |  2 +-
 .../archivers/dump/DumpArchiveConstants.java       | 21 +++++++-
 .../compress/archivers/dump/DumpArchiveEntry.java  | 14 -----
 .../archivers/dump/DumpArchiveSummary.java         |  6 ++-
 .../compress/archivers/dump/TapeInputStream.java   | 15 +++---
 .../archivers/dump/DumpArchiveInputStreamTest.java | 42 +++++++++++++++
 .../archivers/dump/DumpArchiveTestFactory.java     | 60 ++++++++++++++++++++++
 .../archivers/tar/TarArchiveOutputStreamTest.java  |  1 -
 .../compress/archivers/tar/TarFileTest.java        |  1 -
 9 files changed, 133 insertions(+), 29 deletions(-)

diff --git a/src/changes/changes.xml b/src/changes/changes.xml
index f7a3207b7..c4b82bd62 100644
--- a/src/changes/changes.xml
+++ b/src/changes/changes.xml
@@ -61,7 +61,7 @@ The <action> type attribute can be add,update,fix,remove.
       <action type="fix" dev="ggregory" due-to="Tyler Nighswander, Gary 
Gregory">org.apache.commons.compress.compressors.bzip2.BZip2CompressorInputStream
 constructors now throws CompressorException instead of 
ArrayIndexOutOfBoundsException.</action>
       <action type="fix" dev="ggregory" due-to="Gary 
Gregory">org.apache.commons.compress.compressors.bzip2.BZip2CompressorInputStream
 throws CompressorException (an IOException subclass) instead of IOException 
when it encounters formatting problems.</action>
       <!-- FIX dump -->      
-      <action type="fix" dev="ggregory" due-to="Tyler Nighswander, Gary 
Gregory">org.apache.commons.compress.archivers.dump.TapeInputStream.resetBlockSize(int,
 boolean) now throws a MemoryLimitException (an IOException subclass) instead 
of an OutOfMemoryError.</action>
+      <action type="fix" dev="pkarwasz" due-to="Tyler Nighswander">Align DUMP 
archive block size with Linux `dump` utility.</action>
       <action type="fix" dev="ggregory" due-to="Tyler Nighswander, Gary 
Gregory">org.apache.commons.compress.archivers.dump.DumpArchiveInputStream.getNextEntry()
 throws an ArchiveException instead of ArrayIndexOutOfBoundsException.</action>
       <!-- FIX zip -->      
       <action type="fix" dev="ggregory" due-to="Dominik Stadler, Gary Gregory" 
issue="COMPRESS-598">org.apache.commons.compress.archivers.zip.ZipArchiveInputStream.read(byte[],
 int, int) now throws an IOException instead of a NullPointerException.</action>
diff --git 
a/src/main/java/org/apache/commons/compress/archivers/dump/DumpArchiveConstants.java
 
b/src/main/java/org/apache/commons/compress/archivers/dump/DumpArchiveConstants.java
index 3e94366e7..8ffd11597 100644
--- 
a/src/main/java/org/apache/commons/compress/archivers/dump/DumpArchiveConstants.java
+++ 
b/src/main/java/org/apache/commons/compress/archivers/dump/DumpArchiveConstants.java
@@ -135,15 +135,32 @@ public static SEGMENT_TYPE find(final int code) {
     public static final int TP_SIZE = 1024;
 
     /**
-     * NTREC value {@value}.
+     * Minimum number of kilobytes per record: {@value}.
+     */
+    static final int MIN_NTREC = 1;
+
+    /**
+     * Default number of kilobytes per record: {@value}.
      */
     public static final int NTREC = 10;
 
     /**
-     * HIGH_DENSITY_NTREC value {@value}.
+     * Number of kilobytes per record for high density tapes: {@value}.
      */
     public static final int HIGH_DENSITY_NTREC = 32;
 
+    /**
+     * Maximum number of kilobytes per record: {@value}.
+     * <p>
+     *     This limit matches the one used by the Linux ext2/3/4 dump/restore 
utilities.
+     *     For more details, see the
+     *     <a href="https://dump.sourceforge.io/">Linux dump/restore utilities 
documentation</a>
+     *     and the
+     *     <a 
href="https://manpages.debian.org/unstable/dump/dump.8.en.html#b">dump(8) man 
page</a>.
+     * </p>
+     */
+    static final int MAX_NTREC = 1024;
+
     /**
      * OFS_MAGIC value {@value}.
      */
diff --git 
a/src/main/java/org/apache/commons/compress/archivers/dump/DumpArchiveEntry.java
 
b/src/main/java/org/apache/commons/compress/archivers/dump/DumpArchiveEntry.java
index e1570c9c1..83e469f44 100644
--- 
a/src/main/java/org/apache/commons/compress/archivers/dump/DumpArchiveEntry.java
+++ 
b/src/main/java/org/apache/commons/compress/archivers/dump/DumpArchiveEntry.java
@@ -952,18 +952,4 @@ public String toString() {
         return getName();
     }
 
-    /**
-     * Update entry with information from next tape segment header.
-     */
-    void update(final byte[] buffer) {
-        header.volume = DumpArchiveUtil.convert32(buffer, 16);
-        header.count = DumpArchiveUtil.convert32(buffer, 160);
-        header.holes = 0;
-        for (int i = 0; i < 512 && i < header.count; i++) {
-            if (buffer[164 + i] == 0) {
-                header.holes++;
-            }
-        }
-        System.arraycopy(buffer, 164, header.cdata, 0, 512);
-    }
 }
diff --git 
a/src/main/java/org/apache/commons/compress/archivers/dump/DumpArchiveSummary.java
 
b/src/main/java/org/apache/commons/compress/archivers/dump/DumpArchiveSummary.java
index 1f22db766..703133b54 100644
--- 
a/src/main/java/org/apache/commons/compress/archivers/dump/DumpArchiveSummary.java
+++ 
b/src/main/java/org/apache/commons/compress/archivers/dump/DumpArchiveSummary.java
@@ -150,8 +150,10 @@ public int getLevel() {
     }
 
     /**
-     * Gets the number of records per tape block. This is typically between 10 
and 32.
-     *
+     * Gets the number of records per tape block.
+     * <p>
+     *     This is typically either 10 (for standard density tapes) or 32 (for 
high density tapes).
+     * </p>
      * @return the number of records per tape block
      */
     public int getNTRec() {
diff --git 
a/src/main/java/org/apache/commons/compress/archivers/dump/TapeInputStream.java 
b/src/main/java/org/apache/commons/compress/archivers/dump/TapeInputStream.java
index 3b663c4c5..720bd10ea 100644
--- 
a/src/main/java/org/apache/commons/compress/archivers/dump/TapeInputStream.java
+++ 
b/src/main/java/org/apache/commons/compress/archivers/dump/TapeInputStream.java
@@ -18,6 +18,9 @@
  */
 package org.apache.commons.compress.archivers.dump;
 
+import static 
org.apache.commons.compress.archivers.dump.DumpArchiveConstants.MAX_NTREC;
+import static 
org.apache.commons.compress.archivers.dump.DumpArchiveConstants.MIN_NTREC;
+
 import java.io.FilterInputStream;
 import java.io.IOException;
 import java.io.InputStream;
@@ -25,7 +28,6 @@
 import java.util.zip.DataFormatException;
 import java.util.zip.Inflater;
 
-import org.apache.commons.compress.MemoryLimitException;
 import org.apache.commons.compress.archivers.ArchiveException;
 import org.apache.commons.compress.utils.IOUtils;
 
@@ -285,16 +287,13 @@ public byte[] readRecord() throws IOException {
      * @throws IOException there was an error reading additional blocks.
      * @throws IOException recsPerBlock is smaller than 1.
      */
-    public void resetBlockSize(final int recsPerBlock, final boolean 
isCompressed) throws IOException {
+    void resetBlockSize(final int recsPerBlock, final boolean isCompressed) 
throws IOException {
         this.isCompressed = isCompressed;
-        if (recsPerBlock < 1) {
-            throw new ArchiveException("Block with %,d records found, must be 
at least 1", recsPerBlock);
+        if (recsPerBlock < MIN_NTREC || recsPerBlock > MAX_NTREC) {
+            throw new ArchiveException(
+                    "Invalid DUMP block size: %d KiB (expected between %d and 
%d)", recsPerBlock, MIN_NTREC, MAX_NTREC);
         }
-        MemoryLimitException.checkKiB(recsPerBlock, 
Runtime.getRuntime().maxMemory());
         blockSize = RECORD_SIZE * recsPerBlock;
-        if (blockSize < 1) {
-            throw new ArchiveException("Block size cannot be less than or 
equal to 0: %,d", blockSize);
-        }
         // save first block in case we need it again
         final byte[] oldBuffer = blockBuffer;
         // read rest of new block
diff --git 
a/src/test/java/org/apache/commons/compress/archivers/dump/DumpArchiveInputStreamTest.java
 
b/src/test/java/org/apache/commons/compress/archivers/dump/DumpArchiveInputStreamTest.java
index 6012dbb7c..92d47e1d5 100644
--- 
a/src/test/java/org/apache/commons/compress/archivers/dump/DumpArchiveInputStreamTest.java
+++ 
b/src/test/java/org/apache/commons/compress/archivers/dump/DumpArchiveInputStreamTest.java
@@ -18,6 +18,9 @@
  */
 package org.apache.commons.compress.archivers.dump;
 
+import static 
org.apache.commons.compress.archivers.dump.DumpArchiveConstants.TP_SIZE;
+import static 
org.apache.commons.compress.archivers.dump.DumpArchiveTestFactory.createSegment;
+import static 
org.apache.commons.compress.archivers.dump.DumpArchiveTestFactory.createSummary;
 import static org.junit.jupiter.api.Assertions.assertArrayEquals;
 import static org.junit.jupiter.api.Assertions.assertEquals;
 import static org.junit.jupiter.api.Assertions.assertInstanceOf;
@@ -26,6 +29,7 @@
 import static org.junit.jupiter.api.Assertions.assertTimeoutPreemptively;
 import static org.junit.jupiter.api.Assertions.assertTrue;
 
+import java.io.ByteArrayInputStream;
 import java.io.IOException;
 import java.io.InputStream;
 import java.nio.file.Files;
@@ -39,6 +43,8 @@
 import org.junit.jupiter.api.Test;
 import org.junit.jupiter.api.Timeout;
 import org.junit.jupiter.api.Timeout.ThreadMode;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.ValueSource;
 
 class DumpArchiveInputStreamTest extends AbstractTest {
 
@@ -140,4 +146,40 @@ void testSingleByteReadConsistentlyReturnsMinusOneAtEof() 
throws Exception {
         }
     }
 
+    @ParameterizedTest
+    @ValueSource(ints = {1, 10, 32, 1024})
+    void checkSupportedRecordSizes(final int ntrec) throws Exception {
+        try (InputStream is = createArchive(ntrec);
+                DumpArchiveInputStream dump = new DumpArchiveInputStream(is)) {
+            final DumpArchiveSummary summary = dump.getSummary();
+            assertNotNull(summary);
+            assertEquals(ntrec, summary.getNTRec());
+        }
+    }
+
+    @ParameterizedTest
+    @ValueSource(ints = {Integer.MIN_VALUE, -1, 0, 1025, Integer.MAX_VALUE})
+    void checkUnsupportedRecordSizes(final int ntrec) throws Exception {
+        try (InputStream is = createArchive(ntrec)) {
+            final ArchiveException ex = assertThrows(ArchiveException.class, 
() -> new DumpArchiveInputStream(is));
+            assertInstanceOf(ArchiveException.class, ex.getCause());
+            assertTrue(
+                    ex.getMessage().contains(Integer.toString(ntrec)),
+                    "message should contain the invalid ntrec value");
+        }
+    }
+
+    private InputStream createArchive(final int ntrec) {
+        final byte[] dump = new byte[1024 * TP_SIZE];
+        int offset = 0;
+        // summary
+        System.arraycopy(createSummary(ntrec), 0, dump, offset, TP_SIZE);
+        offset += TP_SIZE;
+        // CLRI segment
+        
System.arraycopy(createSegment(DumpArchiveConstants.SEGMENT_TYPE.CLRI), 0, 
dump, offset, TP_SIZE);
+        offset += TP_SIZE;
+        // BITS segment
+        
System.arraycopy(createSegment(DumpArchiveConstants.SEGMENT_TYPE.BITS), 0, 
dump, offset, TP_SIZE);
+        return new ByteArrayInputStream(dump);
+    }
 }
diff --git 
a/src/test/java/org/apache/commons/compress/archivers/dump/DumpArchiveTestFactory.java
 
b/src/test/java/org/apache/commons/compress/archivers/dump/DumpArchiveTestFactory.java
new file mode 100644
index 000000000..44f8e002e
--- /dev/null
+++ 
b/src/test/java/org/apache/commons/compress/archivers/dump/DumpArchiveTestFactory.java
@@ -0,0 +1,60 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.commons.compress.archivers.dump;
+
+import org.apache.commons.compress.utils.ByteUtils;
+
+final class DumpArchiveTestFactory {
+    private static final int MAGIC_OFFSET = 24;
+    private static final int CHECKSUM_OFFSET = 28;
+    private static final int NTREC_OFFSET = 896;
+    private static final int SEGMENT_TYPE_OFFSET = 0;
+
+    static byte[] createSummary(final int ntrec) {
+        final byte[] buffer = new byte[DumpArchiveConstants.TP_SIZE];
+        // magic
+        convert32(DumpArchiveConstants.NFS_MAGIC, buffer, MAGIC_OFFSET);
+        // ntrec
+        convert32(ntrec, buffer, NTREC_OFFSET);
+        // checksum
+        final int checksum = DumpArchiveUtil.calculateChecksum(buffer);
+        convert32(checksum, buffer, CHECKSUM_OFFSET);
+        return buffer;
+    }
+
+    static byte[] createSegment(final DumpArchiveConstants.SEGMENT_TYPE 
segmentType) {
+        final byte[] buffer = new byte[DumpArchiveConstants.TP_SIZE];
+        // magic
+        convert32(DumpArchiveConstants.NFS_MAGIC, buffer, MAGIC_OFFSET);
+        // type
+        ByteUtils.toLittleEndian(buffer, segmentType.code, 
SEGMENT_TYPE_OFFSET, 2);
+        // checksum
+        final int checksum = DumpArchiveUtil.calculateChecksum(buffer);
+        convert32(checksum, buffer, CHECKSUM_OFFSET);
+        return buffer;
+    }
+
+    private static void convert32(final long value, final byte[] buffer, final 
int offset) {
+        ByteUtils.toLittleEndian(buffer, value, offset, 4);
+    }
+
+    private DumpArchiveTestFactory() {
+        // prevent instantiation
+    }
+}
diff --git 
a/src/test/java/org/apache/commons/compress/archivers/tar/TarArchiveOutputStreamTest.java
 
b/src/test/java/org/apache/commons/compress/archivers/tar/TarArchiveOutputStreamTest.java
index 38c88a066..db6a9e174 100644
--- 
a/src/test/java/org/apache/commons/compress/archivers/tar/TarArchiveOutputStreamTest.java
+++ 
b/src/test/java/org/apache/commons/compress/archivers/tar/TarArchiveOutputStreamTest.java
@@ -42,7 +42,6 @@
 import java.util.Date;
 import java.util.HashMap;
 import java.util.Map;
-import java.util.TimeZone;
 
 import org.apache.commons.compress.AbstractTest;
 import org.apache.commons.compress.archivers.ArchiveEntry;
diff --git 
a/src/test/java/org/apache/commons/compress/archivers/tar/TarFileTest.java 
b/src/test/java/org/apache/commons/compress/archivers/tar/TarFileTest.java
index 8b2689cf1..38b0cced4 100644
--- a/src/test/java/org/apache/commons/compress/archivers/tar/TarFileTest.java
+++ b/src/test/java/org/apache/commons/compress/archivers/tar/TarFileTest.java
@@ -38,7 +38,6 @@
 import java.util.Calendar;
 import java.util.Date;
 import java.util.List;
-import java.util.TimeZone;
 import java.util.zip.GZIPInputStream;
 
 import org.apache.commons.compress.AbstractTest;

Reply via email to