This is an automated email from the ASF dual-hosted git repository.

weichiu pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/ozone.git


The following commit(s) were added to refs/heads/master by this push:
     new 23a91637ff HDDS-7625. Do not compress OM/SCM checkpoints (#4130)
23a91637ff is described below

commit 23a91637ffec6941357a9ad70ed991d283c69cb0
Author: Chung En Lee <[email protected]>
AuthorDate: Mon Jan 23 07:33:31 2023 +0800

    HDDS-7625. Do not compress OM/SCM checkpoints (#4130)
---
 .../container/common/helpers/ContainerUtils.java   | 14 ++---
 .../container/keyvalue/TarContainerPacker.java     |  2 +-
 .../replication/GrpcReplicationClient.java         |  2 +-
 .../common/helpers/TestContainerUtils.java         |  8 +--
 .../container/keyvalue/TestKeyValueContainer.java  |  6 +-
 .../container/keyvalue/TestTarContainerPacker.java |  4 +-
 .../upgrade/TestDatanodeUpgradeToScmHA.java        |  2 +-
 .../hadoop/hdds/utils/DBCheckpointServlet.java     | 69 ++--------------------
 .../apache/hadoop/hdds/utils/HddsServerUtil.java   | 18 ++----
 .../hadoop/hdds/scm/ha/SCMSnapshotProvider.java    |  2 +-
 .../hdds/scm/TestSCMDbCheckpointServlet.java       |  2 +-
 .../hadoop/ozone/om/TestOMDbCheckpointServlet.java | 58 ++++++++----------
 .../om/snapshot/OzoneManagerSnapshotProvider.java  |  2 +-
 .../org/apache/hadoop/ozone/recon/ReconUtils.java  | 22 ++-----
 .../spi/impl/OzoneManagerServiceProviderImpl.java  |  2 +-
 .../impl/StorageContainerServiceProviderImpl.java  |  2 +-
 .../ozone/debug/container/ExportSubcommand.java    |  2 +-
 17 files changed, 63 insertions(+), 154 deletions(-)

diff --git 
a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/ContainerUtils.java
 
b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/ContainerUtils.java
index 81a6935098..11f54be8b9 100644
--- 
a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/ContainerUtils.java
+++ 
b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/ContainerUtils.java
@@ -280,22 +280,22 @@ public final class ContainerUtils {
     return Long.parseLong(containerBaseDir.getName());
   }
 
-  public static String getContainerTarGzName(long containerId) {
-    return "container-" + containerId + ".tar.gz";
+  public static String getContainerTarName(long containerId) {
+    return "container-" + containerId + ".tar";
   }
 
-  public static long retrieveContainerIdFromTarGzName(String tarGzName)
+  public static long retrieveContainerIdFromTarName(String tarName)
       throws IOException {
-    assert tarGzName != null;
-    Pattern pattern = Pattern.compile("container-(\\d+).tar.gz");
+    assert tarName != null;
+    Pattern pattern = Pattern.compile("container-(\\d+).tar");
     // Now create matcher object.
-    Matcher m = pattern.matcher(tarGzName);
+    Matcher m = pattern.matcher(tarName);
 
     if (m.find()) {
       return Long.parseLong(m.group(1));
     } else {
       throw new IOException("Illegal container tar gz file " +
-          tarGzName);
+          tarName);
     }
   }
 }
diff --git 
a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/TarContainerPacker.java
 
b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/TarContainerPacker.java
index 9ad9ad758d..40d1c1676a 100644
--- 
a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/TarContainerPacker.java
+++ 
b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/TarContainerPacker.java
@@ -58,7 +58,7 @@ import static 
org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos.Res
 import static org.apache.hadoop.ozone.OzoneConsts.SCHEMA_V3;
 
 /**
- * Compress/uncompress KeyValueContainer data to a tar.gz archive.
+ * Compress/uncompress KeyValueContainer data to a tar archive.
  */
 public class TarContainerPacker
     implements ContainerPacker<KeyValueContainerData> {
diff --git 
a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/replication/GrpcReplicationClient.java
 
b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/replication/GrpcReplicationClient.java
index 88da4d994f..fa4140040c 100644
--- 
a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/replication/GrpcReplicationClient.java
+++ 
b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/replication/GrpcReplicationClient.java
@@ -109,7 +109,7 @@ public class GrpcReplicationClient implements AutoCloseable 
{
     CompletableFuture<Path> response = new CompletableFuture<>();
 
     Path destinationPath = getWorkingDirectory()
-        .resolve(ContainerUtils.getContainerTarGzName(containerId));
+        .resolve(ContainerUtils.getContainerTarName(containerId));
 
     client.download(request,
         new StreamDownloader(containerId, response, destinationPath));
diff --git 
a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/helpers/TestContainerUtils.java
 
b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/helpers/TestContainerUtils.java
index 51b929eb2b..1bfdf8ae94 100644
--- 
a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/helpers/TestContainerUtils.java
+++ 
b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/helpers/TestContainerUtils.java
@@ -59,12 +59,12 @@ public class TestContainerUtils {
   }
 
   @Test
-  public void testTarGzName() throws IOException {
+  public void testTarName() throws IOException {
     long containerId = 100;
-    String tarGzName = "container-100.tar.gz";
-    assertEquals(tarGzName, ContainerUtils.getContainerTarGzName(containerId));
+    String tarName = "container-100.tar";
+    assertEquals(tarName, ContainerUtils.getContainerTarName(containerId));
 
     assertEquals(containerId,
-        ContainerUtils.retrieveContainerIdFromTarGzName(tarGzName));
+        ContainerUtils.retrieveContainerIdFromTarName(tarName));
   }
 }
diff --git 
a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/keyvalue/TestKeyValueContainer.java
 
b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/keyvalue/TestKeyValueContainer.java
index 62a432e514..734b8d3193 100644
--- 
a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/keyvalue/TestKeyValueContainer.java
+++ 
b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/keyvalue/TestKeyValueContainer.java
@@ -193,7 +193,7 @@ public class TestKeyValueContainer {
     checkContainerFilesPresent(data, 0);
 
     //destination path
-    File exportTar = folder.newFile("exported.tar.gz");
+    File exportTar = folder.newFile("exported.tar");
     TarContainerPacker packer = new TarContainerPacker();
     //export the container
     try (FileOutputStream fos = new FileOutputStream(exportTar)) {
@@ -220,7 +220,7 @@ public class TestKeyValueContainer {
     populate(numberOfKeysToWrite);
 
     //destination path
-    File folderToExport = folder.newFile("exported.tar.gz");
+    File folderToExport = folder.newFile("exported.tar");
     for (Map.Entry<CopyContainerCompression, String> entry :
         CopyContainerCompression.getCompressionMapping().entrySet()) {
       TarContainerPacker packer = new TarContainerPacker(entry.getValue());
@@ -368,7 +368,7 @@ public class TestKeyValueContainer {
     List<Thread> threads = IntStream.range(0, 20)
         .mapToObj(i -> new Thread(() -> {
           try {
-            File file = folder.newFile("concurrent" + i + ".tar.gz");
+            File file = folder.newFile("concurrent" + i + ".tar");
             try (OutputStream out = new FileOutputStream(file)) {
               keyValueContainer.exportContainerData(out, packer);
             }
diff --git 
a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/keyvalue/TestTarContainerPacker.java
 
b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/keyvalue/TestTarContainerPacker.java
index e5cd638192..73df8f2964 100644
--- 
a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/keyvalue/TestTarContainerPacker.java
+++ 
b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/keyvalue/TestTarContainerPacker.java
@@ -186,7 +186,7 @@ public class TestTarContainerPacker {
     //sample container descriptor file
     writeDescriptor(sourceContainer);
 
-    Path targetFile = TEMP_DIR.resolve("container.tar.gz");
+    Path targetFile = TEMP_DIR.resolve("container.tar");
 
     //WHEN: pack it
     SpyOutputStream outputForPack =
@@ -377,7 +377,7 @@ public class TestTarContainerPacker {
 
   private File packContainerWithSingleFile(File file, String entryName)
       throws Exception {
-    File targetFile = TEMP_DIR.resolve("container.tar.gz").toFile();
+    File targetFile = TEMP_DIR.resolve("container.tar").toFile();
     try (FileOutputStream output = new FileOutputStream(targetFile);
          OutputStream compressed = packer.compress(output);
          ArchiveOutputStream archive = new TarArchiveOutputStream(compressed)) 
{
diff --git 
a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/upgrade/TestDatanodeUpgradeToScmHA.java
 
b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/upgrade/TestDatanodeUpgradeToScmHA.java
index 1e01d08080..b73c04d399 100644
--- 
a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/upgrade/TestDatanodeUpgradeToScmHA.java
+++ 
b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/upgrade/TestDatanodeUpgradeToScmHA.java
@@ -674,7 +674,7 @@ public class TestDatanodeUpgradeToScmHA {
             new TarContainerPacker(), dsm.getContainer().getVolumeSet());
 
     File tempFile = tempFolder.newFile(
-        ContainerUtils.getContainerTarGzName(containerID));
+        ContainerUtils.getContainerTarName(containerID));
     Files.copy(source.toPath(), tempFile.toPath(),
         StandardCopyOption.REPLACE_EXISTING);
     replicator.importContainer(containerID, tempFile.toPath(), null);
diff --git 
a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/DBCheckpointServlet.java
 
b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/DBCheckpointServlet.java
index 03f2fdca39..a8f99149e5 100644
--- 
a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/DBCheckpointServlet.java
+++ 
b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/DBCheckpointServlet.java
@@ -21,31 +21,19 @@ import javax.servlet.ServletException;
 import javax.servlet.http.HttpServlet;
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
-import java.io.File;
-import java.io.FileInputStream;
 import java.io.IOException;
-import java.io.OutputStream;
-import java.nio.file.Files;
 import java.nio.file.Path;
 import java.time.Duration;
 import java.time.Instant;
 import java.util.Collection;
-import java.util.stream.Collectors;
-import java.util.stream.Stream;
 
 import org.apache.hadoop.hdds.server.OzoneAdmins;
 import org.apache.hadoop.hdds.utils.db.DBCheckpoint;
 import org.apache.hadoop.hdds.utils.db.DBStore;
 
-import org.apache.commons.compress.archivers.ArchiveEntry;
-import org.apache.commons.compress.archivers.ArchiveOutputStream;
-import org.apache.commons.compress.archivers.tar.TarArchiveOutputStream;
-import org.apache.commons.compress.compressors.CompressorException;
-import org.apache.commons.compress.compressors.CompressorOutputStream;
-import org.apache.commons.compress.compressors.CompressorStreamFactory;
-import org.apache.commons.compress.utils.IOUtils;
 import org.apache.commons.lang3.StringUtils;
 
+import static 
org.apache.hadoop.hdds.utils.HddsServerUtil.writeDBCheckpointToStream;
 import static 
org.apache.hadoop.ozone.OzoneConsts.OZONE_DB_CHECKPOINT_REQUEST_FLUSH;
 
 import org.apache.hadoop.security.UserGroupInformation;
@@ -53,7 +41,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 /**
- * Provides the current checkpoint Snapshot of the OM/SCM DB. (tar.gz)
+ * Provides the current checkpoint Snapshot of the OM/SCM DB. (tar)
  */
 public class DBCheckpointServlet extends HttpServlet {
 
@@ -168,10 +156,10 @@ public class DBCheckpointServlet extends HttpServlet {
       if (file == null) {
         return;
       }
-      response.setContentType("application/x-tgz");
+      response.setContentType("application/x-tar");
       response.setHeader("Content-Disposition",
           "attachment; filename=\"" +
-               file.toString() + ".tgz\"");
+               file + ".tar\"");
 
       Instant start = Instant.now();
       writeDBCheckpointToStream(checkpoint,
@@ -200,53 +188,4 @@ public class DBCheckpointServlet extends HttpServlet {
     }
   }
 
-  /**
-   * Write DB Checkpoint to an output stream as a compressed file (tgz).
-   *
-   * @param checkpoint  checkpoint file
-   * @param destination desination output stream.
-   * @throws IOException
-   */
-  public static void writeDBCheckpointToStream(DBCheckpoint checkpoint,
-      OutputStream destination)
-      throws IOException {
-
-    try (CompressorOutputStream gzippedOut = new CompressorStreamFactory()
-        .createCompressorOutputStream(CompressorStreamFactory.GZIP,
-            destination)) {
-
-      try (ArchiveOutputStream archiveOutputStream =
-          new TarArchiveOutputStream(gzippedOut)) {
-
-        Path checkpointPath = checkpoint.getCheckpointLocation();
-        try (Stream<Path> files = Files.list(checkpointPath)) {
-          for (Path path : files.collect(Collectors.toList())) {
-            if (path != null) {
-              Path fileName = path.getFileName();
-              if (fileName != null) {
-                includeFile(path.toFile(), fileName.toString(),
-                    archiveOutputStream);
-              }
-            }
-          }
-        }
-      }
-    } catch (CompressorException e) {
-      throw new IOException(
-          "Can't compress the checkpoint: " +
-              checkpoint.getCheckpointLocation(), e);
-    }
-  }
-
-  private static void includeFile(File file, String entryName,
-      ArchiveOutputStream archiveOutputStream)
-      throws IOException {
-    ArchiveEntry archiveEntry =
-        archiveOutputStream.createArchiveEntry(file, entryName);
-    archiveOutputStream.putArchiveEntry(archiveEntry);
-    try (FileInputStream fis = new FileInputStream(file)) {
-      IOUtils.copy(fis, archiveOutputStream);
-    }
-    archiveOutputStream.closeArchiveEntry();
-  }
 }
diff --git 
a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/HddsServerUtil.java
 
b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/HddsServerUtil.java
index 80970ff1bf..ca11919284 100644
--- 
a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/HddsServerUtil.java
+++ 
b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/HddsServerUtil.java
@@ -37,9 +37,6 @@ import com.google.protobuf.BlockingService;
 import org.apache.commons.compress.archivers.ArchiveEntry;
 import org.apache.commons.compress.archivers.ArchiveOutputStream;
 import org.apache.commons.compress.archivers.tar.TarArchiveOutputStream;
-import org.apache.commons.compress.compressors.CompressorException;
-import org.apache.commons.compress.compressors.CompressorOutputStream;
-import org.apache.commons.compress.compressors.CompressorStreamFactory;
 import org.apache.commons.compress.utils.IOUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdds.DFSConfigKeysLegacy;
@@ -526,7 +523,7 @@ public final class HddsServerUtil {
   }
 
   /**
-   * Write DB Checkpoint to an output stream as a compressed file (tgz).
+   * Write DB Checkpoint to an output stream as a compressed file (tar).
    *
    * @param checkpoint  checkpoint file
    * @param destination destination output stream.
@@ -535,11 +532,8 @@ public final class HddsServerUtil {
   public static void writeDBCheckpointToStream(DBCheckpoint checkpoint,
       OutputStream destination)
       throws IOException {
-    try (CompressorOutputStream gzippedOut = new CompressorStreamFactory()
-        .createCompressorOutputStream(CompressorStreamFactory.GZIP,
-            destination);
-        ArchiveOutputStream archiveOutputStream =
-            new TarArchiveOutputStream(gzippedOut);
+    try (ArchiveOutputStream archiveOutputStream =
+            new TarArchiveOutputStream(destination);
         Stream<Path> files =
             Files.list(checkpoint.getCheckpointLocation())) {
       for (Path path : files.collect(Collectors.toList())) {
@@ -551,15 +545,11 @@ public final class HddsServerUtil {
           }
         }
       }
-    } catch (CompressorException e) {
-      throw new IOException(
-          "Can't compress the checkpoint: " +
-              checkpoint.getCheckpointLocation(), e);
     }
   }
 
   private static void includeFile(File file, String entryName,
-      ArchiveOutputStream archiveOutputStream)
+                                 ArchiveOutputStream archiveOutputStream)
       throws IOException {
     ArchiveEntry archiveEntry =
         archiveOutputStream.createArchiveEntry(file, entryName);
diff --git 
a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/ha/SCMSnapshotProvider.java
 
b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/ha/SCMSnapshotProvider.java
index c221463595..07dfe34422 100644
--- 
a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/ha/SCMSnapshotProvider.java
+++ 
b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/ha/SCMSnapshotProvider.java
@@ -104,7 +104,7 @@ public class SCMSnapshotProvider {
     String snapshotFilePath =
         Paths.get(scmSnapshotDir.getAbsolutePath(), snapshotFileName).toFile()
             .getAbsolutePath();
-    File targetFile = new File(snapshotFilePath + ".tar.gz");
+    File targetFile = new File(snapshotFilePath + ".tar");
 
 
     // the downloadClient instance will be created as and when install snapshot
diff --git 
a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/hdds/scm/TestSCMDbCheckpointServlet.java
 
b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/hdds/scm/TestSCMDbCheckpointServlet.java
index 8a91388def..f473b62e74 100644
--- 
a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/hdds/scm/TestSCMDbCheckpointServlet.java
+++ 
b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/hdds/scm/TestSCMDbCheckpointServlet.java
@@ -131,7 +131,7 @@ public class TestSCMDbCheckpointServlet {
           Matchers.anyString());
 
       tempFile = File.createTempFile("testDoGet_" + System
-          .currentTimeMillis(), ".tar.gz");
+          .currentTimeMillis(), ".tar");
 
       FileOutputStream fileOutputStream = new FileOutputStream(tempFile);
       when(responseMock.getOutputStream()).thenReturn(
diff --git 
a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/om/TestOMDbCheckpointServlet.java
 
b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/om/TestOMDbCheckpointServlet.java
index 90743f2e17..403c99f586 100644
--- 
a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/om/TestOMDbCheckpointServlet.java
+++ 
b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/om/TestOMDbCheckpointServlet.java
@@ -24,7 +24,6 @@ import javax.servlet.WriteListener;
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
 import java.io.File;
-import java.io.FileInputStream;
 import java.io.FileOutputStream;
 import java.io.IOException;
 import java.io.OutputStreamWriter;
@@ -38,7 +37,6 @@ import java.util.LinkedHashSet;
 
 import org.apache.hadoop.hdds.conf.OzoneConfiguration;
 import org.apache.hadoop.hdds.utils.db.DBCheckpoint;
-import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.ozone.MiniOzoneCluster;
 import org.apache.hadoop.ozone.OzoneConsts;
 import org.apache.hadoop.security.UserGroupInformation;
@@ -46,12 +44,12 @@ import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.commons.io.FileUtils;
 
 import static 
org.apache.hadoop.hdds.recon.ReconConfig.ConfigStrings.OZONE_RECON_KERBEROS_PRINCIPAL_KEY;
+import static 
org.apache.hadoop.hdds.utils.HddsServerUtil.writeDBCheckpointToStream;
 import static org.apache.hadoop.ozone.OzoneConfigKeys.OZONE_ACL_ENABLED;
 import static org.apache.hadoop.ozone.OzoneConfigKeys.OZONE_ADMINISTRATORS;
 import static 
org.apache.hadoop.ozone.OzoneConfigKeys.OZONE_ADMINISTRATORS_WILDCARD;
 import static 
org.apache.hadoop.ozone.OzoneConsts.OZONE_DB_CHECKPOINT_REQUEST_FLUSH;
 import static org.apache.hadoop.ozone.om.OMConfigKeys.OZONE_OM_HTTP_AUTH_TYPE;
-import static 
org.apache.hadoop.ozone.om.OMDBCheckpointServlet.writeDBCheckpointToStream;
 
 import org.junit.After;
 import org.junit.Assert;
@@ -101,7 +99,7 @@ public class TestOMDbCheckpointServlet {
     conf = new OzoneConfiguration();
 
     tempFile = File.createTempFile("testDoGet_" + System
-        .currentTimeMillis(), ".tar.gz");
+        .currentTimeMillis(), ".tar");
 
     FileOutputStream fileOutputStream = new FileOutputStream(tempFile);
 
@@ -181,7 +179,7 @@ public class TestOMDbCheckpointServlet {
         om.getOmAdminGroups(),
         om.isSpnegoEnabled());
 
-    doNothing().when(responseMock).setContentType("application/x-tgz");
+    doNothing().when(responseMock).setContentType("application/x-tar");
     doNothing().when(responseMock).setHeader(Matchers.anyString(),
         Matchers.anyString());
 
@@ -261,34 +259,28 @@ public class TestOMDbCheckpointServlet {
   @Test
   public void testWriteCheckpointToOutputStream() throws Exception {
 
-    FileInputStream fis = null;
-    FileOutputStream fos = null;
-
-    try {
-      String testDirName = folder.newFolder().getAbsolutePath();
-      File file = new File(testDirName + "/temp1.txt");
-      OutputStreamWriter writer = new OutputStreamWriter(
-          new FileOutputStream(file), StandardCharsets.UTF_8);
-      writer.write("Test data 1");
-      writer.close();
-
-      file = new File(testDirName + "/temp2.txt");
-      writer = new OutputStreamWriter(
-          new FileOutputStream(file), StandardCharsets.UTF_8);
-      writer.write("Test data 2");
-      writer.close();
-
-      File outputFile =
-          new File(Paths.get(testDirName, "output_file.tgz").toString());
-      TestDBCheckpoint dbCheckpoint = new TestDBCheckpoint(
-          Paths.get(testDirName));
-      writeDBCheckpointToStream(dbCheckpoint,
-          new FileOutputStream(outputFile));
-      assertNotNull(outputFile);
-    } finally {
-      IOUtils.closeStream(fis);
-      IOUtils.closeStream(fos);
-    }
+    String testDirName = folder.newFolder().getAbsolutePath();
+    File checkpoint = new File(testDirName, "checkpoint");
+    checkpoint.mkdir();
+    File file = new File(checkpoint, "temp1.txt");
+    OutputStreamWriter writer = new OutputStreamWriter(
+        new FileOutputStream(file), StandardCharsets.UTF_8);
+    writer.write("Test data 1");
+    writer.close();
+
+    file = new File(checkpoint, "/temp2.txt");
+    writer = new OutputStreamWriter(
+        new FileOutputStream(file), StandardCharsets.UTF_8);
+    writer.write("Test data 2");
+    writer.close();
+
+    File outputFile =
+        new File(Paths.get(testDirName, "output_file.tar").toString());
+    TestDBCheckpoint dbCheckpoint = new TestDBCheckpoint(
+        checkpoint.toPath());
+    writeDBCheckpointToStream(dbCheckpoint,
+        new FileOutputStream(outputFile));
+    assertNotNull(outputFile);
   }
 }
 
diff --git 
a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/snapshot/OzoneManagerSnapshotProvider.java
 
b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/snapshot/OzoneManagerSnapshotProvider.java
index 3ef2661f82..5c043a1acc 100644
--- 
a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/snapshot/OzoneManagerSnapshotProvider.java
+++ 
b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/snapshot/OzoneManagerSnapshotProvider.java
@@ -114,7 +114,7 @@ public class OzoneManagerSnapshotProvider {
         + "-" + snapshotTime;
     String snapshotFilePath = Paths.get(omSnapshotDir.getAbsolutePath(),
         snapshotFileName).toFile().getAbsolutePath();
-    File targetFile = new File(snapshotFilePath + ".tar.gz");
+    File targetFile = new File(snapshotFilePath + ".tar");
 
     String omCheckpointUrl = peerNodesMap.get(leaderOMNodeID)
         .getOMDBCheckpointEnpointUrl(httpPolicy.isHttpEnabled());
diff --git 
a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/ReconUtils.java
 
b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/ReconUtils.java
index c6f735e7af..06b57eb3dd 100644
--- 
a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/ReconUtils.java
+++ 
b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/ReconUtils.java
@@ -31,7 +31,6 @@ import java.nio.file.Path;
 import java.nio.file.Paths;
 import java.security.KeyPair;
 import java.sql.Timestamp;
-import java.util.zip.GZIPOutputStream;
 
 import com.google.inject.Singleton;
 import org.apache.hadoop.hdds.HddsConfigKeys;
@@ -46,7 +45,6 @@ import org.apache.hadoop.io.IOUtils;
 import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
 import org.apache.commons.compress.archivers.tar.TarArchiveInputStream;
 import org.apache.commons.compress.archivers.tar.TarArchiveOutputStream;
-import org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream;
 import static org.apache.hadoop.hdds.server.ServerUtils.getDirectoryFromConfig;
 import static org.apache.hadoop.hdds.server.ServerUtils.getOzoneMetaDirPath;
 import static 
org.apache.hadoop.ozone.recon.ReconServerConfigKeys.OZONE_RECON_SCM_DB_DIR;
@@ -104,23 +102,20 @@ public class ReconUtils {
   }
 
   /**
-   * Given a source directory, create a tar.gz file from it.
+   * Given a source directory, create a tar file from it.
    *
    * @param sourcePath the path to the directory to be archived.
-   * @return tar.gz file
+   * @return tar file
    * @throws IOException
    */
   public static File createTarFile(Path sourcePath) throws IOException {
     TarArchiveOutputStream tarOs = null;
     FileOutputStream fileOutputStream = null;
-    GZIPOutputStream gzipOutputStream = null;
     try {
       String sourceDir = sourcePath.toString();
-      String fileName = sourceDir.concat(".tar.gz");
+      String fileName = sourceDir.concat(".tar");
       fileOutputStream = new FileOutputStream(fileName);
-      gzipOutputStream =
-          new GZIPOutputStream(new BufferedOutputStream(fileOutputStream));
-      tarOs = new TarArchiveOutputStream(gzipOutputStream);
+      tarOs = new TarArchiveOutputStream(fileOutputStream);
       File folder = new File(sourceDir);
       File[] filesInDir = folder.listFiles();
       if (filesInDir != null) {
@@ -133,7 +128,6 @@ public class ReconUtils {
       try {
         org.apache.hadoop.io.IOUtils.closeStream(tarOs);
         org.apache.hadoop.io.IOUtils.closeStream(fileOutputStream);
-        org.apache.hadoop.io.IOUtils.closeStream(gzipOutputStream);
       } catch (Exception e) {
         LOG.error("Exception encountered when closing " +
             "TAR file output stream: " + e);
@@ -177,12 +171,8 @@ public class ReconUtils {
       throws IOException {
 
     FileInputStream fileInputStream = null;
-    BufferedInputStream buffIn = null;
-    GzipCompressorInputStream gzIn = null;
     try {
       fileInputStream = new FileInputStream(tarFile);
-      buffIn = new BufferedInputStream(fileInputStream);
-      gzIn = new GzipCompressorInputStream(buffIn);
 
       //Create Destination directory if it does not exist.
       if (!destPath.toFile().exists()) {
@@ -193,7 +183,7 @@ public class ReconUtils {
       }
 
       try (TarArchiveInputStream tarInStream =
-               new TarArchiveInputStream(gzIn)) {
+               new TarArchiveInputStream(fileInputStream)) {
         TarArchiveEntry entry;
 
         while ((entry = (TarArchiveEntry) tarInStream.getNextEntry()) != null) 
{
@@ -223,8 +213,6 @@ public class ReconUtils {
         }
       }
     } finally {
-      IOUtils.closeStream(gzIn);
-      IOUtils.closeStream(buffIn);
       IOUtils.closeStream(fileInputStream);
     }
   }
diff --git 
a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/spi/impl/OzoneManagerServiceProviderImpl.java
 
b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/spi/impl/OzoneManagerServiceProviderImpl.java
index cc4bdffb07..a3897d9a55 100644
--- 
a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/spi/impl/OzoneManagerServiceProviderImpl.java
+++ 
b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/spi/impl/OzoneManagerServiceProviderImpl.java
@@ -341,7 +341,7 @@ public class OzoneManagerServiceProviderImpl
     String snapshotFileName = RECON_OM_SNAPSHOT_DB + "_" +
         System.currentTimeMillis();
     File targetFile = new File(omSnapshotDBParentDir, snapshotFileName +
-        ".tar.gz");
+        ".tar");
     try {
       SecurityUtil.doAsLoginUser(() -> {
         try (InputStream inputStream = reconUtils.makeHttpCall(
diff --git 
a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/spi/impl/StorageContainerServiceProviderImpl.java
 
b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/spi/impl/StorageContainerServiceProviderImpl.java
index bb94c6d298..b726c8a516 100644
--- 
a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/spi/impl/StorageContainerServiceProviderImpl.java
+++ 
b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/spi/impl/StorageContainerServiceProviderImpl.java
@@ -175,7 +175,7 @@ public class StorageContainerServiceProviderImpl
     String snapshotFileName = RECON_SCM_SNAPSHOT_DB + "_" +
         System.currentTimeMillis();
     File targetFile = new File(scmSnapshotDBParentDir, snapshotFileName +
-            ".tar.gz");
+            ".tar");
 
     try {
       if (!SCMHAUtils.isSCMHAEnabled(configuration)) {
diff --git 
a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/debug/container/ExportSubcommand.java
 
b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/debug/container/ExportSubcommand.java
index e2f0a25569..c5e37a1867 100644
--- 
a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/debug/container/ExportSubcommand.java
+++ 
b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/debug/container/ExportSubcommand.java
@@ -73,7 +73,7 @@ public class ExportSubcommand implements Callable<Void> {
     for (int i = 0; i < containerCount; i++) {
       replicationSource.prepare(containerId);
       final File destinationFile =
-          new File(destination, "container-" + containerId + ".tar.gz");
+          new File(destination, "container-" + containerId + ".tar");
       try (FileOutputStream fos = new FileOutputStream(destinationFile)) {
         try {
           replicationSource.copyData(containerId, fos, GZIP);


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to the sender.