This is an automated email from the ASF dual-hosted git repository.
weichiu pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/ozone.git
The following commit(s) were added to refs/heads/master by this push:
new af9a72a230 HDDS-10192. Verify container checksum after downloaded (#6078)
af9a72a230 is described below
commit af9a72a2300aca1509c16ab8e57f24d46c0b50a6
Author: DaveTeng0 <[email protected]>
AuthorDate: Mon Jan 29 18:07:06 2024 -0800
HDDS-10192. Verify container checksum after downloaded (#6078)
Co-authored-by: Doroszlai, Attila <[email protected]>
---
.../container/replication/ContainerImporter.java | 25 ++++++++++--
.../replication/TestContainerImporter.java | 45 +++++++++++++++++++++-
2 files changed, 66 insertions(+), 4 deletions(-)
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/replication/ContainerImporter.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/replication/ContainerImporter.java
index d7c71b1d1d..1929c16089 100644
--- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/replication/ContainerImporter.java
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/replication/ContainerImporter.java
@@ -30,6 +30,7 @@ import org.apache.hadoop.hdds.conf.StorageUnit;
import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos;
import org.apache.hadoop.hdds.scm.ScmConfigKeys;
import org.apache.hadoop.hdds.scm.container.common.helpers.StorageContainerException;
+import org.apache.hadoop.ozone.container.common.helpers.ContainerUtils;
import org.apache.hadoop.ozone.container.common.impl.ContainerDataYaml;
import org.apache.hadoop.ozone.container.common.impl.ContainerSet;
import org.apache.hadoop.ozone.container.common.interfaces.Container;
@@ -64,6 +65,8 @@ public class ContainerImporter {
private final Set<Long> importContainerProgress
= Collections.synchronizedSet(new HashSet<>());
+ private final ConfigurationSource conf;
+
public ContainerImporter(@Nonnull ConfigurationSource conf,
@Nonnull ContainerSet containerSet,
@Nonnull ContainerController controller,
@@ -79,6 +82,7 @@ public class ContainerImporter {
containerSize = (long) conf.getStorageSize(
ScmConfigKeys.OZONE_SCM_CONTAINER_SIZE,
ScmConfigKeys.OZONE_SCM_CONTAINER_SIZE_DEFAULT, StorageUnit.BYTES);
+ this.conf = conf;
}
public boolean isAllowedContainerImport(long containerID) {
@@ -112,14 +116,14 @@ public class ContainerImporter {
}
KeyValueContainerData containerData;
- TarContainerPacker packer = new TarContainerPacker(compression);
+ TarContainerPacker packer = getPacker(compression);
try (FileInputStream input = new FileInputStream(tarFilePath.toFile())) {
byte[] containerDescriptorYaml =
packer.unpackContainerDescriptor(input);
- containerData = (KeyValueContainerData) ContainerDataYaml
- .readContainer(containerDescriptorYaml);
+ containerData = getKeyValueContainerData(containerDescriptorYaml);
}
+ ContainerUtils.verifyChecksum(containerData, conf);
containerData.setVolume(targetVolume);
try (FileInputStream input = new FileInputStream(tarFilePath.toFile())) {
@@ -154,4 +158,19 @@ public class ContainerImporter {
return Paths.get(hddsVolume.getVolumeRootDir())
.resolve(CONTAINER_COPY_TMP_DIR).resolve(CONTAINER_COPY_DIR);
}
+
+ protected KeyValueContainerData getKeyValueContainerData(
+ byte[] containerDescriptorYaml) throws IOException {
+ return (KeyValueContainerData) ContainerDataYaml
+ .readContainer(containerDescriptorYaml);
+ }
+
+ protected Set<Long> getImportContainerProgress() {
+ return this.importContainerProgress;
+ }
+
+ protected TarContainerPacker getPacker(CopyContainerCompression compression) {
+ return new TarContainerPacker(compression);
+ }
+
}
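
For illustration only, here is a minimal sketch (not part of this commit) of how the new check surfaces to a replication-side caller. The wrapper class, the importContainerSafely name, and the assumption that importContainer declares IOException are made up for the example; the importContainer call and the StorageContainerException behaviour follow the test added below.

    package org.apache.hadoop.ozone.container.replication;

    import java.io.IOException;
    import java.nio.file.Path;
    import org.apache.hadoop.hdds.scm.container.common.helpers.StorageContainerException;

    final class ChecksumAwareImportExample {

      // Returns true if the replicated tarball was admitted, false if the importer
      // rejected it because the descriptor checksum did not match the value
      // recomputed via ContainerUtils.verifyChecksum.
      static boolean importContainerSafely(ContainerImporter importer,
          long containerId, Path tarPath) throws IOException {
        try {
          importer.importContainer(containerId, tarPath, null,
              CopyContainerCompression.NO_COMPRESSION);
          return true;
        } catch (StorageContainerException e) {
          // With this change, a mismatching checksum fails here with a
          // "Container checksum error" message instead of being imported silently.
          return false;
        }
      }
    }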
diff --git a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/replication/TestContainerImporter.java b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/replication/TestContainerImporter.java
index bac9f458e3..1b989e6bc7 100644
--- a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/replication/TestContainerImporter.java
+++ b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/replication/TestContainerImporter.java
@@ -21,7 +21,9 @@ import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
+import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
+import java.util.HashSet;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.Semaphore;
@@ -41,8 +43,10 @@ import org.apache.hadoop.ozone.container.common.volume.MutableVolumeSet;
import org.apache.hadoop.ozone.container.common.volume.StorageVolume;
import org.apache.hadoop.ozone.container.keyvalue.KeyValueContainer;
import org.apache.hadoop.ozone.container.keyvalue.KeyValueContainerData;
+import org.apache.hadoop.ozone.container.keyvalue.TarContainerPacker;
import org.apache.hadoop.ozone.container.ozoneimpl.ContainerController;
import org.apache.ozone.test.GenericTestUtils;
+import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.io.TempDir;
@@ -51,8 +55,11 @@ import static org.apache.hadoop.ozone.container.replication.CopyContainerCompres
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertThrows;
-import static org.mockito.Mockito.any;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.Mockito.doNothing;
+import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.when;
/**
@@ -138,6 +145,42 @@ class TestContainerImporter {
semaphore.release();
}
+ @Test
+ public void testInconsistentChecksumContainerShouldThrowError() throws Exception {
+ // create container
+ long containerId = 1;
+ KeyValueContainerData containerData = spy(new KeyValueContainerData(containerId,
+ ContainerLayoutVersion.FILE_PER_BLOCK, 100, "test", "test"));
+ // mock to return different checksum
+ when(containerData.getChecksum()).thenReturn("checksum1", "checksum2");
+ doNothing().when(containerData).setChecksumTo0ByteArray();
+ // create containerImporter object
+ ContainerController controllerMock = mock(ContainerController.class);
+ ContainerSet containerSet = new ContainerSet(0);
+ MutableVolumeSet volumeSet = new MutableVolumeSet("test", conf, null,
+ StorageVolume.VolumeType.DATA_VOLUME, null);
+ ContainerImporter containerImporter = spy(new ContainerImporter(conf,
+ containerSet, controllerMock, volumeSet));
+
+ TarContainerPacker packer = mock(TarContainerPacker.class);
+ when(packer.unpackContainerDescriptor(any())).thenReturn("test".getBytes(
+ StandardCharsets.UTF_8));
+ when(containerImporter.getPacker(any())).thenReturn(packer);
+
+ doReturn(containerData).when(containerImporter).getKeyValueContainerData(any(byte[].class));
+ when(containerImporter.getImportContainerProgress()).thenReturn(new HashSet<>());
+
+ File tarFile = File.createTempFile("temp_" + System
+ .currentTimeMillis(), ".tar");
+
+ StorageContainerException scException =
+ assertThrows(StorageContainerException.class,
+ () -> containerImporter.importContainer(containerId,
+ tarFile.toPath(), null, NO_COMPRESSION));
+ Assertions.assertTrue(scException.getMessage().
+ contains("Container checksum error"));
+ }
+
private File containerTarFile(
long containerId, ContainerData containerData) throws IOException {
File yamlFile = new File(tempDir, "container.yaml");
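
As a usage note, the new protected getPacker / getKeyValueContainerData / getImportContainerProgress methods act as test seams: besides the Mockito spy used above, a plain subclass can supply a canned descriptor. A rough sketch under that assumption follows; the FakeDescriptorImporter name, the canned field, and the exact type of the fourth constructor argument are hypothetical, while the overridden method and imports mirror the classes touched by this commit.

    package org.apache.hadoop.ozone.container.replication;

    import java.io.IOException;
    import org.apache.hadoop.hdds.conf.ConfigurationSource;
    import org.apache.hadoop.ozone.container.common.impl.ContainerSet;
    import org.apache.hadoop.ozone.container.common.volume.MutableVolumeSet;
    import org.apache.hadoop.ozone.container.keyvalue.KeyValueContainerData;
    import org.apache.hadoop.ozone.container.ozoneimpl.ContainerController;

    class FakeDescriptorImporter extends ContainerImporter {

      private final KeyValueContainerData canned;

      FakeDescriptorImporter(ConfigurationSource conf, ContainerSet containerSet,
          ContainerController controller, MutableVolumeSet volumeSet,
          KeyValueContainerData canned) {
        super(conf, containerSet, controller, volumeSet);
        this.canned = canned;
      }

      @Override
      protected KeyValueContainerData getKeyValueContainerData(byte[] descriptorYaml)
          throws IOException {
        // Skip YAML parsing and hand back the prepared descriptor, so a test can
        // drive the checksum verification with whatever checksum it wants.
        return canned;
      }
    }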