This is an automated email from the ASF dual-hosted git repository.
szetszwo pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/ozone.git
The following commit(s) were added to refs/heads/master by this push:
new d94aadc4b8 HDDS-10480. Avoid proto2 ByteString.toByteArray() calls. (#6342)
d94aadc4b8 is described below
commit d94aadc4b8447bd092a581b116d3b19949334774
Author: Tsz-Wo Nicholas Sze <[email protected]>
AuthorDate: Thu Mar 7 16:43:51 2024 -0800
HDDS-10480. Avoid proto2 ByteString.toByteArray() calls. (#6342)
---
.../container/keyvalue/KeyValueContainerCheck.java | 6 +--
.../hadoop/hdds/scm/ha/SCMRatisResponse.java | 20 ++++-----
.../client/checksum/ECBlockChecksumComputer.java | 50 ++++++++++------------
.../checksum/ReplicatedBlockChecksumComputer.java | 23 ++++++----
.../hadoop/ozone/freon/DatanodeChunkValidator.java | 2 +-
5 files changed, 50 insertions(+), 51 deletions(-)
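The whole patch follows one pattern: proto ByteString.toByteArray() allocates a fresh byte[] and copies every byte on each call, while asReadOnlyByteBuffer() exposes the same bytes as a read-only view without copying. A minimal standalone sketch of the two accessors (illustrative only, not code from the patch):

    import java.nio.ByteBuffer;
    import org.apache.ratis.thirdparty.com.google.protobuf.ByteString;

    public final class ByteStringAccessDemo {
      public static void main(String[] args) {
        final ByteString bytes = ByteString.copyFromUtf8("hello, ozone");

        // toByteArray(): allocates a new byte[] and copies all content.
        final byte[] copy = bytes.toByteArray();

        // asReadOnlyByteBuffer(): read-only view of the same bytes, no copy.
        final ByteBuffer view = bytes.asReadOnlyByteBuffer();

        System.out.println(copy.length + " bytes copied vs "
            + view.remaining() + " bytes viewed in place");
      }
    }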
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/KeyValueContainerCheck.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/KeyValueContainerCheck.java
index f0713469e6..70539111fb 100644
--- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/KeyValueContainerCheck.java
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/KeyValueContainerCheck.java
@@ -20,6 +20,7 @@ package org.apache.hadoop.ozone.container.keyvalue;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
+import org.apache.hadoop.hdds.StringUtils;
import org.apache.hadoop.hdds.conf.ConfigurationSource;
import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos;
import org.apache.hadoop.hdfs.util.Canceler;
@@ -45,7 +46,6 @@ import java.io.FileNotFoundException;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
-import java.util.Arrays;
import org.apache.ratis.thirdparty.com.google.protobuf.ByteString;
import org.slf4j.Logger;
@@ -421,8 +421,8 @@ public class KeyValueContainerCheck {
" for block %s",
ChunkInfo.getFromProtoBuf(chunk),
i,
- Arrays.toString(expected.toByteArray()),
- Arrays.toString(actual.toByteArray()),
+ StringUtils.bytes2Hex(expected.asReadOnlyByteBuffer()),
+ StringUtils.bytes2Hex(actual.asReadOnlyByteBuffer()),
block.getBlockID());
return ScanResult.unhealthy(
ScanResult.FailureType.CORRUPT_CHUNK, chunkFile,
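Besides dropping the two array copies, the new log form is easier to read: Arrays.toString on a byte[] prints signed decimal, whereas bytes2Hex prints compact hex. A self-contained sketch of the difference (the manual loop stands in for hdds StringUtils.bytes2Hex, and the 4-byte value is hypothetical):

    import java.util.Arrays;

    public final class ChecksumDumpDemo {
      public static void main(String[] args) {
        // A hypothetical 4-byte chunk checksum.
        final byte[] crc = {(byte) 0xDE, (byte) 0xAD, (byte) 0xBE, (byte) 0xEF};

        // Old form: signed decimal, and it forced a ByteString copy first.
        System.out.println(Arrays.toString(crc));   // [-34, -83, -66, -17]

        // New form: compact hex; in the patch this comes straight from
        // StringUtils.bytes2Hex(expected.asReadOnlyByteBuffer()), copy-free.
        final StringBuilder hex = new StringBuilder();
        for (byte b : crc) {
          hex.append(String.format("%02x", b & 0xff));
        }
        System.out.println(hex);                    // deadbeef
      }
    }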
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/ha/SCMRatisResponse.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/ha/SCMRatisResponse.java
index 15163bf3e6..9d65eae06b 100644
--- a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/ha/SCMRatisResponse.java
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/ha/SCMRatisResponse.java
@@ -17,12 +17,13 @@
package org.apache.hadoop.hdds.scm.ha;
-import com.google.protobuf.ByteString;
import com.google.protobuf.InvalidProtocolBufferException;
import org.apache.hadoop.hdds.protocol.proto.SCMRatisProtocol.SCMRatisResponseProto;
import org.apache.hadoop.hdds.scm.ha.io.CodecFactory;
import org.apache.ratis.protocol.Message;
import org.apache.ratis.protocol.RaftClientReply;
+import org.apache.ratis.thirdparty.com.google.protobuf.ByteString;
+import org.apache.ratis.thirdparty.com.google.protobuf.UnsafeByteOperations;
/**
* Represents the response from RatisServer.
@@ -72,13 +73,11 @@ public final class SCMRatisResponse {
}
final Class<?> type = result.getClass();
- final ByteString value = CodecFactory.getCodec(type).serialize(result);
-
final SCMRatisResponseProto response = SCMRatisResponseProto.newBuilder()
- .setType(type.getName()).setValue(value).build();
- return Message.valueOf(
- org.apache.ratis.thirdparty.com.google.protobuf.ByteString.copyFrom(
- response.toByteArray()));
+ .setType(type.getName())
+ .setValue(CodecFactory.getCodec(type).serialize(result))
+ .build();
+ return Message.valueOf(UnsafeByteOperations.unsafeWrap(response.toByteString().asReadOnlyByteBuffer()));
}
public static SCMRatisResponse decode(RaftClientReply reply)
@@ -87,14 +86,13 @@ public final class SCMRatisResponse {
return new SCMRatisResponse(reply.getException());
}
- final byte[] response = reply.getMessage().getContent().toByteArray();
+ final ByteString response = reply.getMessage().getContent();
- if (response.length == 0) {
+ if (response.isEmpty()) {
return new SCMRatisResponse();
}
- final SCMRatisResponseProto responseProto = SCMRatisResponseProto
- .parseFrom(response);
+ final SCMRatisResponseProto responseProto = SCMRatisResponseProto.parseFrom(response.toByteArray());
try {
final Class<?> type = ReflectionUtil.getClass(responseProto.getType());
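encode() now bridges the unshaded protobuf world (SCMRatisResponseProto) into Ratis' shaded one without the intermediate byte[]: toByteString().asReadOnlyByteBuffer() yields a view of the serialized proto, and UnsafeByteOperations.unsafeWrap aliases that buffer as a ratis-thirdparty ByteString instead of copying it. The wrap is safe here because the source ByteString is immutable. A hedged sketch of the same bridging pattern (the helper name is invented):

    import java.nio.ByteBuffer;

    import org.apache.ratis.protocol.Message;
    import org.apache.ratis.thirdparty.com.google.protobuf.UnsafeByteOperations;

    public final class ShadedBridgeDemo {
      // Bridges an unshaded protobuf ByteString into a Ratis Message
      // without the copy that ByteString.copyFrom(byte[]) would make.
      static Message toMessage(com.google.protobuf.ByteString unshaded) {
        final ByteBuffer view = unshaded.asReadOnlyByteBuffer(); // zero-copy view
        // unsafeWrap aliases the buffer; safe because ByteString is immutable.
        return Message.valueOf(UnsafeByteOperations.unsafeWrap(view));
      }
    }

Note the decode path still calls toByteArray() once: the unshaded proto2 parseFrom cannot consume a shaded ByteString directly, so one copy remains at that boundary.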
diff --git a/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/checksum/ECBlockChecksumComputer.java b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/checksum/ECBlockChecksumComputer.java
index e0b82bebc3..220bef7149 100644
--- a/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/checksum/ECBlockChecksumComputer.java
+++ b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/checksum/ECBlockChecksumComputer.java
@@ -25,12 +25,13 @@ import org.apache.hadoop.io.MD5Hash;
import org.apache.hadoop.ozone.om.helpers.OmKeyInfo;
import org.apache.hadoop.util.DataChecksum;
import org.apache.ratis.thirdparty.com.google.protobuf.ByteString;
+import org.apache.ratis.util.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
+import java.security.MessageDigest;
import java.util.List;
@@ -42,8 +43,8 @@ public class ECBlockChecksumComputer extends AbstractBlockChecksumComputer {
private static final Logger LOG =
LoggerFactory.getLogger(ECBlockChecksumComputer.class);
- private List<ContainerProtos.ChunkInfo> chunkInfoList;
- private OmKeyInfo keyInfo;
+ private final List<ContainerProtos.ChunkInfo> chunkInfoList;
+ private final OmKeyInfo keyInfo;
public ECBlockChecksumComputer(
@@ -68,7 +69,7 @@ public class ECBlockChecksumComputer extends AbstractBlockChecksumComputer {
}
- private void computeMd5Crc() throws IOException {
+ private void computeMd5Crc() {
Preconditions.checkArgument(chunkInfoList.size() > 0);
final ContainerProtos.ChunkInfo firstChunkInfo = chunkInfoList.get(0);
@@ -77,32 +78,28 @@ public class ECBlockChecksumComputer extends AbstractBlockChecksumComputer {
// Total parity checksum bytes per stripe to remove
int parityBytes = getParityBytes(chunkSize, bytesPerCrc);
- ByteArrayOutputStream out = new ByteArrayOutputStream();
+ final MessageDigest digester = MD5Hash.getDigester();
for (ContainerProtos.ChunkInfo chunkInfo : chunkInfoList) {
ByteString stripeChecksum = chunkInfo.getStripeChecksum();
Preconditions.checkNotNull(stripeChecksum);
- byte[] checksumBytes = stripeChecksum.toByteArray();
-
- Preconditions.checkArgument(checksumBytes.length % 4 == 0,
+ final int checksumSize = stripeChecksum.size();
+ Preconditions.checkArgument(checksumSize % 4 == 0,
"Checksum Bytes size does not match");
- ByteBuffer byteWrap = ByteBuffer
- .wrap(checksumBytes, 0, checksumBytes.length - parityBytes);
- byte[] currentChecksum = new byte[4];
-
- while (byteWrap.hasRemaining()) {
- byteWrap.get(currentChecksum);
- out.write(currentChecksum);
- }
+ final ByteBuffer byteWrap = stripeChecksum.asReadOnlyByteBuffer();
+ byteWrap.limit(checksumSize - parityBytes);
+ digester.update(byteWrap);
}
- MD5Hash fileMD5 = MD5Hash.digest(out.toByteArray());
- setOutBytes(fileMD5.getDigest());
+ final byte[] fileMD5 = digester.digest();
+ setOutBytes(fileMD5);
- LOG.debug("Number of chunks={}, md5hash={}",
- chunkInfoList.size(), fileMD5);
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("Number of chunks={}, md5hash={}",
+ chunkInfoList.size(), StringUtils.bytes2HexString(fileMD5));
+ }
}
private void computeCompositeCrc() throws IOException {
@@ -149,17 +146,15 @@ public class ECBlockChecksumComputer extends AbstractBlockChecksumComputer {
ByteString stripeChecksum = chunkInfo.getStripeChecksum();
Preconditions.checkNotNull(stripeChecksum);
- byte[] checksumBytes = stripeChecksum.toByteArray();
-
- Preconditions.checkArgument(checksumBytes.length % 4 == 0,
+ final int checksumSize = stripeChecksum.size();
+ Preconditions.checkArgument(checksumSize % 4 == 0,
"Checksum Bytes size does not match");
CrcComposer chunkCrcComposer =
CrcComposer.newCrcComposer(dataChecksumType, bytesPerCrc);
// Limit parity bytes as they do not contribute to fileChecksum
- ByteBuffer byteWrap = ByteBuffer
- .wrap(checksumBytes, 0, checksumBytes.length - parityBytes);
- byte[] currentChecksum = new byte[4];
+ final ByteBuffer byteWrap = stripeChecksum.asReadOnlyByteBuffer();
+ byteWrap.limit(checksumSize - parityBytes);
long chunkOffsetIndex = 1;
while (byteWrap.hasRemaining()) {
@@ -177,8 +172,7 @@ public class ECBlockChecksumComputer extends AbstractBlockChecksumComputer {
currentChunkOffset = bytesPerCrcOffset;
}
- byteWrap.get(currentChecksum);
- int checksumDataCrc = CrcUtil.readInt(currentChecksum, 0);
+ final int checksumDataCrc = byteWrap.getInt();
//To handle last chunk when it size is lower than 1524K in the case
// of rs-3-2-1524k.
long chunkSizePerChecksum = Math.min(Math.min(keySize, bytesPerCrc),
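Two buffer tricks carry this rewrite: MessageDigest.update(ByteBuffer) consumes a buffer slice directly, so setting limit() to size - parityBytes excludes the trailing parity checksums with no intermediate array, and ByteBuffer.getInt() reads each 4-byte CRC big-endian, matching what CrcUtil.readInt decoded from a byte[]. A self-contained sketch under those assumptions (the stripe layout and values are illustrative, not from the patch):

    import java.nio.ByteBuffer;
    import java.security.MessageDigest;
    import java.security.NoSuchAlgorithmException;

    public final class StripeChecksumDemo {
      public static void main(String[] args) throws NoSuchAlgorithmException {
        // Hypothetical stripe checksum: three 4-byte data CRCs followed by
        // two 4-byte parity CRCs (so parityBytes = 8).
        final ByteBuffer stripe = ByteBuffer.allocate(20);
        for (int i = 1; i <= 5; i++) {
          stripe.putInt(i * 0x11111111);
        }
        stripe.flip();

        final int parityBytes = 8;
        stripe.limit(stripe.capacity() - parityBytes); // drop parity CRCs

        // Digest the data CRCs straight from the buffer, no byte[] copy.
        final MessageDigest md5 = MessageDigest.getInstance("MD5");
        md5.update(stripe.duplicate());

        // getInt() reads big-endian by default, like CrcUtil.readInt did.
        while (stripe.hasRemaining()) {
          System.out.printf("data crc = 0x%08x%n", stripe.getInt());
        }
        System.out.printf("md5 of data crcs = %d bytes%n", md5.digest().length);
      }
    }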
diff --git a/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/checksum/ReplicatedBlockChecksumComputer.java b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/checksum/ReplicatedBlockChecksumComputer.java
index cf976e3bd3..2c0fc0c0d3 100644
--- a/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/checksum/ReplicatedBlockChecksumComputer.java
+++ b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/checksum/ReplicatedBlockChecksumComputer.java
@@ -26,8 +26,9 @@ import org.apache.ratis.thirdparty.com.google.protobuf.ByteString;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import java.io.ByteArrayOutputStream;
import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.security.MessageDigest;
import java.util.List;
/**
@@ -39,7 +40,13 @@ public class ReplicatedBlockChecksumComputer extends
private static final Logger LOG =
LoggerFactory.getLogger(ReplicatedBlockChecksumComputer.class);
- private List<ContainerProtos.ChunkInfo> chunkInfoList;
+ static MD5Hash digest(ByteBuffer data) {
+ final MessageDigest digester = MD5Hash.getDigester();
+ digester.update(data);
+ return new MD5Hash(digester.digest());
+ }
+
+ private final List<ContainerProtos.ChunkInfo> chunkInfoList;
public ReplicatedBlockChecksumComputer(
List<ContainerProtos.ChunkInfo> chunkInfoList) {
@@ -62,20 +69,20 @@ public class ReplicatedBlockChecksumComputer extends
}
// compute the block checksum, which is the md5 of chunk checksums
- private void computeMd5Crc() throws IOException {
- ByteArrayOutputStream baos = new ByteArrayOutputStream();
-
+ private void computeMd5Crc() {
+ ByteString bytes = ByteString.EMPTY;
for (ContainerProtos.ChunkInfo chunkInfo : chunkInfoList) {
ContainerProtos.ChecksumData checksumData =
chunkInfo.getChecksumData();
List<ByteString> checksums = checksumData.getChecksumsList();
for (ByteString checksum : checksums) {
- baos.write(checksum.toByteArray());
+ bytes = bytes.concat(checksum);
}
}
- MD5Hash fileMD5 = MD5Hash.digest(baos.toByteArray());
+ final MD5Hash fileMD5 = digest(bytes.asReadOnlyByteBuffer());
+
setOutBytes(fileMD5.getDigest());
LOG.debug("number of chunks={}, md5out={}",
@@ -121,7 +128,7 @@ public class ReplicatedBlockChecksumComputer extends
Preconditions.checkArgument(remainingChunkSize <=
checksums.size() * chunkSize);
for (ByteString checksum : checksums) {
- int checksumDataCrc = CrcUtil.readInt(checksum.toByteArray(), 0);
+ final int checksumDataCrc = checksum.asReadOnlyByteBuffer().getInt();
chunkCrcComposer.update(checksumDataCrc,
Math.min(bytesPerCrc, remainingChunkSize));
remainingChunkSize -= bytesPerCrc;
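computeMd5Crc() now accumulates the chunk checksums with ByteString.concat instead of writing each one into a ByteArrayOutputStream. For larger operands concat links the pieces into a rope rather than copying them (protobuf may still copy very small concatenations for balance), so the per-checksum copies disappear; materializing one read-only buffer for the digest can flatten the rope, but that is at most one copy instead of one per checksum. A rough sketch of the idea (hypothetical checksums):

    import org.apache.ratis.thirdparty.com.google.protobuf.ByteString;

    public final class ConcatDemo {
      public static void main(String[] args) {
        // Hypothetical per-chunk checksums, 4 bytes each.
        ByteString all = ByteString.EMPTY;
        for (int i = 0; i < 3; i++) {
          final byte[] crc = {(byte) i, 1, 2, 3};
          // copyFrom copies the 4 bytes once; concat then links the pieces
          // (as a rope once large enough) instead of re-copying everything,
          // unlike appending to a ByteArrayOutputStream.
          all = all.concat(ByteString.copyFrom(crc));
        }
        // One read-only view for hashing; a rope may be flattened once
        // here, still at most one copy rather than one per checksum.
        System.out.println(all.size() + " bytes, first byte = "
            + all.asReadOnlyByteBuffer().get());
      }
    }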
diff --git a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/freon/DatanodeChunkValidator.java b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/freon/DatanodeChunkValidator.java
index b290da2da1..2bbf8b6d5b 100644
--- a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/freon/DatanodeChunkValidator.java
+++ b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/freon/DatanodeChunkValidator.java
@@ -193,7 +193,7 @@ public class DatanodeChunkValidator extends BaseFreonGenerator
throws OzoneChecksumException {
ContainerProtos.ReadChunkResponseProto readChunk = response.getReadChunk();
if (readChunk.hasData()) {
- return checksum.computeChecksum(readChunk.getData().toByteArray());
+ return checksum.computeChecksum(readChunk.getData().asReadOnlyByteBuffer());
} else {
return checksum.computeChecksum(
readChunk.getDataBuffers().getBuffersList());
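The patched call relies on Checksum.computeChecksum accepting a ByteBuffer (which the diff shows it does), so the read-chunk data never has to land in a byte[] first. The same idea demonstrated with a JDK checksum, as a standalone illustration:

    import java.nio.ByteBuffer;
    import java.nio.charset.StandardCharsets;
    import java.util.zip.CRC32;

    public final class BufferChecksumDemo {
      public static void main(String[] args) {
        final byte[] data = "chunk payload".getBytes(StandardCharsets.UTF_8);

        // Copying path: materialize a byte[] first (what toByteArray() did).
        final CRC32 viaArray = new CRC32();
        viaArray.update(data);

        // Buffer path: feed a read-only view directly, no copy.
        final CRC32 viaBuffer = new CRC32();
        viaBuffer.update(ByteBuffer.wrap(data).asReadOnlyBuffer());

        System.out.println(viaArray.getValue() == viaBuffer.getValue()); // true
      }
    }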
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]