duongkame commented on code in PR #4424:
URL: https://github.com/apache/ozone/pull/4424#discussion_r1147002935
##########
hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/RDBBatchOperation.java:
##########
@@ -18,18 +18,225 @@
*/
package org.apache.hadoop.hdds.utils.db;
+import com.google.common.base.Preconditions;
import org.apache.hadoop.hdds.utils.db.RocksDatabase.ColumnFamily;
import org.apache.hadoop.hdds.utils.db.managed.ManagedWriteBatch;
import org.apache.hadoop.hdds.utils.db.managed.ManagedWriteOptions;
+import org.apache.ratis.util.StringUtils;
+import org.apache.ratis.util.TraditionalBinaryPrefix;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import java.io.IOException;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.function.Supplier;
/**
* Batch operation implementation for rocks db.
*/
public class RDBBatchOperation implements BatchOperation {
Review Comment:
It would be a good idea to add a comment stating that this class is not
thread-safe and is designed for single-threaded use.
##########
hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/RDBBatchOperation.java:
##########
@@ -39,26 +246,38 @@ public RDBBatchOperation(ManagedWriteBatch writeBatch) {
this.writeBatch = writeBatch;
}
+ @Override
+ public String toString() {
+ return name;
+ }
+
public void commit(RocksDatabase db) throws IOException {
+ debug(() -> String.format("%s: commit %s",
+ name, opCache.getCommitString()));
+ opCache.prepareBatchWrite();
db.batchWrite(writeBatch);
}
public void commit(RocksDatabase db, ManagedWriteOptions writeOptions)
throws IOException {
+ debug(() -> String.format("%s: commit-with-writeOptions %s",
+ name, opCache.getCommitString()));
+ opCache.prepareBatchWrite();
Review Comment:
I'd just pass `writeBatch` to `opCache.prepareBatchWrite()` to remove the
nested dependency. `OpCache` could then be moved outside this class and
given a better name, e.g. `BatchOperationCache`.
##########
hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/RDBBatchOperation.java:
##########
@@ -18,18 +18,225 @@
*/
package org.apache.hadoop.hdds.utils.db;
+import com.google.common.base.Preconditions;
import org.apache.hadoop.hdds.utils.db.RocksDatabase.ColumnFamily;
import org.apache.hadoop.hdds.utils.db.managed.ManagedWriteBatch;
import org.apache.hadoop.hdds.utils.db.managed.ManagedWriteOptions;
+import org.apache.ratis.util.StringUtils;
+import org.apache.ratis.util.TraditionalBinaryPrefix;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import java.io.IOException;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.function.Supplier;
/**
* Batch operation implementation for rocks db.
*/
public class RDBBatchOperation implements BatchOperation {
+ static final Logger LOG = LoggerFactory.getLogger(RDBBatchOperation.class);
+ private static final Object DELETE_OP = new Object();
+
+ private static void debug(Supplier<String> message) {
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("\n{}", message.get());
+ }
+ }
+
+ private static String byteSize2String(long length) {
+ return TraditionalBinaryPrefix.long2String(length, "B", 2);
+ }
+
+ private static String countSize2String(int count, long size) {
+ return count + " (" + byteSize2String(size) + ")";
+ }
+
+ /**
+ * To implement {@link #equals(Object)} and {@link #hashCode()}
+ * based on the contents of {@link #bytes}.
+ * <p>
+ * Note that it is incorrect to directly use
+ * {@link #bytes#equals(Object)} and {@link #bytes#hashCode()} here since
+ * they do not use the contents of {@link #bytes} in the computations.
+   * These methods simply inherit from {@link Object}.
+ */
+ private static final class ByteArray {
+ private final byte[] bytes;
+
+ ByteArray(byte[] bytes) {
+ this.bytes = bytes;
+ }
+
+ @Override
+ public boolean equals(Object obj) {
+ if (this == obj) {
+ return true;
+ } else if (obj == null || getClass() != obj.getClass()) {
+ return false;
+ }
+ final ByteArray that = (ByteArray) obj;
+ return Arrays.equals(this.bytes, that.bytes);
+ }
+
+ @Override
+ public int hashCode() {
+ return Arrays.hashCode(bytes);
Review Comment:
This hash code can (and should) be calculated once in the constructor and cached.
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]