This is an automated email from the ASF dual-hosted git repository.

adoroszlai pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/ozone.git


The following commit(s) were added to refs/heads/master by this push:
     new 5110a03bfe HDDS-10248. Remove unused, dead code in hdds-client module 
(#6126)
5110a03bfe is described below

commit 5110a03bfeca57ccf3b8fd4d6ae0f04703fb7889
Author: Istvan Fajth <[email protected]>
AuthorDate: Wed Jan 31 17:29:38 2024 +0100

    HDDS-10248. Remove unused, dead code in hdds-client module (#6126)
---
 .../hadoop/hdds/scm/client/HddsClientUtils.java    | 47 ----------------------
 .../hdds/scm/storage/BlockDataStreamOutput.java    | 15 -------
 .../hadoop/hdds/scm/storage/BlockInputStream.java  |  4 --
 .../hadoop/hdds/scm/storage/BlockOutputStream.java | 13 ------
 .../hdds/scm/storage/RatisBlockOutputStream.java   |  4 --
 .../hadoop/ozone/client/io/ECBlockInputStream.java | 11 -----
 .../hdds/scm/client/TestHddsClientUtils.java       |  1 -
 .../hdds/scm/storage/TestBlockInputStream.java     |  8 ----
 .../hadoop/ozone/client/io/ECStreamTestUtil.java   |  9 +----
 .../client/io/TestECBlockInputStreamProxy.java     |  4 +-
 10 files changed, 3 insertions(+), 113 deletions(-)

diff --git 
a/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/client/HddsClientUtils.java
 
b/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/client/HddsClientUtils.java
index cf6e09d95a..2b07dacf1e 100644
--- 
a/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/client/HddsClientUtils.java
+++ 
b/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/client/HddsClientUtils.java
@@ -18,11 +18,6 @@
 
 package org.apache.hadoop.hdds.scm.client;
 
-import java.text.ParseException;
-import java.time.Instant;
-import java.time.ZoneId;
-import java.time.ZonedDateTime;
-import java.time.format.DateTimeFormatter;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
@@ -72,37 +67,6 @@ public final class HddsClientUtils {
           .add(NotReplicatedException.class)
           .build();
 
-  /**
-   * Date format that used in ozone. Here the format is thread safe to use.
-   */
-  private static final ThreadLocal<DateTimeFormatter> DATE_FORMAT =
-      ThreadLocal.withInitial(() -> {
-        DateTimeFormatter format =
-            DateTimeFormatter.ofPattern(OzoneConsts.OZONE_DATE_FORMAT);
-        return format.withZone(ZoneId.of(OzoneConsts.OZONE_TIME_ZONE));
-      });
-
-
-  /**
-   * Convert time in millisecond to a human readable format required in ozone.
-   * @return a human readable string for the input time
-   */
-  public static String formatDateTime(long millis) {
-    ZonedDateTime dateTime = ZonedDateTime.ofInstant(
-        Instant.ofEpochMilli(millis), DATE_FORMAT.get().getZone());
-    return DATE_FORMAT.get().format(dateTime);
-  }
-
-  /**
-   * Convert time in ozone date format to millisecond.
-   * @return time in milliseconds
-   */
-  public static long formatDateTime(String date) throws ParseException {
-    Preconditions.checkNotNull(date, "Date string should not be null.");
-    return ZonedDateTime.parse(date, DATE_FORMAT.get())
-        .toInstant().toEpochMilli();
-  }
-
   private static void doNameChecks(String resName) {
     if (resName == null) {
       throw new IllegalArgumentException("Bucket or Volume name is null");
@@ -208,17 +172,6 @@ public final class HddsClientUtils {
     }
   }
 
-  /**
-   * verifies that bucket / volume name is a valid DNS name.
-   *
-   * @param resourceNames Array of bucket / volume names to be verified.
-   */
-  public static void verifyResourceName(String... resourceNames) {
-    for (String resourceName : resourceNames) {
-      HddsClientUtils.verifyResourceName(resourceName);
-    }
-  }
-
   /**
    * verifies that key name is a valid name.
    *
diff --git 
a/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/storage/BlockDataStreamOutput.java
 
b/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/storage/BlockDataStreamOutput.java
index 005402efa7..bfef03e87d 100644
--- 
a/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/storage/BlockDataStreamOutput.java
+++ 
b/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/storage/BlockDataStreamOutput.java
@@ -18,7 +18,6 @@
 
 package org.apache.hadoop.hdds.scm.storage;
 
-import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Preconditions;
 import org.apache.hadoop.hdds.client.BlockID;
 import org.apache.hadoop.hdds.protocol.DatanodeDetails;
@@ -239,11 +238,6 @@ public class BlockDataStreamOutput implements 
ByteBufferStreamOutput {
     return failedServers;
   }
 
-  @VisibleForTesting
-  public XceiverClientRatis getXceiverClient() {
-    return xceiverClient;
-  }
-
   public IOException getIoException() {
     return ioException.get();
   }
@@ -331,10 +325,6 @@ public class BlockDataStreamOutput implements 
ByteBufferStreamOutput {
     totalDataFlushedLength = writtenDataLength;
   }
 
-  @VisibleForTesting
-  public long getTotalDataFlushedLength() {
-    return totalDataFlushedLength;
-  }
   /**
    * Will be called on the retryPath in case closedContainerException/
    * TimeoutException.
@@ -703,11 +693,6 @@ public class BlockDataStreamOutput implements 
ByteBufferStreamOutput {
     containerBlockData.addChunks(chunkInfo);
   }
 
-  @VisibleForTesting
-  public void setXceiverClient(XceiverClientRatis xceiverClient) {
-    this.xceiverClient = xceiverClient;
-  }
-
   /**
    * Handles InterruptedExecution.
    *
diff --git 
a/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/storage/BlockInputStream.java
 
b/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/storage/BlockInputStream.java
index 385ea6d0c3..1fb4bf954c 100644
--- 
a/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/storage/BlockInputStream.java
+++ 
b/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/storage/BlockInputStream.java
@@ -581,8 +581,4 @@ public class BlockInputStream extends 
BlockExtendedInputStream {
     return chunkStreams;
   }
 
-  @VisibleForTesting
-  public static Logger getLog() {
-    return LOG;
-  }
 }
diff --git 
a/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/storage/BlockOutputStream.java
 
b/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/storage/BlockOutputStream.java
index ac21411ea5..bbc4616695 100644
--- 
a/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/storage/BlockOutputStream.java
+++ 
b/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/storage/BlockOutputStream.java
@@ -235,10 +235,6 @@ public class BlockOutputStream extends OutputStream {
     return ioException.get();
   }
 
-  XceiverClientSpi getXceiverClientSpi() {
-    return this.xceiverClient;
-  }
-
   public BlockData.Builder getContainerBlockData() {
     return this.containerBlockData;
   }
@@ -327,10 +323,6 @@ public class BlockOutputStream extends OutputStream {
     totalDataFlushedLength = writtenDataLength;
   }
 
-  private boolean isBufferPoolFull() {
-    return bufferPool.computeBufferData() == 
streamBufferArgs.getStreamBufferMaxSize();
-  }
-
   /**
    * Will be called on the retryPath in case closedContainerException/
    * TimeoutException.
@@ -758,11 +750,6 @@ public class BlockOutputStream extends OutputStream {
     return null;
   }
 
-  @VisibleForTesting
-  public void setXceiverClient(XceiverClientSpi xceiverClient) {
-    this.xceiverClient = xceiverClient;
-  }
-
   /**
    * Handles InterruptedExecution.
    *
diff --git 
a/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/storage/RatisBlockOutputStream.java
 
b/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/storage/RatisBlockOutputStream.java
index ee708bf0de..b52fc2af91 100644
--- 
a/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/storage/RatisBlockOutputStream.java
+++ 
b/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/storage/RatisBlockOutputStream.java
@@ -30,8 +30,6 @@ import org.apache.hadoop.hdds.scm.pipeline.Pipeline;
 import org.apache.hadoop.ozone.common.ChunkBuffer;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.TokenIdentifier;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 
 import java.io.IOException;
 import java.io.OutputStream;
@@ -58,8 +56,6 @@ import java.util.concurrent.ExecutionException;
  */
 public class RatisBlockOutputStream extends BlockOutputStream
     implements Syncable {
-  public static final Logger LOG = LoggerFactory.getLogger(
-      RatisBlockOutputStream.class);
 
   // This object will maintain the commitIndexes and byteBufferList in order
   // Also, corresponding to the logIndex, the corresponding list of buffers 
will
diff --git 
a/hadoop-hdds/client/src/main/java/org/apache/hadoop/ozone/client/io/ECBlockInputStream.java
 
b/hadoop-hdds/client/src/main/java/org/apache/hadoop/ozone/client/io/ECBlockInputStream.java
index ea4f3d743f..e85bf27d53 100644
--- 
a/hadoop-hdds/client/src/main/java/org/apache/hadoop/ozone/client/io/ECBlockInputStream.java
+++ 
b/hadoop-hdds/client/src/main/java/org/apache/hadoop/ozone/client/io/ECBlockInputStream.java
@@ -107,17 +107,6 @@ public class ECBlockInputStream extends 
BlockExtendedInputStream {
     return count;
   }
 
-  protected int availableParityLocations() {
-    int count = 0;
-    for (int i = repConfig.getData();
-         i < repConfig.getData() + repConfig.getParity(); i++) {
-      if (dataLocations[i] != null) {
-        count++;
-      }
-    }
-    return count;
-  }
-
   public ECBlockInputStream(ECReplicationConfig repConfig,
       BlockLocationInfo blockInfo, boolean verifyChecksum,
       XceiverClientFactory xceiverClientFactory,
diff --git 
a/hadoop-hdds/client/src/test/java/org/apache/hadoop/hdds/scm/client/TestHddsClientUtils.java
 
b/hadoop-hdds/client/src/test/java/org/apache/hadoop/hdds/scm/client/TestHddsClientUtils.java
index 21b5a6c6e7..3cae9ec634 100644
--- 
a/hadoop-hdds/client/src/test/java/org/apache/hadoop/hdds/scm/client/TestHddsClientUtils.java
+++ 
b/hadoop-hdds/client/src/test/java/org/apache/hadoop/hdds/scm/client/TestHddsClientUtils.java
@@ -102,7 +102,6 @@ public class TestHddsClientUtils {
     conf.set(ScmConfigKeys.OZONE_SCM_NODE_ID_KEY, "scm1");
 
     int port = 9880;
-    int i = 1;
     for (String nodeId : nodes) {
       conf.setInt(ConfUtils.addKeySuffixes(OZONE_SCM_CLIENT_PORT_KEY,
           scmServiceId, nodeId), port);
diff --git 
a/hadoop-hdds/client/src/test/java/org/apache/hadoop/hdds/scm/storage/TestBlockInputStream.java
 
b/hadoop-hdds/client/src/test/java/org/apache/hadoop/hdds/scm/storage/TestBlockInputStream.java
index a068162360..7755adc5f3 100644
--- 
a/hadoop-hdds/client/src/test/java/org/apache/hadoop/hdds/scm/storage/TestBlockInputStream.java
+++ 
b/hadoop-hdds/client/src/test/java/org/apache/hadoop/hdds/scm/storage/TestBlockInputStream.java
@@ -22,7 +22,6 @@ import com.google.common.primitives.Bytes;
 import org.apache.commons.lang3.RandomUtils;
 import org.apache.hadoop.hdds.client.BlockID;
 import org.apache.hadoop.hdds.client.ContainerBlockID;
-import org.apache.hadoop.hdds.protocol.DatanodeDetails;
 import 
org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos.ChecksumType;
 import 
org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos.ChunkInfo;
 import org.apache.hadoop.hdds.scm.XceiverClientFactory;
@@ -47,7 +46,6 @@ import java.io.EOFException;
 import java.io.IOException;
 import java.nio.ByteBuffer;
 import java.util.ArrayList;
-import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
@@ -381,12 +379,6 @@ public class TestBlockInputStream {
     }
   }
 
-  private Pipeline samePipelineWithNewId(Pipeline pipeline) {
-    List<DatanodeDetails> reverseOrder = new ArrayList<>(pipeline.getNodes());
-    Collections.reverse(reverseOrder);
-    return MockPipeline.createPipeline(reverseOrder);
-  }
-
   @ParameterizedTest
   @MethodSource("exceptionsTriggersRefresh")
   public void testRefreshOnReadFailureAfterUnbuffer(IOException ex)
diff --git 
a/hadoop-hdds/client/src/test/java/org/apache/hadoop/ozone/client/io/ECStreamTestUtil.java
 
b/hadoop-hdds/client/src/test/java/org/apache/hadoop/ozone/client/io/ECStreamTestUtil.java
index 8db662cee0..41bf46a8ea 100644
--- 
a/hadoop-hdds/client/src/test/java/org/apache/hadoop/ozone/client/io/ECStreamTestUtil.java
+++ 
b/hadoop-hdds/client/src/test/java/org/apache/hadoop/ozone/client/io/ECStreamTestUtil.java
@@ -281,7 +281,6 @@ public final class ECStreamTestUtil {
   public static class TestBlockInputStream extends BlockExtendedInputStream {
 
     private ByteBuffer data;
-    private boolean closed = false;
     private BlockID blockID;
     private long length;
     private boolean shouldError = false;
@@ -304,10 +303,6 @@ public final class ECStreamTestUtil {
       data.position(0);
     }
 
-    public boolean isClosed() {
-      return closed;
-    }
-
     public void setShouldErrorOnSeek(boolean val) {
       this.shouldErrorOnSeek = val;
     }
@@ -377,9 +372,7 @@ public final class ECStreamTestUtil {
     }
 
     @Override
-    public void close() {
-      closed = true;
-    }
+    public void close() { }
 
     @Override
     public void unbuffer() {
diff --git 
a/hadoop-hdds/client/src/test/java/org/apache/hadoop/ozone/client/io/TestECBlockInputStreamProxy.java
 
b/hadoop-hdds/client/src/test/java/org/apache/hadoop/ozone/client/io/TestECBlockInputStreamProxy.java
index e8ada43b08..97bf71c204 100644
--- 
a/hadoop-hdds/client/src/test/java/org/apache/hadoop/ozone/client/io/TestECBlockInputStreamProxy.java
+++ 
b/hadoop-hdds/client/src/test/java/org/apache/hadoop/ozone/client/io/TestECBlockInputStreamProxy.java
@@ -169,7 +169,7 @@ public class TestECBlockInputStreamProxy {
     BlockLocationInfo blockInfo =
         ECStreamTestUtil.createKeyInfo(repConfig, blockLength, dnMap);
 
-    try (ECBlockInputStreamProxy bis = createBISProxy(repConfig, blockInfo)) {
+    try (ECBlockInputStreamProxy ignored = createBISProxy(repConfig, 
blockInfo)) {
      // Not all locations present, so we expect only the "missing=true" stream
       // to be present.
       assertThat(streamFactory.getStreams()).containsKey(false);
@@ -181,7 +181,7 @@ public class TestECBlockInputStreamProxy {
     dnMap = ECStreamTestUtil.createIndexMap(2, 3, 4, 5);
     blockInfo = ECStreamTestUtil.createKeyInfo(repConfig, blockLength, dnMap);
 
-    try (ECBlockInputStreamProxy bis = createBISProxy(repConfig, blockInfo)) {
+    try (ECBlockInputStreamProxy ignored = createBISProxy(repConfig, 
blockInfo)) {
      // Not all locations present, so we expect only the "missing=true" stream
       // to be present.
       assertThat(streamFactory.getStreams()).doesNotContainKey(false);


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to