This is an automated email from the ASF dual-hosted git repository.

pifta pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/ozone.git


The following commit(s) were added to refs/heads/master by this push:
     new 922697ec121 HDDS-14584. Remove dependency on commons-validator in 
hdds-common (#9731)
922697ec121 is described below

commit 922697ec121b206823299101aadc938bec87ece5
Author: Doroszlai, Attila <[email protected]>
AuthorDate: Tue Feb 10 20:42:32 2026 +0100

    HDDS-14584. Remove dependency on commons-validator in hdds-common (#9731)
---
 hadoop-hdds/common/pom.xml                         |  4 -
 .../org/apache/hadoop/ozone/OzoneSecurityUtil.java | 87 ----------------------
 .../hadoop/hdds/security/ssl/KeyStoresFactory.java |  0
 .../hdds/security/ssl/ReloadingX509KeyManager.java |  0
 .../security/ssl/ReloadingX509TrustManager.java    |  0
 .../hadoop/hdds/security/ssl/package-info.java     |  0
 .../x509/certificate/client/CertificateClient.java |  0
 .../client/CertificateNotification.java            |  0
 .../certificate/utils/CertificateSignRequest.java  |  5 +-
 .../certificate/utils/SelfSignedCertificate.java   |  5 +-
 .../x509/certificate/utils}/package-info.java      |  4 +-
 .../apache/hadoop/hdds/utils/HddsServerUtil.java   | 85 +++++++++++++++++++++
 .../hadoop/ozone/TestSecureOzoneCluster.java       |  4 +-
 13 files changed, 93 insertions(+), 101 deletions(-)

diff --git a/hadoop-hdds/common/pom.xml b/hadoop-hdds/common/pom.xml
index 3787a0e1429..ec6e4578706 100644
--- a/hadoop-hdds/common/pom.xml
+++ b/hadoop-hdds/common/pom.xml
@@ -67,10 +67,6 @@
       <groupId>commons-io</groupId>
       <artifactId>commons-io</artifactId>
     </dependency>
-    <dependency>
-      <groupId>commons-validator</groupId>
-      <artifactId>commons-validator</artifactId>
-    </dependency>
     <dependency>
       <groupId>info.picocli</groupId>
       <artifactId>picocli</artifactId>
diff --git 
a/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/OzoneSecurityUtil.java
 
b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/OzoneSecurityUtil.java
index 76ce8ebd917..71e09bd4ca5 100644
--- 
a/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/OzoneSecurityUtil.java
+++ 
b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/OzoneSecurityUtil.java
@@ -24,18 +24,10 @@
 
 import java.io.File;
 import java.io.IOException;
-import java.net.Inet6Address;
-import java.net.InetAddress;
-import java.net.NetworkInterface;
 import java.nio.file.Path;
 import java.security.cert.X509Certificate;
 import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Enumeration;
-import java.util.HashSet;
 import java.util.List;
-import java.util.Set;
-import org.apache.commons.validator.routines.InetAddressValidator;
 import org.apache.hadoop.hdds.annotation.InterfaceAudience;
 import org.apache.hadoop.hdds.annotation.InterfaceStability;
 import org.apache.hadoop.hdds.conf.ConfigurationSource;
@@ -53,9 +45,6 @@ public final class OzoneSecurityUtil {
 
   private static final Logger LOG =
       LoggerFactory.getLogger(OzoneSecurityUtil.class);
-  // List of ip's not recommended to be added to CSR.
-  private static final Set<String> INVALID_IPS = new HashSet<>(Arrays.asList(
-      "0.0.0.0", "127.0.0.1"));
 
   private OzoneSecurityUtil() {
   }
@@ -82,82 +71,6 @@ public static boolean checkIfFileExist(Path path, String 
fileName) {
         && new File(dir, fileName).exists();
   }
 
-  /**
-   * Iterates through network interfaces and return all valid ip's not
-   * listed in CertificateSignRequest#INVALID_IPS.
-   *
-   * @return List<InetAddress>
-   * @throws IOException if no network interfaces are found or if an error
-   * occurs.
-   */
-  public static List<InetAddress> getValidInetsForCurrentHost()
-      throws IOException {
-    List<InetAddress> hostIps = new ArrayList<>();
-    InetAddressValidator ipValidator = InetAddressValidator.getInstance();
-
-    Enumeration<NetworkInterface> enumNI =
-        NetworkInterface.getNetworkInterfaces();
-    if (enumNI == null) {
-      throw new IOException("Unable to get network interfaces.");
-    }
-
-    while (enumNI.hasMoreElements()) {
-      NetworkInterface ifc = enumNI.nextElement();
-      if (ifc.isUp()) {
-        Enumeration<InetAddress> enumAdds = ifc.getInetAddresses();
-        while (enumAdds.hasMoreElements()) {
-          InetAddress addr = enumAdds.nextElement();
-
-          String hostAddress = addr.getHostAddress();
-          if (!INVALID_IPS.contains(hostAddress) && 
ipValidator.isValid(hostAddress)
-              && !isScopedOrMaskingIPv6Address(addr)) {
-            LOG.info("Adding ip:{},host:{}", hostAddress, addr.getHostName());
-            hostIps.add(addr);
-          } else {
-            LOG.info("ip:{} not returned.", hostAddress);
-          }
-        }
-      }
-    }
-
-    return hostIps;
-  }
-
-  /**
-   * Determines if the supplied address is an IPv6 address, with a defined 
scope-id and/or with a defined prefix length.
-   * <p>
-   * This method became necessary after Commons Validator was upgraded from 
1.6 version to 1.10. In 1.10 version the
-   * IPv6 addresses with a scope-id and/or with a prefix specifier became 
valid IPv6 addresses, but as these features
-   * are changing the string representation to do not represent only the 16 
octet that specifies the address, the
-   * string representation can not be used as it is as a SAN extension in 
X.509 anymore as in RFC-5280 this type of
-   * Subject Alternative Name is exactly 4 octets in case of an IPv4 address, 
and 16 octets in case of an IPv6 address.
-   * BouncyCastle does not have support to deal with these in an IPAddress 
typed GeneralName, so we need to keep the
-   * previous behaviour, and skip IPv6 addresses with a prefix length and/or a 
scope-id.
-   * <p>
-   * According to RFC-4007 and the InetAddress contract the scope-id is at the 
end of the address' string
-   * representation, separated by a '%' character from the address.
-   * According to RFC-4632 there is a possibility to specify a prefix length 
at the end of the address to specify
-   * routing related information. RFC-4007 specifies the prefix length to come 
after the scope-id.
-   * <p>
-   *
-   * @param addr the InetAddress to check
-   * @return if the InetAddress is an IPv6 address and if so it contains a 
scope-id and/or a prefix length.
-   * @see <a href="https://datatracker.ietf.org/doc/html/rfc4007";>RFC-4007 - 
Scoped IPv6 Addresses</a>
-   * @see <a 
href="https://datatracker.ietf.org/doc/html/rfc4632#section-5.1";>RFC-4632 - 
CIDR addressing strategy -
-   *        prefix length</a>
-   * @see <a 
href="https://datatracker.ietf.org/doc/html/rfc5280#section-4.2.1.6";>RFC-5280 - 
SAN description</a>
-   * @see <a 
href="https://issues.apache.org/jira/browse/VALIDATOR-445";>VALIDATOR-445 - 
Commons Validator change</a>
-   * @see <a href="https://github.com/bcgit/bc-java/issues/2024";>BouncyCastle 
issue discussion about scoped IPv6
-   *        addresses</a>
-   */
-  public static boolean isScopedOrMaskingIPv6Address(InetAddress addr) {
-    if (addr instanceof Inet6Address) {
-      String hostAddress = addr.getHostAddress();
-      return hostAddress.contains("/") || hostAddress.contains("%");
-    }
-    return false;
-  }
-
   /**
    * Convert list of string encoded certificates to list of X509Certificate.
    * @param pemEncodedCerts
diff --git 
a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/ssl/KeyStoresFactory.java
 
b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/security/ssl/KeyStoresFactory.java
similarity index 100%
rename from 
hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/ssl/KeyStoresFactory.java
rename to 
hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/security/ssl/KeyStoresFactory.java
diff --git 
a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/ssl/ReloadingX509KeyManager.java
 
b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/security/ssl/ReloadingX509KeyManager.java
similarity index 100%
rename from 
hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/ssl/ReloadingX509KeyManager.java
rename to 
hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/security/ssl/ReloadingX509KeyManager.java
diff --git 
a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/ssl/ReloadingX509TrustManager.java
 
b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/security/ssl/ReloadingX509TrustManager.java
similarity index 100%
rename from 
hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/ssl/ReloadingX509TrustManager.java
rename to 
hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/security/ssl/ReloadingX509TrustManager.java
diff --git 
a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/ssl/package-info.java
 
b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/security/ssl/package-info.java
similarity index 100%
copy from 
hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/ssl/package-info.java
copy to 
hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/security/ssl/package-info.java
diff --git 
a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/certificate/client/CertificateClient.java
 
b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/security/x509/certificate/client/CertificateClient.java
similarity index 100%
rename from 
hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/certificate/client/CertificateClient.java
rename to 
hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/security/x509/certificate/client/CertificateClient.java
diff --git 
a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/certificate/client/CertificateNotification.java
 
b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/security/x509/certificate/client/CertificateNotification.java
similarity index 100%
rename from 
hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/certificate/client/CertificateNotification.java
rename to 
hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/security/x509/certificate/client/CertificateNotification.java
diff --git 
a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/certificate/utils/CertificateSignRequest.java
 
b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/security/x509/certificate/utils/CertificateSignRequest.java
similarity index 98%
rename from 
hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/certificate/utils/CertificateSignRequest.java
rename to 
hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/security/x509/certificate/utils/CertificateSignRequest.java
index 2902c43713d..a3933e22df4 100644
--- 
a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/certificate/utils/CertificateSignRequest.java
+++ 
b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/security/x509/certificate/utils/CertificateSignRequest.java
@@ -19,6 +19,7 @@
 
 import static 
org.apache.hadoop.hdds.security.exception.SCMSecurityException.ErrorCode.INVALID_CSR;
 import static 
org.apache.hadoop.hdds.security.x509.exception.CertificateException.ErrorCode.CSR_ERROR;
+import static 
org.apache.hadoop.hdds.utils.HddsServerUtil.getValidInetsForCurrentHost;
 
 import com.google.common.base.Preconditions;
 import java.io.IOException;
@@ -35,7 +36,6 @@
 import org.apache.hadoop.hdds.security.SecurityConfig;
 import org.apache.hadoop.hdds.security.exception.SCMSecurityException;
 import org.apache.hadoop.hdds.security.x509.exception.CertificateException;
-import org.apache.hadoop.ozone.OzoneSecurityUtil;
 import org.bouncycastle.asn1.ASN1EncodableVector;
 import org.bouncycastle.asn1.ASN1Object;
 import org.bouncycastle.asn1.ASN1ObjectIdentifier;
@@ -294,8 +294,7 @@ public CertificateSignRequest.Builder addInetAddresses()
       try {
         DomainValidator validator = DomainValidator.getInstance();
         // Add all valid ips.
-        List<InetAddress> inetAddresses =
-            OzoneSecurityUtil.getValidInetsForCurrentHost();
+        List<InetAddress> inetAddresses = getValidInetsForCurrentHost();
         this.addInetAddresses(inetAddresses, validator);
       } catch (IOException e) {
         throw new CertificateException("Error while getting Inet addresses " +
diff --git 
a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/certificate/utils/SelfSignedCertificate.java
 
b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/security/x509/certificate/utils/SelfSignedCertificate.java
similarity index 98%
rename from 
hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/certificate/utils/SelfSignedCertificate.java
rename to 
hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/security/x509/certificate/utils/SelfSignedCertificate.java
index c4a7a01f5a8..87a7cbe4c32 100644
--- 
a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/certificate/utils/SelfSignedCertificate.java
+++ 
b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/security/x509/certificate/utils/SelfSignedCertificate.java
@@ -19,6 +19,7 @@
 
 import static 
org.apache.hadoop.hdds.security.x509.exception.CertificateException.ErrorCode.CERTIFICATE_ERROR;
 import static 
org.apache.hadoop.hdds.security.x509.exception.CertificateException.ErrorCode.CSR_ERROR;
+import static 
org.apache.hadoop.hdds.utils.HddsServerUtil.getValidInetsForCurrentHost;
 
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Preconditions;
@@ -38,7 +39,6 @@
 import org.apache.hadoop.hdds.security.SecurityConfig;
 import org.apache.hadoop.hdds.security.exception.SCMSecurityException;
 import org.apache.hadoop.hdds.security.x509.exception.CertificateException;
-import org.apache.hadoop.ozone.OzoneSecurityUtil;
 import org.apache.hadoop.util.Time;
 import org.bouncycastle.asn1.ASN1EncodableVector;
 import org.bouncycastle.asn1.ASN1Object;
@@ -221,8 +221,7 @@ public Builder addInetAddresses() throws 
CertificateException {
       try {
         DomainValidator validator = DomainValidator.getInstance();
         // Add all valid ips.
-        List<InetAddress> inetAddresses =
-            OzoneSecurityUtil.getValidInetsForCurrentHost();
+        List<InetAddress> inetAddresses = getValidInetsForCurrentHost();
         this.addInetAddresses(inetAddresses, validator);
       } catch (IOException e) {
         throw new CertificateException("Error while getting Inet addresses " +
diff --git 
a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/ssl/package-info.java
 
b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/security/x509/certificate/utils/package-info.java
similarity index 89%
rename from 
hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/ssl/package-info.java
rename to 
hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/security/x509/certificate/utils/package-info.java
index d6cc84f80d7..4b64f762095 100644
--- 
a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/ssl/package-info.java
+++ 
b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/security/x509/certificate/utils/package-info.java
@@ -16,6 +16,6 @@
  */
 
 /**
- * This package contains ssl related classes.
+ Helpers for Certificates.
  */
-package org.apache.hadoop.hdds.security.ssl;
+package org.apache.hadoop.hdds.security.x509.certificate.utils;
diff --git 
a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/HddsServerUtil.java
 
b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/HddsServerUtil.java
index 3d8b6178025..760bdfbd04b 100644
--- 
a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/HddsServerUtil.java
+++ 
b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/HddsServerUtil.java
@@ -61,12 +61,16 @@
 import java.io.File;
 import java.io.IOException;
 import java.io.OutputStream;
+import java.net.Inet6Address;
+import java.net.InetAddress;
 import java.net.InetSocketAddress;
+import java.net.NetworkInterface;
 import java.nio.file.Files;
 import java.nio.file.Path;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collection;
+import java.util.Enumeration;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
@@ -82,6 +86,7 @@
 import org.apache.commons.lang3.StringUtils;
 import org.apache.commons.lang3.SystemUtils;
 import org.apache.commons.lang3.tuple.Pair;
+import org.apache.commons.validator.routines.InetAddressValidator;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdds.HddsConfigKeys;
 import org.apache.hadoop.hdds.HddsUtils;
@@ -137,6 +142,10 @@ public final class HddsServerUtil {
   public static final String OZONE_RATIS_SNAPSHOT_COMPLETE_FLAG_NAME =
       "OZONE_RATIS_SNAPSHOT_COMPLETE";
 
+  // List of ip's not recommended to be added to CSR.
+  private static final Set<String> INVALID_IPS = new HashSet<>(Arrays.asList(
+      "0.0.0.0", "127.0.0.1"));
+
   private HddsServerUtil() {
   }
 
@@ -153,6 +162,82 @@ public static void addPBProtocol(Configuration conf, 
Class<?> protocol,
     server.addProtocol(RPC.RpcKind.RPC_PROTOCOL_BUFFER, protocol, service);
   }
 
+  /**
+   * Iterates through network interfaces and return all valid ip's not
+   * listed in {@link #INVALID_IPS}.
+   *
+   * @return List<InetAddress>
+   * @throws IOException if no network interfaces are found or if an error
+   * occurs.
+   */
+  public static List<InetAddress> getValidInetsForCurrentHost()
+      throws IOException {
+    List<InetAddress> hostIps = new ArrayList<>();
+    InetAddressValidator ipValidator = InetAddressValidator.getInstance();
+
+    Enumeration<NetworkInterface> enumNI =
+        NetworkInterface.getNetworkInterfaces();
+    if (enumNI == null) {
+      throw new IOException("Unable to get network interfaces.");
+    }
+
+    while (enumNI.hasMoreElements()) {
+      NetworkInterface ifc = enumNI.nextElement();
+      if (ifc.isUp()) {
+        Enumeration<InetAddress> enumAdds = ifc.getInetAddresses();
+        while (enumAdds.hasMoreElements()) {
+          InetAddress addr = enumAdds.nextElement();
+
+          String hostAddress = addr.getHostAddress();
+          if (!INVALID_IPS.contains(hostAddress) && 
ipValidator.isValid(hostAddress)
+              && !isScopedOrMaskingIPv6Address(addr)) {
+            LOG.info("Adding ip:{},host:{}", hostAddress, addr.getHostName());
+            hostIps.add(addr);
+          } else {
+            LOG.info("ip:{} not returned.", hostAddress);
+          }
+        }
+      }
+    }
+
+    return hostIps;
+  }
+
+  /**
+   * Determines if the supplied address is an IPv6 address, with a defined 
scope-id and/or with a defined prefix length.
+   * <p>
+   * This method became necessary after Commons Validator was upgraded from 
1.6 version to 1.10. In 1.10 version the
+   * IPv6 addresses with a scope-id and/or with a prefix specifier became 
valid IPv6 addresses, but as these features
+   * are changing the string representation to do not represent only the 16 
octet that specifies the address, the
+   * string representation can not be used as it is as a SAN extension in 
X.509 anymore as in RFC-5280 this type of
+   * Subject Alternative Name is exactly 4 octets in case of an IPv4 address, 
and 16 octets in case of an IPv6 address.
+   * BouncyCastle does not have support to deal with these in an IPAddress 
typed GeneralName, so we need to keep the
+   * previous behaviour, and skip IPv6 addresses with a prefix length and/or a 
scope-id.
+   * <p>
+   * According to RFC-4007 and the InetAddress contract the scope-id is at the 
end of the address' string
+   * representation, separated by a '%' character from the address.
+   * According to RFC-4632 there is a possibility to specify a prefix length 
at the end of the address to specify
+   * routing related information. RFC-4007 specifies the prefix length to come 
after the scope-id.
+   * <p>
+   *
+   * @param addr the InetAddress to check
+   * @return if the InetAddress is an IPv6 address and if so it contains a 
scope-id and/or a prefix length.
+   * @see <a href="https://datatracker.ietf.org/doc/html/rfc4007";>RFC-4007 - 
Scoped IPv6 Addresses</a>
+   * @see <a 
href="https://datatracker.ietf.org/doc/html/rfc4632#section-5.1";>RFC-4632 - 
CIDR addressing strategy -
+   *        prefix length</a>
+   * @see <a 
href="https://datatracker.ietf.org/doc/html/rfc5280#section-4.2.1.6";>RFC-5280 - 
SAN description</a>
+   * @see <a 
href="https://issues.apache.org/jira/browse/VALIDATOR-445";>VALIDATOR-445 - 
Commons Validator change</a>
+   * @see <a href="https://github.com/bcgit/bc-java/issues/2024";>BouncyCastle 
issue discussion about scoped IPv6
+   *        addresses</a>
+   */
+  public static boolean isScopedOrMaskingIPv6Address(InetAddress addr) {
+    if (addr instanceof Inet6Address) {
+      String hostAddress = addr.getHostAddress();
+      return hostAddress.contains("/") || hostAddress.contains("%");
+    }
+    return false;
+  }
+
   /**
    * Retrieve the socket address that should be used by clients to connect
    * to the SCM.
diff --git 
a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/TestSecureOzoneCluster.java
 
b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/TestSecureOzoneCluster.java
index 35c527180bf..0bc1327e48c 100644
--- 
a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/TestSecureOzoneCluster.java
+++ 
b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/TestSecureOzoneCluster.java
@@ -39,6 +39,7 @@
 import static 
org.apache.hadoop.hdds.scm.server.SCMHTTPServerConfig.ConfigStrings.HDDS_SCM_HTTP_KERBEROS_PRINCIPAL_KEY;
 import static 
org.apache.hadoop.hdds.security.x509.exception.CertificateException.ErrorCode.ROLLBACK_ERROR;
 import static org.apache.hadoop.hdds.utils.HddsServerUtil.getScmSecurityClient;
+import static 
org.apache.hadoop.hdds.utils.HddsServerUtil.getValidInetsForCurrentHost;
 import static org.apache.hadoop.ozone.OzoneConfigKeys.OZONE_ADMINISTRATORS;
 import static 
org.apache.hadoop.ozone.OzoneConfigKeys.OZONE_CLIENT_FAILOVER_MAX_ATTEMPTS_KEY;
 import static 
org.apache.hadoop.ozone.OzoneConfigKeys.OZONE_SECURITY_ENABLED_KEY;
@@ -1449,8 +1450,7 @@ private static void addIpAndDnsDataToBuilder(
       CertificateSignRequest.Builder csrBuilder) throws IOException {
     DomainValidator validator = DomainValidator.getInstance();
     // Add all valid ips.
-    List<InetAddress> inetAddresses =
-        OzoneSecurityUtil.getValidInetsForCurrentHost();
+    List<InetAddress> inetAddresses = getValidInetsForCurrentHost();
     csrBuilder.addInetAddresses(inetAddresses, validator);
   }
 }


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to