This is an automated email from the ASF dual-hosted git repository.

mthakur pushed a commit to branch branch-3.4
in repository https://gitbox.apache.org/repos/asf/hadoop.git


The following commit(s) were added to refs/heads/branch-3.4 by this push:
     new 42bd83366a3 HADOOP-19271. NPE in AbfsManagedApacheHttpConnection.toString() when not connected (#7040)
42bd83366a3 is described below

commit 42bd83366a35359716fdd9a78693e6c7fda0e2c9
Author: Pranav Saxena <108325433+saxenapra...@users.noreply.github.com>
AuthorDate: Mon Sep 16 22:51:20 2024 +0530

    HADOOP-19271. NPE in AbfsManagedApacheHttpConnection.toString() when not connected (#7040)
    
    
    Contributed by: Pranav Saxena
---
 .../services/AbfsManagedApacheHttpConnection.java  | 16 +++--
 .../services/ITestApacheClientConnectionPool.java  | 68 ++++++++++++++++++++++
 2 files changed, 79 insertions(+), 5 deletions(-)

diff --git a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/AbfsManagedApacheHttpConnection.java b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/AbfsManagedApacheHttpConnection.java
index 04697ab561e..bcac125e5f5 100644
--- a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/AbfsManagedApacheHttpConnection.java
+++ b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/AbfsManagedApacheHttpConnection.java
@@ -27,6 +27,7 @@ import java.util.UUID;
 import org.apache.http.HttpConnectionMetrics;
 import org.apache.http.HttpEntityEnclosingRequest;
 import org.apache.http.HttpException;
+import org.apache.http.HttpHost;
 import org.apache.http.HttpRequest;
 import org.apache.http.HttpResponse;
 import org.apache.http.conn.ManagedHttpClientConnection;
@@ -51,10 +52,17 @@ class AbfsManagedApacheHttpConnection
    */
   private AbfsManagedHttpClientContext managedHttpContext;
 
+  private final HttpHost targetHost;
+
   private final int hashCode;
 
   AbfsManagedApacheHttpConnection(ManagedHttpClientConnection conn,
       final HttpRoute route) {
+    if (route != null) {
+      targetHost = route.getTargetHost();
+    } else {
+      targetHost = null;
+    }
     this.httpClientConnection = conn;
     this.hashCode = (UUID.randomUUID().toString()
         + httpClientConnection.getId()).hashCode();
@@ -228,11 +236,9 @@ class AbfsManagedApacheHttpConnection
 
   @Override
   public String toString() {
-    StringBuilder stringBuilder = new StringBuilder();
-    stringBuilder.append(
-            httpClientConnection.getRemoteAddress().getHostName())
-        .append(COLON)
-        .append(httpClientConnection.getRemotePort())
+    StringBuilder stringBuilder = targetHost != null ? new StringBuilder(
+        targetHost.toString()) : new StringBuilder();
+    stringBuilder
         .append(COLON)
         .append(hashCode());
     return stringBuilder.toString();
diff --git a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/services/ITestApacheClientConnectionPool.java b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/services/ITestApacheClientConnectionPool.java
index d864fc8c1d3..6fe9acace00 100644
--- a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/services/ITestApacheClientConnectionPool.java
+++ b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/services/ITestApacheClientConnectionPool.java
@@ -18,6 +18,10 @@
 
 package org.apache.hadoop.fs.azurebfs.services;
 
+import java.io.IOException;
+import java.util.Map;
+
+import org.assertj.core.api.Assertions;
 import org.junit.Test;
 
 import org.apache.hadoop.conf.Configuration;
@@ -27,12 +31,26 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.azurebfs.AbstractAbfsIntegrationTest;
 import org.apache.hadoop.fs.azurebfs.AzureBlobFileSystem;
 import org.apache.hadoop.fs.azurebfs.contracts.exceptions.AbfsDriverException;
+import org.apache.hadoop.security.ssl.DelegatingSSLSocketFactory;
+import org.apache.hadoop.util.functional.Tuples;
+import org.apache.http.HttpHost;
+import org.apache.http.client.protocol.HttpClientContext;
+import org.apache.http.config.Registry;
+import org.apache.http.config.RegistryBuilder;
+import org.apache.http.config.SocketConfig;
+import org.apache.http.conn.routing.HttpRoute;
+import org.apache.http.conn.socket.ConnectionSocketFactory;
+import org.apache.http.conn.ssl.SSLConnectionSocketFactory;
+import org.apache.http.impl.conn.DefaultHttpClientConnectionOperator;
 
+import static org.apache.hadoop.fs.azurebfs.constants.AbfsHttpConstants.COLON;
 import static org.apache.hadoop.fs.azurebfs.constants.AbfsHttpConstants.KEEP_ALIVE_CACHE_CLOSED;
 import static org.apache.hadoop.fs.azurebfs.constants.ConfigurationKeys.FS_AZURE_NETWORKING_LIBRARY;
+import static org.apache.hadoop.fs.azurebfs.constants.FileSystemUriSchemes.HTTPS_SCHEME;
 import static org.apache.hadoop.fs.azurebfs.constants.HttpOperationType.APACHE_HTTP_CLIENT;
 import static org.apache.hadoop.test.LambdaTestUtils.intercept;
 import static org.apache.hadoop.test.LambdaTestUtils.verifyCause;
+import static org.apache.http.conn.ssl.SSLConnectionSocketFactory.getDefaultHostnameVerifier;
 
 /**
  * This test class tests the exception handling in ABFS thrown by the
@@ -60,4 +78,54 @@ public class ITestApacheClientConnectionPool extends
       verifyCause(ClosedIOException.class, ex);
     }
   }
+
+  @Test
+  public void testNonConnectedConnectionLogging() throws Exception {
+    Map.Entry<HttpRoute, AbfsManagedApacheHttpConnection> testConnPair
+        = getTestConnection();
+    AbfsManagedApacheHttpConnection conn = testConnPair.getValue();
+    String log = conn.toString();
+    Assertions.assertThat(log.split(COLON).length)
+        .describedAs("Log to have three fields: https://host:port:hashCode";)
+        .isEqualTo(4);
+  }
+
+  @Test
+  public void testConnectedConnectionLogging() throws Exception {
+    Map.Entry<HttpRoute, AbfsManagedApacheHttpConnection> testConnPair
+        = getTestConnection();
+    AbfsManagedApacheHttpConnection conn = testConnPair.getValue();
+    HttpRoute httpRoute = testConnPair.getKey();
+
+    Registry<ConnectionSocketFactory> socketFactoryRegistry
+        = RegistryBuilder.<ConnectionSocketFactory>create()
+        .register(HTTPS_SCHEME, new SSLConnectionSocketFactory(
+            DelegatingSSLSocketFactory.getDefaultFactory(),
+            getDefaultHostnameVerifier()))
+        .build();
+    new DefaultHttpClientConnectionOperator(
+        socketFactoryRegistry, null, null).connect(conn,
+        httpRoute.getTargetHost(), httpRoute.getLocalSocketAddress(),
+        getConfiguration().getHttpConnectionTimeout(), SocketConfig.DEFAULT,
+        new HttpClientContext());
+
+    String log = conn.toString();
+    Assertions.assertThat(log.split(COLON).length)
+        .describedAs("Log to have three fields: https://host:port:hashCode";)
+        .isEqualTo(4);
+  }
+
+  private Map.Entry<HttpRoute, AbfsManagedApacheHttpConnection> getTestConnection()
+      throws IOException {
+    HttpHost host = new HttpHost(getFileSystem().getUri().getHost(),
+        getFileSystem().getUri().getPort(),
+        HTTPS_SCHEME);
+    HttpRoute httpRoute = new HttpRoute(host);
+
+    AbfsManagedApacheHttpConnection conn
+        = (AbfsManagedApacheHttpConnection) new AbfsHttpClientConnectionFactory().create(
+        httpRoute, null);
+
+    return Tuples.pair(httpRoute, conn);
+  }
 }
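
For context, the NullPointerException occurred because toString() read
httpClientConnection.getRemoteAddress() from the underlying connection, which
is only populated once the socket is actually connected. The patch instead
captures the route's target host in the constructor and builds the string from
that, so logging a pooled-but-unconnected connection is safe. A minimal,
self-contained Java sketch of that null-safe pattern (illustrative only; the
class and field names below are placeholders, not the ABFS implementation):

    import org.apache.http.HttpHost;
    import org.apache.http.conn.routing.HttpRoute;

    class ConnectionLabel {
      private final HttpHost targetHost;   // may be null if no route was given
      private final int connectionId;

      ConnectionLabel(HttpRoute route, int connectionId) {
        // The target host is known from the route before any socket exists.
        this.targetHost = (route != null) ? route.getTargetHost() : null;
        this.connectionId = connectionId;
      }

      @Override
      public String toString() {
        // Never dereferences the (possibly unconnected) socket, so no NPE;
        // yields "https://host:port:<id>" when a route is present.
        StringBuilder sb = (targetHost != null)
            ? new StringBuilder(targetHost.toString())
            : new StringBuilder();
        return sb.append(":").append(connectionId).toString();
      }
    }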


---------------------------------------------------------------------
To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org
For additional commands, e-mail: common-commits-h...@hadoop.apache.org
