[ 
https://issues.apache.org/jira/browse/HADOOP-17890?focusedWorklogId=647113&page=com.atlassian.jira.plugin.system.issuetabpanels:worklog-tabpanel#worklog-647113
 ]

ASF GitHub Bot logged work on HADOOP-17890:
-------------------------------------------

                Author: ASF GitHub Bot
            Created on: 06/Sep/21 19:23
            Start Date: 06/Sep/21 19:23
    Worklog Time Spent: 10m 
      Work Description: steveloughran commented on a change in pull request 
#3381:
URL: https://github.com/apache/hadoop/pull/3381#discussion_r703046253



##########
File path: 
hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/AbfsHttpOperation.java
##########
@@ -369,58 +378,75 @@ public void processResponse(final byte[] buffer, final 
int offset, final int len
       startTime = System.nanoTime();
     }
 
-    if (statusCode >= HttpURLConnection.HTTP_BAD_REQUEST) {
-      processStorageErrorResponse();
+    long totalBytesRead = 0;
+
+    try {
+      totalBytesRead = parseResponse(buffer, offset, length);
+    } finally {
       if (this.isTraceEnabled) {
         this.recvResponseTimeMs += elapsedTimeMs(startTime);
       }
-      this.bytesReceived = 
this.connection.getHeaderFieldLong(HttpHeaderConfigurations.CONTENT_LENGTH, 0);
-    } else {
-      // consume the input stream to release resources
-      int totalBytesRead = 0;
+      this.bytesReceived = totalBytesRead;
+    }
+  }
 
+  public long parseResponse(final byte[] buffer,
+      final int offset,
+      final int length) throws IOException {
+    if (statusCode >= HttpURLConnection.HTTP_BAD_REQUEST) {
+      processStorageErrorResponse();
+      return this.connection.getHeaderFieldLong(

Review comment:
       nit: no need for `this.`

##########
File path: 
hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/AbfsHttpOperation.java
##########
@@ -369,58 +378,75 @@ public void processResponse(final byte[] buffer, final 
int offset, final int len
       startTime = System.nanoTime();
     }
 
-    if (statusCode >= HttpURLConnection.HTTP_BAD_REQUEST) {
-      processStorageErrorResponse();
+    long totalBytesRead = 0;
+
+    try {
+      totalBytesRead = parseResponse(buffer, offset, length);
+    } finally {
       if (this.isTraceEnabled) {
         this.recvResponseTimeMs += elapsedTimeMs(startTime);
       }
-      this.bytesReceived = 
this.connection.getHeaderFieldLong(HttpHeaderConfigurations.CONTENT_LENGTH, 0);
-    } else {
-      // consume the input stream to release resources
-      int totalBytesRead = 0;
+      this.bytesReceived = totalBytesRead;
+    }
+  }
 
+  public long parseResponse(final byte[] buffer,
+      final int offset,
+      final int length) throws IOException {
+    if (statusCode >= HttpURLConnection.HTTP_BAD_REQUEST) {
+      processStorageErrorResponse();
+      return this.connection.getHeaderFieldLong(
+          HttpHeaderConfigurations.CONTENT_LENGTH, 0);
+    } else {
       try (InputStream stream = this.connection.getInputStream()) {
         if (isNullInputStream(stream)) {
-          return;
+          return 0;
         }
-        boolean endOfStream = false;
 
-        // this is a list operation and need to retrieve the data
-        // need a better solution
-        if (AbfsHttpConstants.HTTP_METHOD_GET.equals(this.method) && buffer == 
null) {
+        if (AbfsHttpConstants.HTTP_METHOD_GET.equals(this.method)
+            && buffer == null) {
           parseListFilesResponse(stream);
         } else {
-          if (buffer != null) {
-            while (totalBytesRead < length) {
-              int bytesRead = stream.read(buffer, offset + totalBytesRead, 
length - totalBytesRead);
-              if (bytesRead == -1) {
-                endOfStream = true;
-                break;
-              }
-              totalBytesRead += bytesRead;
-            }
-          }
-          if (!endOfStream && stream.read() != -1) {
-            // read and discard
-            int bytesRead = 0;
-            byte[] b = new byte[CLEAN_UP_BUFFER_SIZE];
-            while ((bytesRead = stream.read(b)) >= 0) {
-              totalBytesRead += bytesRead;
-            }
-          }
+          return readDataFromStream(stream, buffer, offset, length);
         }
-      } catch (IOException ex) {
-        LOG.warn("IO/Network error: {} {}: {}",
-            method, getMaskedUrl(), ex.getMessage());
-        LOG.debug("IO Error: ", ex);
-        throw ex;
-      } finally {
-        if (this.isTraceEnabled) {
-          this.recvResponseTimeMs += elapsedTimeMs(startTime);
+      }
+    }
+
+    return 0;
+  }
+
+  public long readDataFromStream(final InputStream stream,
+      final byte[] buffer,
+      final int offset,
+      final int length) throws IOException {
+    // consume the input stream to release resources
+    int totalBytesRead = 0;
+    boolean endOfStream = false;
+
+    if (buffer != null) {

Review comment:
       does this ever happen? why?

##########
File path: 
hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/AbfsHttpOperation.java
##########
@@ -369,58 +378,75 @@ public void processResponse(final byte[] buffer, final 
int offset, final int len
       startTime = System.nanoTime();
     }
 
-    if (statusCode >= HttpURLConnection.HTTP_BAD_REQUEST) {
-      processStorageErrorResponse();
+    long totalBytesRead = 0;
+
+    try {
+      totalBytesRead = parseResponse(buffer, offset, length);
+    } finally {
       if (this.isTraceEnabled) {
         this.recvResponseTimeMs += elapsedTimeMs(startTime);
       }
-      this.bytesReceived = 
this.connection.getHeaderFieldLong(HttpHeaderConfigurations.CONTENT_LENGTH, 0);
-    } else {
-      // consume the input stream to release resources
-      int totalBytesRead = 0;
+      this.bytesReceived = totalBytesRead;
+    }
+  }
 
+  public long parseResponse(final byte[] buffer,
+      final int offset,
+      final int length) throws IOException {
+    if (statusCode >= HttpURLConnection.HTTP_BAD_REQUEST) {
+      processStorageErrorResponse();
+      return this.connection.getHeaderFieldLong(
+          HttpHeaderConfigurations.CONTENT_LENGTH, 0);
+    } else {
       try (InputStream stream = this.connection.getInputStream()) {
         if (isNullInputStream(stream)) {
-          return;
+          return 0;
         }
-        boolean endOfStream = false;
 
-        // this is a list operation and need to retrieve the data
-        // need a better solution
-        if (AbfsHttpConstants.HTTP_METHOD_GET.equals(this.method) && buffer == 
null) {
+        if (AbfsHttpConstants.HTTP_METHOD_GET.equals(this.method)
+            && buffer == null) {
           parseListFilesResponse(stream);
         } else {
-          if (buffer != null) {
-            while (totalBytesRead < length) {
-              int bytesRead = stream.read(buffer, offset + totalBytesRead, 
length - totalBytesRead);
-              if (bytesRead == -1) {
-                endOfStream = true;
-                break;
-              }
-              totalBytesRead += bytesRead;
-            }
-          }
-          if (!endOfStream && stream.read() != -1) {
-            // read and discard
-            int bytesRead = 0;
-            byte[] b = new byte[CLEAN_UP_BUFFER_SIZE];
-            while ((bytesRead = stream.read(b)) >= 0) {
-              totalBytesRead += bytesRead;
-            }
-          }
+          return readDataFromStream(stream, buffer, offset, length);
         }
-      } catch (IOException ex) {
-        LOG.warn("IO/Network error: {} {}: {}",
-            method, getMaskedUrl(), ex.getMessage());
-        LOG.debug("IO Error: ", ex);
-        throw ex;
-      } finally {
-        if (this.isTraceEnabled) {
-          this.recvResponseTimeMs += elapsedTimeMs(startTime);
+      }
+    }
+
+    return 0;
+  }
+
+  public long readDataFromStream(final InputStream stream,

Review comment:
       javadocs

##########
File path: 
hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/AbfsStatistic.java
##########
@@ -75,6 +75,8 @@
       "Total bytes uploaded."),
   BYTES_RECEIVED("bytes_received",
       "Total bytes received."),
+  BYTES_DISCARDED_AT_SOCKET_READ("bytes_discarded_at_socket_read",

Review comment:
       StreamStatisticNames already has two counters for discarded bytes...use 
one of those or, if this is a new variant, add a new option to that class 
and then reference it

##########
File path: 
hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/AbfsHttpOperation.java
##########
@@ -74,6 +76,7 @@
   // metrics
   private int bytesSent;
   private long bytesReceived;
+  private long bytesDiscarded;

Review comment:
       nit, javadocs for the new attribute: try to do better than your 
predecessors....

##########
File path: 
hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/AbfsHttpOperation.java
##########
@@ -369,58 +378,75 @@ public void processResponse(final byte[] buffer, final 
int offset, final int len
       startTime = System.nanoTime();
     }
 
-    if (statusCode >= HttpURLConnection.HTTP_BAD_REQUEST) {
-      processStorageErrorResponse();
+    long totalBytesRead = 0;
+
+    try {
+      totalBytesRead = parseResponse(buffer, offset, length);
+    } finally {
       if (this.isTraceEnabled) {
         this.recvResponseTimeMs += elapsedTimeMs(startTime);
       }
-      this.bytesReceived = 
this.connection.getHeaderFieldLong(HttpHeaderConfigurations.CONTENT_LENGTH, 0);
-    } else {
-      // consume the input stream to release resources
-      int totalBytesRead = 0;
+      this.bytesReceived = totalBytesRead;
+    }
+  }
 
+  public long parseResponse(final byte[] buffer,

Review comment:
       add javadocs




-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]


Issue Time Tracking
-------------------

    Worklog Id:     (was: 647113)
    Time Spent: 20m  (was: 10m)

> ABFS: Refactor HTTP request handling code
> -----------------------------------------
>
>                 Key: HADOOP-17890
>                 URL: https://issues.apache.org/jira/browse/HADOOP-17890
>             Project: Hadoop Common
>          Issue Type: Sub-task
>          Components: fs/azure
>    Affects Versions: 3.4.0
>            Reporter: Sneha Vijayarajan
>            Assignee: Sneha Vijayarajan
>            Priority: Major
>              Labels: pull-request-available
>          Time Spent: 20m
>  Remaining Estimate: 0h
>
> Aims at Http request handling code refactoring.



--
This message was sent by Atlassian Jira
(v8.3.4#803005)

---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to