Repository: hadoop
Updated Branches:
  refs/heads/branch-2 98075d922 -> d9b3b5838
  refs/heads/branch-2.9 90074c136 -> fa47ae2f7


HDFS-14048. DFSOutputStream close() throws exception on subsequent call after 
DataNode restart. Contributed by Erik Krogen.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/d9b3b583
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/d9b3b583
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/d9b3b583

Branch: refs/heads/branch-2
Commit: d9b3b583894e345c1c9f7112a79190da5bfadda7
Parents: 98075d9
Author: Inigo Goiri <[email protected]>
Authored: Tue Nov 6 12:16:46 2018 -0800
Committer: Inigo Goiri <[email protected]>
Committed: Tue Nov 6 12:16:46 2018 -0800

----------------------------------------------------------------------
 .../src/main/java/org/apache/hadoop/hdfs/DataStreamer.java      | 5 +++--
 .../hadoop/hdfs/TestClientProtocolForPipelineRecovery.java      | 2 ++
 2 files changed, 5 insertions(+), 2 deletions(-)
----------------------------------------------------------------------
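For reference, a minimal client-side sketch of the scenario this patch addresses
(the path, configuration, and inline comments are illustrative assumptions, not
taken from this commit):

  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.fs.FSDataOutputStream;
  import org.apache.hadoop.fs.FileSystem;
  import org.apache.hadoop.fs.Path;

  Configuration conf = new Configuration();
  try (FileSystem fs = FileSystem.get(conf)) {
    FSDataOutputStream out = fs.create(new Path("/tmp/hdfs-14048-example"));
    out.write(1);
    // Suppose a DataNode in the write pipeline restarts here; the DataStreamer
    // recovers the pipeline, but before this fix it left the stale
    // "DataNode ... is restarting" IOException recorded as its last exception.
    out.close();  // first close succeeds after the pipeline recovery
    out.close();  // before HDFS-14048 this rethrew the stale exception;
                  // with the fix the second close() is an idempotent no-op
  }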


http://git-wip-us.apache.org/repos/asf/hadoop/blob/d9b3b583/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DataStreamer.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DataStreamer.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DataStreamer.java
index 02fb44a..fb7568d 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DataStreamer.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DataStreamer.java
@@ -290,7 +290,7 @@ class DataStreamer extends Daemon {
     }
     packets.clear();
   }
-  
+
   static class LastExceptionInStreamer {
     private IOException thrown;
 
@@ -1754,6 +1754,7 @@ class DataStreamer extends Daemon {
         blockStream = out;
         result =  true; // success
         errorState.reset();
+        lastException.clear();
         // remove all restarting nodes from failed nodes list
         failed.removeAll(restartingNodes);
         restartingNodes.clear();
@@ -1835,7 +1836,7 @@ class DataStreamer extends Daemon {
 
   protected LocatedBlock locateFollowingBlock(DatanodeInfo[] excluded,
       ExtendedBlock oldBlock) throws IOException {
-    final DfsClientConf conf = dfsClient.getConf(); 
+    final DfsClientConf conf = dfsClient.getConf();
     int retries = conf.getNumBlockWriteLocateFollowingRetry();
     long sleeptime = conf.getBlockWriteLocateFollowingInitialDelayMs();
     while (true) {

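The functional change is the single added lastException.clear() once the pipeline has
been rebuilt, so the exception recorded while a DataNode was restarting is not rethrown
by a later close(). A rough sketch of the bookkeeping involved (method names other than
clear() and the method bodies are simplified assumptions, not the full source; assumes
import java.io.IOException):

  static class LastExceptionInStreamer {
    private IOException thrown;

    synchronized void set(Throwable t) {
      // remember the first error seen by the streamer thread
      this.thrown = t instanceof IOException
          ? (IOException) t : new IOException(t);
    }

    synchronized void clear() {
      // forget the error once pipeline recovery has succeeded
      // (the call added in the hunk above)
      this.thrown = null;
    }

    synchronized void check() throws IOException {
      // DFSOutputStream.close() consults this; without the clear() call a stale
      // "DataNode is restarting" exception recorded during recovery is rethrown
      // by a second close() even though the first close() completed cleanly
      if (thrown != null) {
        throw thrown;
      }
    }
  }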
http://git-wip-us.apache.org/repos/asf/hadoop/blob/d9b3b583/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestClientProtocolForPipelineRecovery.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestClientProtocolForPipelineRecovery.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestClientProtocolForPipelineRecovery.java
index f5582be..6c22569 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestClientProtocolForPipelineRecovery.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestClientProtocolForPipelineRecovery.java
@@ -436,6 +436,8 @@ public class TestClientProtocolForPipelineRecovery {
           0, out.getStreamer().getPipelineRecoveryCount());
       out.write(1);
       out.close();
+      // Ensure that subsequent closes are idempotent and do not throw errors
+      out.close();
     } finally {
       if (cluster != null) {
         cluster.shutdown();

