HBASE-20732 Shutdown scan pool when master is stopped

Conflicts:
        
hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/CleanerChore.java


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/955264ed
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/955264ed
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/955264ed

Branch: refs/heads/branch-1.3
Commit: 955264ed437165fbb26f04172f617fb44835ff6b
Parents: 0824695
Author: Reid Chan <reidc...@apache.org>
Authored: Wed Jun 27 18:55:21 2018 +0800
Committer: Andrew Purtell <apurt...@apache.org>
Committed: Wed Dec 12 18:08:19 2018 -0800

----------------------------------------------------------------------
 .../org/apache/hadoop/hbase/master/HMaster.java |  1 +
 .../hbase/master/cleaner/CleanerChore.java      | 74 ++++++++++++--------
 .../hbase/master/cleaner/TestCleanerChore.java  | 16 +++--
 3 files changed, 55 insertions(+), 36 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hbase/blob/955264ed/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
index 67c7787..b47fecb 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
@@ -1228,6 +1228,7 @@ public class HMaster extends HRegionServer implements MasterServices, Server {
     }
     super.stopServiceThreads();
     stopChores();
+    CleanerChore.shutDownChorePool();
 
     // Wait for all the remaining region servers to report in IFF we were
     // running a cluster shutdown AND we were NOT aborting.

http://git-wip-us.apache.org/repos/asf/hbase/blob/955264ed/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/CleanerChore.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/CleanerChore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/CleanerChore.java
index b3f1f0a..7d38ddb 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/CleanerChore.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/CleanerChore.java
@@ -18,6 +18,7 @@
 package org.apache.hadoop.hbase.master.cleaner;
 
 import java.io.IOException;
+import java.util.Collections;
 import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
@@ -118,7 +119,7 @@ public abstract class CleanerChore<T extends FileCleanerDelegate> extends Schedu
           break;
         }
       }
-      pool.shutdownNow();
+      shutDownNow();
       LOG.info("Update chore's pool size from " + pool.getParallelism() + " to " + size);
       pool = new ForkJoinPool(size);
     }
@@ -136,6 +137,13 @@ public abstract class CleanerChore<T extends FileCleanerDelegate> extends Schedu
     synchronized void submit(ForkJoinTask task) {
       pool.submit(task);
     }
+
+    synchronized void shutDownNow() {
+      if (pool == null || pool.isShutdown()) {
+        return;
+      }
+      pool.shutdownNow();
+    }
   }
   // It may be waste resources for each cleaner chore own its pool,
   // so let's make pool for all cleaner chores.
@@ -148,17 +156,24 @@ public abstract class CleanerChore<T extends FileCleanerDelegate> extends Schedu
   protected Map<String, Object> params;
   private AtomicBoolean enabled = new AtomicBoolean(true);
 
-  public CleanerChore(String name, final int sleepPeriod, final Stoppable s, Configuration conf,
-                      FileSystem fs, Path oldFileDir, String confKey) {
-    this(name, sleepPeriod, s, conf, fs, oldFileDir, confKey, null);
-  }
-
   public static void initChorePool(Configuration conf) {
     if (POOL == null) {
       POOL = new DirScanPool(conf);
     }
   }
 
+  public static void shutDownChorePool() {
+    if (POOL != null) {
+      POOL.shutDownNow();
+      POOL = null;
+    }
+  }
+
+  public CleanerChore(String name, final int sleepPeriod, final Stoppable s, Configuration conf,
+                      FileSystem fs, Path oldFileDir, String confKey) {
+    this(name, sleepPeriod, s, conf, fs, oldFileDir, confKey, null);
+  }
+
   /**
    * @param name name of the chore being run
    * @param sleepPeriod the period of time to sleep between each run
@@ -432,6 +447,7 @@ public abstract class CleanerChore<T extends FileCleanerDelegate> extends Schedu
     protected Boolean compute() {
       LOG.trace("Cleaning under " + dir);
       List<FileStatus> subDirs;
+      List<FileStatus> tmpFiles;
       final List<FileStatus> files;
       try {
         // if dir doesn't exist, we'll get null back for both of these
@@ -442,48 +458,48 @@ public abstract class CleanerChore<T extends FileCleanerDelegate> extends Schedu
             return f.isDirectory();
           }
         });
-        files = FSUtils.listStatusWithStatusFilter(fs, dir, new FileStatusFilter() {
+        if (subDirs == null) {
+          subDirs = Collections.emptyList();
+        }
+        tmpFiles = FSUtils.listStatusWithStatusFilter(fs, dir, new FileStatusFilter() {
           @Override
           public boolean accept(FileStatus f) {
             return f.isFile();
           }
         });
+        files = tmpFiles == null ? Collections.<FileStatus>emptyList() : tmpFiles;
       } catch (IOException ioe) {
         LOG.warn("failed to get FileStatus for contents of '" + dir + "'", ioe);
         return false;
       }
 
-      boolean nullSubDirs = subDirs == null;
-      if (nullSubDirs) {
-        LOG.trace("There is no subdir under " + dir);
-      }
-      if (files == null) {
-        LOG.trace("There is no file under " + dir);
+      boolean allFilesDeleted = true;
+      if (!files.isEmpty()) {
+        allFilesDeleted = deleteAction(new Action<Boolean>() {
+          @Override
+          public Boolean act() throws IOException {
+            return checkAndDeleteFiles(files);
+          }
+        }, "files");
       }
 
-      int capacity = nullSubDirs ? 0 : subDirs.size();
-      final List<CleanerTask> tasks = Lists.newArrayListWithCapacity(capacity);
-      if (!nullSubDirs) {
+      boolean allSubdirsDeleted = true;
+      if (!subDirs.isEmpty()) {
+        final List<CleanerTask> tasks = Lists.newArrayListWithCapacity(subDirs.size());
         for (FileStatus subdir : subDirs) {
           CleanerTask task = new CleanerTask(subdir, false);
           tasks.add(task);
           task.fork();
         }
+        allSubdirsDeleted = deleteAction(new Action<Boolean>() {
+          @Override
+          public Boolean act() throws IOException {
+            return getCleanResult(tasks);
+          }
+        }, "subdirs");
       }
 
-      boolean result = true;
-      result &= deleteAction(new Action<Boolean>() {
-        @Override
-        public Boolean act() throws IOException {
-          return checkAndDeleteFiles(files);
-        }
-      }, "files");
-      result &= deleteAction(new Action<Boolean>() {
-        @Override
-        public Boolean act() throws IOException {
-          return getCleanResult(tasks);
-        }
-      }, "subdirs");
+      boolean result = allFilesDeleted && allSubdirsDeleted;
       // if and only if files and subdirs under current dir are deleted successfully, and
       // it is not the root dir, then task will try to delete it.
       if (result && !root) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/955264ed/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestCleanerChore.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestCleanerChore.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestCleanerChore.java
index 7711354..12f78be 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestCleanerChore.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestCleanerChore.java
@@ -38,8 +38,8 @@ import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.apache.hadoop.hbase.Stoppable;
 import org.apache.hadoop.hbase.util.FSUtils;
 import org.apache.hadoop.hbase.util.StoppableImplementation;
-import org.junit.After;
-import org.junit.Before;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 import org.mockito.Mockito;
@@ -52,16 +52,17 @@ public class TestCleanerChore {
   private static final Log LOG = LogFactory.getLog(TestCleanerChore.class);
   private static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
 
-  @Before
-  public void setup() throws Exception {
+  @BeforeClass
+  public static void setup() {
     CleanerChore.initChorePool(UTIL.getConfiguration());
   }
 
-  @After
-  public void cleanup() throws Exception {
+  @AfterClass
+  public static void cleanup() throws Exception {
     // delete and recreate the test directory, ensuring a clean test dir between tests
     UTIL.cleanupTestDir();
-}
+    CleanerChore.shutDownChorePool();
+  }
 
 
   @Test
@@ -296,6 +297,7 @@ public class TestCleanerChore {
    */
   @Test
   public void testNoExceptionFromDirectoryWithRacyChildren() throws Exception {
+    UTIL.cleanupTestDir();
     Stoppable stop = new StoppableImplementation();
     // need to use a localutil to not break the rest of the test that runs on the local FS, which
     // gets hosed when we start to use a minicluster.

Reply via email to