Repository: hbase
Updated Branches:
  refs/heads/0.98 4617b13a7 -> cf4834c81


HBASE-12683 Compilation with hadoop-2.7.0-SNAPSHOT is broken


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/cf4834c8
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/cf4834c8
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/cf4834c8

Branch: refs/heads/0.98
Commit: cf4834c813c49d16d1a37eb6419d15ac59605319
Parents: 4617b13
Author: Enis Soztutar <[email protected]>
Authored: Mon Dec 15 11:33:30 2014 -0800
Committer: Enis Soztutar <[email protected]>
Committed: Mon Dec 15 11:34:05 2014 -0800

----------------------------------------------------------------------
 .../org/apache/hadoop/hbase/util/Threads.java   | 49 +++++++++++++++++++-
 .../org/apache/hadoop/hbase/master/HMaster.java |  3 +-
 .../hadoop/hbase/master/MasterDumpServlet.java  | 27 ++++++-----
 .../hbase/regionserver/RSDumpServlet.java       | 11 +++--
 .../hadoop/hbase/util/JVMClusterUtil.java       |  3 +-
 5 files changed, 72 insertions(+), 21 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hbase/blob/cf4834c8/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Threads.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Threads.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Threads.java
index 867fb80..fd21012 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Threads.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Threads.java
@@ -18,8 +18,11 @@
  */
 package org.apache.hadoop.hbase.util;
 
+import java.io.PrintStream;
 import java.io.PrintWriter;
 import java.lang.Thread.UncaughtExceptionHandler;
+import java.lang.reflect.InvocationTargetException;
+import java.lang.reflect.Method;
 import java.util.concurrent.LinkedBlockingQueue;
 import java.util.concurrent.ThreadFactory;
 import java.util.concurrent.ThreadPoolExecutor;
@@ -116,7 +119,7 @@ public class Threads {
     while (t.isAlive()) {
       t.join(60 * 1000);
       if (t.isAlive()) {
-        ReflectionUtils.printThreadInfo(new PrintWriter(System.out),
+        printThreadInfo(System.out,
             "Automatic Stack Trace every 60 seconds waiting on " +
             t.getName());
       }
@@ -242,4 +245,48 @@ public class Threads {
 
     };
   }
+
+  private static Method printThreadInfoMethod = null;
+  private static boolean printThreadInfoMethodWithPrintStream = true;
+
+  /**
+   * Print all of the thread's information and stack traces. Wrapper around Hadoop's method.
+   *
+   * @param stream the stream to
+   * @param title a string title for the stack trace
+   */
+  public static void printThreadInfo(PrintStream stream, String title) {
+
+    if (printThreadInfoMethod == null) {
+      try {
+        // Hadoop 2.7+ declares printThreadInfo(PrintStream, String)
+        printThreadInfoMethod = ReflectionUtils.class.getMethod("printThreadInfo",
+          PrintStream.class, String.class);
+      } catch (NoSuchMethodException e) {
+        // Hadoop 2.6 and earlier declares printThreadInfo(PrintWriter, String)
+        printThreadInfoMethodWithPrintStream = false;
+        try {
+          printThreadInfoMethod = ReflectionUtils.class.getMethod("printThreadInfo",
+            PrintWriter.class, String.class);
+        } catch (NoSuchMethodException e1) {
+          throw new RuntimeException("Cannot find method. Check hadoop jars linked", e1);
+        }
+      }
+      printThreadInfoMethod.setAccessible(true);
+    }
+
+    try {
+      if (printThreadInfoMethodWithPrintStream) {
+        printThreadInfoMethod.invoke(null, stream, title);
+      } else {
+        printThreadInfoMethod.invoke(null, new PrintWriter(stream), title);
+      }
+    } catch (IllegalAccessException e) {
+      throw new RuntimeException(e.getCause());
+    } catch (IllegalArgumentException e) {
+      throw new RuntimeException(e.getCause());
+    } catch (InvocationTargetException e) {
+      throw new RuntimeException(e.getCause());
+    }
+  }
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/cf4834c8/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
index 8cdb144..453b35d 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
@@ -329,8 +329,7 @@ MasterServices, Server {
                 + " consider submitting a bug report including a thread dump 
of this process.");
             if (haltOnTimeout) {
               LOG.error("Zombie Master exiting. Thread dump to stdout");
-              org.apache.hadoop.util.ReflectionUtils.printThreadInfo(
-                  new PrintWriter(System.out), "Zombie HMaster");
+              Threads.printThreadInfo(System.out, "Zombie HMaster");
               System.exit(-1);
             }
           }

http://git-wip-us.apache.org/repos/asf/hbase/blob/cf4834c8/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterDumpServlet.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterDumpServlet.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterDumpServlet.java
index 92e1302..b14a835 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterDumpServlet.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterDumpServlet.java
@@ -20,6 +20,7 @@ package org.apache.hadoop.hbase.master;
 
 import java.io.IOException;
 import java.io.OutputStream;
+import java.io.PrintStream;
 import java.io.PrintWriter;
 import java.util.Date;
 import java.util.Map;
@@ -34,7 +35,7 @@ import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.monitoring.LogMonitoring;
 import org.apache.hadoop.hbase.monitoring.StateDumpServlet;
 import org.apache.hadoop.hbase.monitoring.TaskMonitor;
-import org.apache.hadoop.util.ReflectionUtils;
+import org.apache.hadoop.hbase.util.Threads;
 
 @InterfaceAudience.Private
 public class MasterDumpServlet extends StateDumpServlet {
@@ -51,10 +52,10 @@ public class MasterDumpServlet extends StateDumpServlet {
     response.setContentType("text/plain");
     OutputStream os = response.getOutputStream();
     PrintWriter out = new PrintWriter(os);
-    
+
     out.println("Master status for " + master.getServerName()
         + " as of " + new Date());
-    
+
     out.println("\n\nVersion Info:");
     out.println(LINE);
     dumpVersionInfo(out);
@@ -62,34 +63,36 @@ public class MasterDumpServlet extends StateDumpServlet {
     out.println("\n\nTasks:");
     out.println(LINE);
     TaskMonitor.get().dumpAsText(out);
-    
+
     out.println("\n\nServers:");
     out.println(LINE);
     dumpServers(master, out);
-    
+
     out.println("\n\nRegions-in-transition:");
     out.println(LINE);
     dumpRIT(master, out);
-    
+
     out.println("\n\nExecutors:");
     out.println(LINE);
     dumpExecutors(master.getExecutorService(), out);
-    
+
     out.println("\n\nStacks:");
     out.println(LINE);
-    ReflectionUtils.printThreadInfo(out, "");
-    
+    PrintStream ps = new PrintStream(response.getOutputStream(), false, "UTF-8");
+    Threads.printThreadInfo(ps, "");
+    ps.flush();
+
     out.println("\n\nMaster configuration:");
     out.println(LINE);
     Configuration conf = master.getConfiguration();
     out.flush();
     conf.writeXml(os);
     os.flush();
-    
+
     out.println("\n\nRecent regionserver aborts:");
     out.println(LINE);
     master.getRegionServerFatalLogBuffer().dumpTo(out);
-    
+
     out.println("\n\nLogs");
     out.println(LINE);
     long tailKb = getTailKbParam(request);
@@ -97,7 +100,7 @@ public class MasterDumpServlet extends StateDumpServlet {
     
     out.flush();
   }
-  
+
 
   private void dumpRIT(HMaster master, PrintWriter out) {
     Map<String, RegionState> regionsInTransition =

http://git-wip-us.apache.org/repos/asf/hbase/blob/cf4834c8/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSDumpServlet.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSDumpServlet.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSDumpServlet.java
index d82baad..f2aab40 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSDumpServlet.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSDumpServlet.java
@@ -20,6 +20,7 @@ package org.apache.hadoop.hbase.regionserver;
 
 import java.io.IOException;
 import java.io.OutputStream;
+import java.io.PrintStream;
 import java.io.PrintWriter;
 import java.util.Date;
 
@@ -31,14 +32,14 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.monitoring.LogMonitoring;
 import org.apache.hadoop.hbase.monitoring.StateDumpServlet;
 import org.apache.hadoop.hbase.monitoring.TaskMonitor;
-import org.apache.hadoop.util.ReflectionUtils;
+import org.apache.hadoop.hbase.util.Threads;
 
 @InterfaceAudience.Private
 public class RSDumpServlet extends StateDumpServlet {
   private static final long serialVersionUID = 1L;
   private static final String LINE =
     "===========================================================";
-  
+
   @Override
   public void doGet(HttpServletRequest request, HttpServletResponse response)
       throws IOException {
@@ -51,7 +52,7 @@ public class RSDumpServlet extends StateDumpServlet {
     assert hrsconf != null : "No RS conf in context";
 
     response.setContentType("text/plain");
- 
+
     if (!hrs.isOnline()) {
       response.getWriter().write("The RegionServer is initializing!");
       response.getWriter().close();
@@ -78,7 +79,9 @@ public class RSDumpServlet extends StateDumpServlet {
     
     out.println("\n\nStacks:");
     out.println(LINE);
-    ReflectionUtils.printThreadInfo(out, "");
+    PrintStream ps = new PrintStream(response.getOutputStream(), false, "UTF-8");
+    Threads.printThreadInfo(ps, "");
+    ps.flush();
     
     out.println("\n\nRS Configuration:");
     out.println(LINE);

http://git-wip-us.apache.org/repos/asf/hbase/blob/cf4834c8/hbase-server/src/main/java/org/apache/hadoop/hbase/util/JVMClusterUtil.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/JVMClusterUtil.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/JVMClusterUtil.java
index b91d761..f6bd815 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/JVMClusterUtil.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/JVMClusterUtil.java
@@ -19,7 +19,6 @@
 package org.apache.hadoop.hbase.util;
 
 import java.io.IOException;
-import java.io.PrintWriter;
 import java.lang.reflect.Constructor;
 import java.lang.reflect.InvocationTargetException;
 import java.util.List;
@@ -216,7 +215,7 @@ public class JVMClusterUtil {
       }
       if (System.currentTimeMillis() > startTime + maxwait) {
         String msg = "Master not initialized after " + maxwait + "ms seconds";
-        ReflectionUtils.printThreadInfo(new PrintWriter(System.out),
+        Threads.printThreadInfo(System.out,
           "Thread dump because: " + msg);
         throw new RuntimeException(msg);
       }

Reply via email to