Author: stack
Date: Mon Jan 21 08:21:46 2008
New Revision: 613923

URL: http://svn.apache.org/viewvc?rev=613923&view=rev
Log:
HADOOP-2650 Remove Writables.clone and use WritableUtils.clone from
            hadoop instead
HADOOP-2584 Web UI displays an IOException instead of the Tables

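For context on HADOOP-2650: the hbase-local Writables.clone helper is removed below and call sites switch to hadoop's WritableUtils.clone, which has taken a Configuration (rather than a JobConf) since HADOOP-2469. A minimal sketch of the replacement call, assuming the classes already on this branch's classpath; the class and variable names here are illustrative only:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableUtils;

public class CloneSketch {
  public static void main(String[] args) {
    Configuration conf = new HBaseConfiguration();
    // Any Writable will do; Text stands in for e.g. a RowFilterInterface.
    Writable original = new Text("row-key");
    // Deep copy via serialization; replaces the removed Writables.clone(orig, conf).
    Writable copy = WritableUtils.clone(original, conf);
    System.out.println(copy);
  }
}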

Modified:
    lucene/hadoop/trunk/src/contrib/hbase/CHANGES.txt
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HBaseAdmin.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HConnectionManager.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HMaster.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HRegion.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HTable.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/generated/master/master_jsp.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/generated/regionserver/regionserver_jsp.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/util/Writables.java
    lucene/hadoop/trunk/src/contrib/hbase/src/webapps/master/hql.jsp
    lucene/hadoop/trunk/src/contrib/hbase/src/webapps/master/master.jsp
    lucene/hadoop/trunk/src/contrib/hbase/src/webapps/regionserver/regionserver.jsp

Modified: lucene/hadoop/trunk/src/contrib/hbase/CHANGES.txt
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/CHANGES.txt?rev=613923&r1=613922&r2=613923&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/CHANGES.txt (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/CHANGES.txt Mon Jan 21 08:21:46 2008
@@ -140,6 +140,9 @@
    HADOOP-2619 Compaction errors after a region splits
    HADOOP-2621 Memcache flush flushing every 60 secs with out considering
                the max memcache size
+   HADOOP-2584 Web UI displays an IOException instead of the Tables
+   HADOOP-2650 Remove Writables.clone and use WritableUtils.clone from
+               hadoop instead
    
   IMPROVEMENTS
    HADOOP-2401 Add convenience put method that takes writable

Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HBaseAdmin.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HBaseAdmin.java?rev=613923&r1=613922&r2=613923&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HBaseAdmin.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HBaseAdmin.java Mon Jan 21 08:21:46 2008
@@ -22,13 +22,12 @@
 import java.io.IOException;
 import java.util.Map;
 import java.util.NoSuchElementException;
-import java.util.SortedMap;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hbase.io.HbaseMapWritable;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.hbase.util.Writables;
-import org.apache.hadoop.hbase.io.HbaseMapWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.ipc.RemoteException;

Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HConnectionManager.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HConnectionManager.java?rev=613923&r1=613922&r2=613923&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HConnectionManager.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HConnectionManager.java Mon Jan 21 08:21:46 2008
@@ -249,12 +249,10 @@
       do {
         try{
           // turn the start row into a location
-          metaLocation = 
-            locateRegion(META_TABLE_NAME, startRow);
+          metaLocation = locateRegion(META_TABLE_NAME, startRow);
 
           // connect to the server hosting the .META. region
-          server = 
-            getHRegionConnection(metaLocation.getServerAddress());
+          server = getHRegionConnection(metaLocation.getServerAddress());
 
           // open a scanner over the meta region
           scannerId = server.openScanner(
@@ -289,8 +287,9 @@
           // advance the startRow to the end key of the current region
           startRow = metaLocation.getRegionInfo().getEndKey();          
         } catch (IOException e) {
-          // need retry logic?
-          throw e;
+          // Retry once.
+          metaLocation = relocateRegion(META_TABLE_NAME, startRow);
+          continue;
         }
         finally {
           if (scannerId != -1L) {

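The HConnectionManager hunk above replaces the bare rethrow with a relocate-and-retry of the .META. scan. A standalone sketch of that control flow, with hypothetical locate/relocate/scan methods standing in for the real internals (not the actual HBase code):

import java.io.IOException;

public class RetryOnceSketch {
  // Hypothetical stand-ins for locateRegion/relocateRegion/openScanner.
  interface MetaAccess {
    String locate(String startRow) throws IOException;
    String relocate(String startRow) throws IOException;
    String scanFrom(String location) throws IOException; // next start row, or null when done
  }

  static void walkMeta(MetaAccess meta) throws IOException {
    String startRow = "";
    while (startRow != null) {
      try {
        String location = meta.locate(startRow);
        startRow = meta.scanFrom(location);
      } catch (IOException e) {
        // Refresh the cached location and go around the loop again instead of
        // rethrowing; a failure during relocation still propagates to the caller.
        meta.relocate(startRow);
        continue;
      }
    }
  }
}
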
Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HMaster.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HMaster.java?rev=613923&r1=613922&r2=613923&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HMaster.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HMaster.java Mon Jan 21 08:21:46 2008
@@ -3182,6 +3182,13 @@
     }
   }
 
+  /**
+   * @return Configuration being used by this server.
+   */
+  public HBaseConfiguration getConfiguration() {
+    return this.conf;
+  }
+
   /*
    * Main program
    */

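The new accessor above is what lets the web UI reuse the master's live configuration instead of constructing a fresh HBaseConfiguration on each page load (see the master_jsp.java and master.jsp hunks further down, part of the HADOOP-2584 fix). A minimal sketch of the intended call pattern, assuming only the servlet-context attribute shown in those hunks; the helper class name is made up:

import javax.servlet.ServletContext;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HMaster;

public class MasterConfigSketch {
  /** Mirrors what the generated master_jsp does after this change. */
  static HBaseConfiguration configFor(ServletContext ctx) {
    HMaster master = (HMaster) ctx.getAttribute(HMaster.MASTER);
    // Reuse the running master's configuration rather than new HBaseConfiguration().
    return master.getConfiguration();
  }
}
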
Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HRegion.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HRegion.java?rev=613923&r1=613922&r2=613923&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HRegion.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HRegion.java Mon Jan 21 08:21:46 2008
@@ -44,6 +44,7 @@
 import org.apache.hadoop.hbase.io.BatchUpdate;
 import org.apache.hadoop.hbase.util.Writables;
 import org.apache.hadoop.io.Text;
+import org.apache.hadoop.io.WritableUtils;
 import org.apache.hadoop.util.StringUtils;
 
 /**
@@ -1460,15 +1461,15 @@
       this.log.append(regionInfo.getRegionName(),
           regionInfo.getTableDesc().getName(), updatesByColumn);
 
-      long memcacheSize = 0;
+      long size = 0;
       for (Map.Entry<HStoreKey, byte[]> e: updatesByColumn.entrySet()) {
         HStoreKey key = e.getKey();
         byte[] val = e.getValue();
-        memcacheSize = this.memcacheSize.addAndGet(key.getSize() +
+        size = this.memcacheSize.addAndGet(key.getSize() +
             (val == null ? 0 : val.length));
         stores.get(HStoreKey.extractFamily(key.getColumn())).add(key, val);
       }
-      if (this.flushListener != null && memcacheSize > this.memcacheFlushSize) {
+      if (this.flushListener != null && size > this.memcacheFlushSize) {
         // Request a cache flush
         this.flushListener.flushRequested(this);
       }
@@ -1626,7 +1627,7 @@
           // one shared across many rows. See HADOOP-2467.
           scanners[i] = stores[i].getScanner(timestamp, cols, firstRow,
             (i > 0 && filter != null)?
-              (RowFilterInterface)Writables.clone(filter, conf): filter);
+              (RowFilterInterface)WritableUtils.clone(filter, conf): filter);
         }
       } catch(IOException e) {
         for (int i = 0; i < this.scanners.length; i++) {
@@ -1758,14 +1759,14 @@
         }
       } finally {
         synchronized (activeScannerCount) {
-          int scanners = activeScannerCount.decrementAndGet();
-          if (scanners < 0) {
-            LOG.error("active scanner count less than zero: " + scanners +
+          int count = activeScannerCount.decrementAndGet();
+          if (count < 0) {
+            LOG.error("active scanner count less than zero: " + count +
                 " resetting to zero");
             activeScannerCount.set(0);
-            scanners = 0;
+            count = 0;
           }
-          if (scanners == 0) {
+          if (count == 0) {
             activeScannerCount.notifyAll();
           }
         }

Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HTable.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HTable.java?rev=613923&r1=613922&r2=613923&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HTable.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HTable.java Mon Jan 21 08:21:46 2008
@@ -20,17 +20,15 @@
 package org.apache.hadoop.hbase;
 
 import java.io.IOException;
-import java.util.List;
 import java.util.ArrayList;
-import java.util.Collection;
 import java.util.Iterator;
+import java.util.List;
 import java.util.Map;
 import java.util.Random;
 import java.util.SortedMap;
 import java.util.TreeMap;
 import java.util.Map.Entry;
 import java.util.concurrent.atomic.AtomicReference;
-import java.util.concurrent.atomic.AtomicReferenceArray;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -38,9 +36,9 @@
 import org.apache.hadoop.hbase.filter.StopRowFilter;
 import org.apache.hadoop.hbase.filter.WhileMatchRowFilter;
 import org.apache.hadoop.hbase.io.BatchUpdate;
+import org.apache.hadoop.hbase.io.HbaseMapWritable;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.hbase.util.Writables;
-import org.apache.hadoop.hbase.io.HbaseMapWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.ipc.RemoteException;
@@ -105,13 +103,16 @@
   }
 
   /**
-   * Find region location hosting passed row using cached info
+   * Find region location hosting passed row
    * @param row Row to find.
+   * @param reload If true, bypass the cache and look up the region again; otherwise use the cached location.
    * @return Location of row.
    */
   HRegionLocation getRegionLocation(Text row, boolean reload) throws IOException {
     checkClosed();
-    return this.connection.relocateRegion(this.tableName, row);
+    return reload?
+      this.connection.relocateRegion(this.tableName, row):
+      this.connection.locateRegion(tableName, row);
   }
 
 
@@ -454,7 +455,7 @@
         if (LOG.isDebugEnabled()) {
           LOG.debug("reloading table servers because: " + e.getMessage());
         }
-        r = getRegionLocation(row, true);        
+        r = getRegionLocation(row, true);
       }
       try {
         Thread.sleep(this.pause);
@@ -832,7 +833,6 @@
         if (LOG.isDebugEnabled()) {
           LOG.debug("reloading table servers because: " + e.getMessage());
         }
-/*        tableServers = connection.reloadTableServers(tableName);*/
         r = getRegionLocation(row, true);
       }
       try {

Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/generated/master/master_jsp.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/generated/master/master_jsp.java?rev=613923&r1=613922&r2=613923&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/generated/master/master_jsp.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/generated/master/master_jsp.java Mon Jan 21 08:21:46 2008
@@ -57,7 +57,7 @@
 
 
   HMaster master = (HMaster)getServletContext().getAttribute(HMaster.MASTER);
-  HBaseConfiguration conf = new HBaseConfiguration();
+  HBaseConfiguration conf = master.getConfiguration();
   TableFormatter formatter = new HtmlTableFormatter(out);
   ShowCommand show = new ShowCommand(out, formatter, "tables");
   HServerAddress rootLocation = master.getRootRegionLocation();

Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/generated/regionserver/regionserver_jsp.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/generated/regionserver/regionserver_jsp.java?rev=613923&r1=613922&r2=613923&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/generated/regionserver/regionserver_jsp.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/generated/regionserver/regionserver_jsp.java Mon Jan 21 08:21:46 2008
@@ -5,6 +5,7 @@
 import javax.servlet.jsp.*;
 import java.util.*;
 import org.apache.hadoop.io.Text;
+import org.apache.hadoop.util.VersionInfo;
 import org.apache.hadoop.hbase.HRegionServer;
 import org.apache.hadoop.hbase.HRegion;
 import org.apache.hadoop.hbase.HConstants;
@@ -55,7 +56,18 @@
       out.print( serverInfo.getServerAddress().toString() );
       out.write("</title>\n<link rel=\"stylesheet\" type=\"text/css\" 
href=\"/static/hbase.css\" />\n</head>\n\n<body>\n<a id=\"logo\" 
href=\"http://wiki.apache.org/lucene-hadoop/Hbase\";><img 
src=\"/static/hbase_logo_med.gif\" alt=\"Hbase Logo\" title=\"Hbase Logo\" 
/></a>\n<h1 id=\"page_title\">Region Server: ");
       out.print( serverInfo.getServerAddress().toString() );
-      out.write("</h1>\n<p id=\"links_menu\"><a href=\"/logs/\">Local 
logs</a>, <a href=\"/stacks\">Thread Dump</a>, <a href=\"/logLevel\">Log 
Level</a></p>\n<hr id=\"head_rule\" />\n\n<h2>Region Server 
Attributes</h2>\n<table>\n<tr><th>Attribute 
Name</th><th>Value</th><th>Description</th></tr>\n<tr><td>Load</td><td>");
+      out.write("</h1>\n<p id=\"links_menu\"><a href=\"/logs/\">Local 
logs</a>, <a href=\"/stacks\">Thread Dump</a>, <a href=\"/logLevel\">Log 
Level</a></p>\n<hr id=\"head_rule\" />\n\n<h2>Region Server 
Attributes</h2>\n<table>\n<tr><th>Attribute 
Name</th><th>Value</th><th>Description</th></tr>\n<tr><td>Version</td><td>");
+      out.print( VersionInfo.getVersion() );
+      out.write(',');
+      out.write(' ');
+      out.write('r');
+      out.print( VersionInfo.getRevision() );
+      out.write("</td><td>Hbase version and svn 
revision</td></tr>\n<tr><td>Compiled</td><td>");
+      out.print( VersionInfo.getDate() );
+      out.write(',');
+      out.write(' ');
+      out.print( VersionInfo.getUser() );
+      out.write("</td><td>When this version was compiled and by 
whom</td></tr>\n<tr><td>Load</td><td>");
       out.print( serverInfo.getLoad().toString() );
       out.write("</td><td>Requests/<em>hbase.regionserver.msginterval</em> + 
count of loaded regions</td></tr>\n</table>\n\n<h2>Online Regions</h2>\n");
  if (onlineRegions != null && onlineRegions.size() > 0) { 

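The generated servlet above mirrors the regionserver.jsp change near the end of this diff: two new status rows built from hadoop's VersionInfo. A small sketch of the same calls outside a JSP, useful for checking what the new "Version" and "Compiled" rows will print; the class name is illustrative:

import org.apache.hadoop.util.VersionInfo;

public class VersionRowsSketch {
  public static void main(String[] args) {
    // Same values the new table rows render in the region server UI.
    System.out.println("Version: " + VersionInfo.getVersion() + ", r" + VersionInfo.getRevision());
    System.out.println("Compiled: " + VersionInfo.getDate() + ", " + VersionInfo.getUser());
  }
}
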
Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/util/Writables.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/util/Writables.java?rev=613923&r1=613922&r2=613923&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/util/Writables.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/util/Writables.java Mon Jan 21 08:21:46 2008
@@ -24,15 +24,11 @@
 import java.io.IOException;
 import java.io.UnsupportedEncodingException;
 
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.io.DataInputBuffer;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Writable;
-import org.apache.hadoop.io.WritableUtils;
-import org.apache.hadoop.util.ReflectionUtils;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.HRegionInfo;
 
 /**
  * Utility class with methods for manipulating Writable objects
@@ -91,24 +87,6 @@
       return w;
     } finally {
       in.close();
-    }
-  }
-
-  /**
-   * Make a copy of a writable object using serialization to a buffer.
-   * Copied from WritableUtils only <code>conf</code> type is Configurable
-   * rather than JobConf (Doesn't need to be JobConf -- HADOOP-2469).
-   * @param orig The object to copy
-   * @return The copied object
-   */
-  public static Writable clone(Writable orig, Configuration conf) {
-    try {
-      Writable newInst =
-        (Writable)ReflectionUtils.newInstance(orig.getClass(), conf);
-      WritableUtils.cloneInto(newInst, orig);
-      return newInst;
-    } catch (IOException e) {
-      throw new RuntimeException("Error writing/reading clone buffer", e);
     }
   }
 

Modified: lucene/hadoop/trunk/src/contrib/hbase/src/webapps/master/hql.jsp
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/webapps/master/hql.jsp?rev=613923&r1=613922&r2=613923&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/webapps/master/hql.jsp (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/webapps/master/hql.jsp Mon Jan 21 08:21:46 2008
@@ -3,7 +3,7 @@
   import="org.apache.hadoop.hbase.HBaseConfiguration"
   import="org.apache.hadoop.hbase.hql.TableFormatter"
   import="org.apache.hadoop.hbase.hql.ReturnMsg"
-  import="org.apache.hadoop.hbase.hql.generated.Parser"
+  import="org.apache.hadoop.hbase.hql.generated.HQLParser"
   import="org.apache.hadoop.hbase.hql.Command"
   import="org.apache.hadoop.hbase.hql.formatter.HtmlTableFormatter" 
 %><?xml version="1.0" encoding="UTF-8" ?>

Modified: lucene/hadoop/trunk/src/contrib/hbase/src/webapps/master/master.jsp
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/webapps/master/master.jsp?rev=613923&r1=613922&r2=613923&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/webapps/master/master.jsp (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/webapps/master/master.jsp Mon Jan 21 08:21:46 2008
@@ -16,7 +16,7 @@
   import="org.apache.hadoop.hbase.hql.formatter.HtmlTableFormatter"
   import="org.apache.hadoop.hbase.HTableDescriptor" %><%
   HMaster master = (HMaster)getServletContext().getAttribute(HMaster.MASTER);
-  HBaseConfiguration conf = new HBaseConfiguration();
+  HBaseConfiguration conf = master.getConfiguration();
   TableFormatter formatter = new HtmlTableFormatter(out);
   ShowCommand show = new ShowCommand(out, formatter, "tables");
   HServerAddress rootLocation = master.getRootRegionLocation();

Modified: lucene/hadoop/trunk/src/contrib/hbase/src/webapps/regionserver/regionserver.jsp
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/webapps/regionserver/regionserver.jsp?rev=613923&r1=613922&r2=613923&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/webapps/regionserver/regionserver.jsp (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/webapps/regionserver/regionserver.jsp Mon Jan 21 08:21:46 2008
@@ -1,6 +1,7 @@
 <%@ page contentType="text/html;charset=UTF-8"
   import="java.util.*"
   import="org.apache.hadoop.io.Text"
+  import="org.apache.hadoop.util.VersionInfo"
   import="org.apache.hadoop.hbase.HRegionServer"
   import="org.apache.hadoop.hbase.HRegion"
   import="org.apache.hadoop.hbase.HConstants"
@@ -27,6 +28,8 @@
 <h2>Region Server Attributes</h2>
 <table>
 <tr><th>Attribute Name</th><th>Value</th><th>Description</th></tr>
+<tr><td>Version</td><td><%= VersionInfo.getVersion() %>, r<%= VersionInfo.getRevision() %></td><td>Hbase version and svn revision</td></tr>
+<tr><td>Compiled</td><td><%= VersionInfo.getDate() %>, <%= VersionInfo.getUser() %></td><td>When this version was compiled and by whom</td></tr>
 <tr><td>Load</td><td><%= serverInfo.getLoad().toString() %></td><td>Requests/<em>hbase.regionserver.msginterval</em> + count of loaded regions</td></tr>
 </table>
 

