Author: stack Date: Tue Dec 4 23:27:59 2007 New Revision: 601232 URL: http://svn.apache.org/viewvc?rev=601232&view=rev Log: HADOOP-2347 REST servlet not thread safe but run in a threaded manner
Modified: lucene/hadoop/trunk/src/contrib/hbase/CHANGES.txt lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/rest/Dispatcher.java lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/rest/GenericHandler.java lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/rest/MetaHandler.java lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/rest/ScannerHandler.java lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/rest/TableHandler.java Modified: lucene/hadoop/trunk/src/contrib/hbase/CHANGES.txt URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/CHANGES.txt?rev=601232&r1=601231&r2=601232&view=diff ============================================================================== --- lucene/hadoop/trunk/src/contrib/hbase/CHANGES.txt (original) +++ lucene/hadoop/trunk/src/contrib/hbase/CHANGES.txt Tue Dec 4 23:27:59 2007 @@ -56,6 +56,8 @@ (Bryan Duxbury via Stack) HADOOP-2332 Meta table data selection in Hbase Shell (Edward Yoon via Stack) + HADOOP-2347 REST servlet not thread safe but run in a threaded manner + (Bryan Duxbury via Stack) IMPROVEMENTS HADOOP-2401 Add convenience put method that takes writable Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/rest/Dispatcher.java URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/rest/Dispatcher.java?rev=601232&r1=601231&r2=601232&view=diff ============================================================================== --- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/rest/Dispatcher.java (original) +++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/rest/Dispatcher.java Tue Dec 4 23:27:59 2007 @@ -61,9 +61,6 @@ */ public class Dispatcher extends javax.servlet.http.HttpServlet implements javax.servlet.Servlet { - - private static final long serialVersionUID = 1045003206345359301L; - 
private MetaHandler metaHandler; private TableHandler tableHandler; private ScannerHandler scannerHandler; Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/rest/GenericHandler.java URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/rest/GenericHandler.java?rev=601232&r1=601231&r2=601232&view=diff ============================================================================== --- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/rest/GenericHandler.java (original) +++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/rest/GenericHandler.java Tue Dec 4 23:27:59 2007 @@ -44,12 +44,8 @@ * REST handler types take advantage of. */ public abstract class GenericHandler { - - protected static final long serialVersionUID = 6939910503474376143L; - protected HBaseConfiguration conf; protected HBaseAdmin admin; - protected HTable table = null; protected static final String ACCEPT = "accept"; protected static final String COLUMN = "column"; @@ -255,16 +251,10 @@ } } - protected void focusTable(final String tableName) throws IOException { - // Do we have an HTable instance to suit? TODO, keep a pool of - // instances of HTable. For now, allocate a new one each time table - // focus changes. - if (this.table == null || - !this.table.getTableName().toString().equals(tableName)) { - if (this.table != null) { - this.table.close(); - } - this.table = new HTable(this.conf, new Text(tableName)); - } + /* + * Get an HTable instance by its table name. 
+ */ + protected HTable getTable(final String tableName) throws IOException { + return new HTable(this.conf, new Text(tableName)); } } Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/rest/MetaHandler.java URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/rest/MetaHandler.java?rev=601232&r1=601231&r2=601232&view=diff ============================================================================== --- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/rest/MetaHandler.java (original) +++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/rest/MetaHandler.java Tue Dec 4 23:27:59 2007 @@ -44,7 +44,7 @@ } - public void doGet(HttpServletRequest request, HttpServletResponse response, + public void doGet(HttpServletRequest request, HttpServletResponse response, String[] pathSegments) throws ServletException, IOException { getTables(request, response); @@ -77,6 +77,7 @@ final HttpServletResponse response) throws IOException { HTableDescriptor [] tables = this.admin.listTables(); + switch (ContentType.getContentType(request.getHeader(ACCEPT))) { case XML: setResponseHeader(response, tables.length > 0? 
200: 204, Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/rest/ScannerHandler.java URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/rest/ScannerHandler.java?rev=601232&r1=601231&r2=601232&view=diff ============================================================================== --- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/rest/ScannerHandler.java (original) +++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/rest/ScannerHandler.java Tue Dec 4 23:27:59 2007 @@ -31,6 +31,7 @@ import javax.servlet.http.HttpServletResponse; import org.apache.hadoop.hbase.HBaseAdmin; +import org.apache.hadoop.hbase.HTable; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HScannerInterface; @@ -248,8 +249,8 @@ private void openScanner(final HttpServletRequest request, final HttpServletResponse response, final String [] pathSegments) throws IOException, ServletException { - // focus on the table - focusTable(getTableName(pathSegments)); + // get the table + HTable table = getTable(getTableName(pathSegments)); // get the list of columns we're supposed to interact with String[] raw_columns = request.getParameterValues(COLUMN); @@ -284,8 +285,8 @@ HConstants.UTF8_ENCODING)); HScannerInterface scanner = (request.getParameter(END_ROW) == null)? - this.table.obtainScanner(columns, startRow): - this.table.obtainScanner(columns, startRow, endRow); + table.obtainScanner(columns, startRow): + table.obtainScanner(columns, startRow, endRow); // Make a scanner id by hashing the object toString value (object name + // an id). Will make identifier less burdensome and more url friendly. 
Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/rest/TableHandler.java URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/rest/TableHandler.java?rev=601232&r1=601231&r2=601232&view=diff ============================================================================== --- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/rest/TableHandler.java (original) +++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/rest/TableHandler.java Tue Dec 4 23:27:59 2007 @@ -38,6 +38,7 @@ import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HTableDescriptor; +import org.apache.hadoop.hbase.HTable; import org.apache.hadoop.io.Text; import org.mortbay.servlet.MultiPartResponse; import org.w3c.dom.Document; @@ -67,14 +68,14 @@ getTableMetadata(request, response, pathSegments[0]); } else{ - focusTable(pathSegments[0]); + HTable table = getTable(pathSegments[0]); if (pathSegments[1].toLowerCase().equals(REGIONS)) { // get a region list - getTableRegions(request, response); + getTableRegions(table, request, response); } else if (pathSegments[1].toLowerCase().equals(ROW)) { // get a row - getRow(request, response, pathSegments); + getRow(table, request, response, pathSegments); } else{ doNotFound(response, "Not handled in TableHandler"); @@ -108,7 +109,7 @@ * @throws IOException * Retrieve a row in one of several output formats. */ - private void getRow(final HttpServletRequest request, + private void getRow(HTable table, final HttpServletRequest request, final HttpServletResponse response, final String [] pathSegments) throws IOException { // pull the row key out of the path @@ -132,8 +133,8 @@ // Presumption is that this.table has already been focused on target table. Map<Text, byte[]> result = timestampStr == null ? 
- this.table.getRow(new Text(row)) - : this.table.getRow(new Text(row), Long.parseLong(timestampStr)); + table.getRow(new Text(row)) + : table.getRow(new Text(row), Long.parseLong(timestampStr)); if (result == null || result.size() == 0) { doNotFound(response, "Row not found!"); @@ -151,7 +152,7 @@ } } } else { - Map<Text, byte[]> prefiltered_result = this.table.getRow(new Text(row)); + Map<Text, byte[]> prefiltered_result = table.getRow(new Text(row)); if (prefiltered_result == null || prefiltered_result.size() == 0) { doNotFound(response, "Row not found!"); @@ -243,13 +244,14 @@ private void putRow(final HttpServletRequest request, final HttpServletResponse response, final String [] pathSegments) throws IOException, ServletException { - focusTable(pathSegments[0]); + HTable table = getTable(pathSegments[0]); + switch(ContentType.getContentType(request.getHeader(CONTENT_TYPE))) { case XML: - putRowXml(request, response, pathSegments); + putRowXml(table, request, response, pathSegments); break; case MIME: - doNotAcceptable(response); + doNotAcceptable(response, "Don't support multipart/related yet..."); break; default: doNotAcceptable(response, "Unsupported Accept Header Content: " + @@ -263,7 +265,7 @@ * @param pathSegments * Decode supplied XML and do a put to Hbase. 
*/ - private void putRowXml(final HttpServletRequest request, + private void putRowXml(HTable table, final HttpServletRequest request, final HttpServletResponse response, final String [] pathSegments) throws IOException, ServletException{ @@ -291,7 +293,7 @@ try{ // start an update Text key = new Text(pathSegments[2]); - lock_id = this.table.startUpdate(key); + lock_id = table.startUpdate(key); // set the columns from the xml NodeList columns = doc.getElementsByTagName("column"); @@ -310,15 +312,15 @@ byte[] value = org.apache.hadoop.hbase.util.Base64.decode(value_node.getFirstChild().getNodeValue()); // put the value - this.table.put(lock_id, name, value); + table.put(lock_id, name, value); } // commit the update if (timestamp != null) { - this.table.commit(lock_id, Long.parseLong(timestamp)); + table.commit(lock_id, Long.parseLong(timestamp)); } else{ - this.table.commit(lock_id); + table.commit(lock_id); } // respond with a 200 @@ -326,7 +328,7 @@ } catch(Exception e){ if (lock_id != -1) { - this.table.abort(lock_id); + table.abort(lock_id); } throw new ServletException(e); } @@ -337,11 +339,11 @@ * @param request * @param response */ - private void getTableRegions(final HttpServletRequest request, - final HttpServletResponse response) + private void getTableRegions(HTable table, final HttpServletRequest request, + final HttpServletResponse response) throws IOException { // Presumption is that this.table has already been focused on target table. 
- Text [] startKeys = this.table.getStartKeys(); + Text [] startKeys = table.getStartKeys(); // Presumption is that this.table has already been set against target table switch (ContentType.getContentType(request.getHeader(ACCEPT))) { case XML: @@ -445,7 +447,7 @@ final HttpServletResponse response, final String [] pathSegments) throws IOException, ServletException { // grab the table we're operating on - focusTable(getTableName(pathSegments)); + HTable table = getTable(getTableName(pathSegments)); Text key = new Text(pathSegments[2]); @@ -465,7 +467,7 @@ } else{ // delete each column in turn for(int i = 0; i < columns.length; i++){ - this.table.deleteAll(key, new Text(columns[i])); + table.deleteAll(key, new Text(columns[i])); } } response.setStatus(202);