Author: stack
Date: Tue Jun 17 16:58:05 2008
New Revision: 668880

URL: http://svn.apache.org/viewvc?rev=668880&view=rev
Log:
HBASE-487 Replace hql w/ a hbase-friendly jirb or jython shell

Modified:
    hadoop/hbase/trunk/bin/Formatter.rb
    hadoop/hbase/trunk/bin/HBase.rb
    hadoop/hbase/trunk/bin/hirb.rb
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HColumnDescriptor.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HConstants.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HRegionInfo.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/HConnectionManager.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/HTable.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/BatchOperation.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/Cell.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/TableOperation.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HStore.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/NoSuchColumnFamilyException.java
    hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/TestBloomFilters.java

Modified: hadoop/hbase/trunk/bin/Formatter.rb
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/bin/Formatter.rb?rev=668880&r1=668879&r2=668880&view=diff
==============================================================================
--- hadoop/hbase/trunk/bin/Formatter.rb (original)
+++ hadoop/hbase/trunk/bin/Formatter.rb Tue Jun 17 16:58:05 2008
@@ -1,7 +1,8 @@
 # Results formatter
 module Formatter
+  # Base abstract class for results formatting.
   class Formatter
-    # Base abstract class for results formatting.
+    # Takes an output stream and a print width.
     def initialize(o, w = 80)
       raise TypeError.new("Type %s of parameter %s is not IO" % [o.class, o]) \
         unless o.instance_of? IO
@@ -9,17 +10,26 @@
       @maxWidth = w
       @rowCount = 0
     end
-    
+
+    attr_reader :rowCount
+
     def header(args = [])
-      row(args) if args.length > 0
+      row(args, false) if args.length > 0
       @rowCount = 0
     end
     
-    def row(args = [])
+    # Output a row.
+    # If inset is true, the row is offset by a leading space.
+    def row(args = [], inset = true)
       if not args or args.length == 0
         # Print out nothing
         return
       end
+      if args.class == String
+        output(@maxWidth, args)
+        puts
+        return
+      end
       # TODO: Look at the type.  Is it RowResult?
       if args.length == 1
         splits = split(@maxWidth, dump(args[0]))
@@ -35,8 +45,15 @@
         biggest = (splits2.length > splits1.length)? splits2.length: splits1.length
         index = 0
         while index < biggest
-          @out.print(" ")
+          if inset
+            # Inset by one space if inset is set.
+            @out.print(" ")
+          end
           output(col1width, splits1[index])
+          if not inset
+            # Add an extra space so the second column lines up with the second column of inset rows
+            @out.print(" ")
+          end
           @out.print(" ")
           output(col2width, splits2[index])
           index += 1
@@ -68,6 +85,9 @@
 
     def dump(str)
       # Remove double-quotes added by 'dump'.
+      if str.instance_of? Fixnum
+          return
+      end
       return str.dump.slice(1, str.length)
     end
 
@@ -82,7 +102,7 @@
         return
       end
       # Only output elapsed time and row count if startTime passed
-      @out.puts("%d row(s) in %s seconds" % [@rowCount, Time.now - startTime])
+      @out.puts("%d row(s) in %.4f seconds" % [@rowCount, Time.now - startTime])
     end
   end
      

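For orientation, the Formatter contract these hunks settle on is small:
header() resets the row count, row() takes either a String or a one- or
two-element array, and footer() reports the row count and elapsed time when
given a start time.  A minimal sketch of a caller (Formatter::Console is the
concrete subclass the HBase.rb self-test below instantiates; the cell text is
made up):

    formatter = Formatter::Console.new(STDOUT)
    start = Time.now
    formatter.header(["Row", "Column+Cell"])  # headers print without the inset
    formatter.row(["r1", "column=x:1, x1"])   # data rows are inset one space
    formatter.footer(start)                   # e.g. "1 row(s) in 0.0012 seconds"
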
Modified: hadoop/hbase/trunk/bin/HBase.rb
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/bin/HBase.rb?rev=668880&r1=668879&r2=668880&view=diff
==============================================================================
--- hadoop/hbase/trunk/bin/HBase.rb (original)
+++ hadoop/hbase/trunk/bin/HBase.rb Tue Jun 17 16:58:05 2008
@@ -1,5 +1,33 @@
-# HBase ruby classes
+# HBase ruby classes.
+# Has wrapper classes for org.apache.hadoop.hbase.client.HBaseAdmin
+# and for org.apache.hadoop.hbase.client.HTable.  Classes take
+# Formatters on construction and output any results using
+# Formatter methods.  These classes are only really for use by
+# the hirb.rb HBase Shell script; they don't make much sense elsewhere.
+# For example, the exists method on the Admin class prints to the formatter
+# whether the table exists and returns nil regardless.
+include Java
+import org.apache.hadoop.hbase.client.HBaseAdmin
+import org.apache.hadoop.hbase.client.HTable
+import org.apache.hadoop.hbase.HConstants
+import org.apache.hadoop.hbase.io.BatchUpdate
+import org.apache.hadoop.hbase.io.RowResult
+import org.apache.hadoop.hbase.io.Cell
+import org.apache.hadoop.hbase.HBaseConfiguration
+import org.apache.hadoop.hbase.HColumnDescriptor
+import org.apache.hadoop.hbase.HTableDescriptor
+
 module HBase
+  COLUMN = "COLUMN"
+  COLUMNS = "COLUMNS"
+  TIMESTAMP = "TIMESTAMP"
+  NAME = HConstants::NAME
+  VERSIONS = HConstants::VERSIONS
+  STOPROW = "STOPROW"
+  STARTROW = "STARTROW"
+  LIMIT = "LIMIT"
+
+  # Wrapper for org.apache.hadoop.hbase.client.HBaseAdmin
   class Admin
     def initialize(configuration, formatter)
       @admin = HBaseAdmin.new(configuration)
@@ -26,7 +54,7 @@
         end
       end
       if not found
-        raise new ArgumentError.new("Failed to find table named " + tableName)
+        raise ArgumentError.new("Failed to find table named " + tableName)
       end
       @formatter.footer(now)
     end
@@ -34,7 +62,8 @@
     def exists(tableName)
       now = Time.now 
       @formatter.header()
-      @formatter.row([@admin.tableExists(tableName).to_s])
+      e = @admin.tableExists(tableName)
+      @formatter.row([e.to_s])
       @formatter.footer(now)
     end
 
@@ -61,6 +90,7 @@
       @formatter.footer(now)
     end
 
+    # Pass table name and an array of Hashes
     def create(tableName, args)
       now = Time.now 
       # Pass table name and an array of Hashes.  Later, test the last
@@ -71,9 +101,13 @@
       # hash specifications. TODO: Add table options handling.
       htd = HTableDescriptor.new(tableName)
       for arg in args
-        raise TypeError.new(arg.class.to_s + " of " + arg.to_s + " is not of Hash type") \
-          unless arg.instance_of? Hash
-        htd.addFamily(hcd(arg))
+        if arg.instance_of? String
+          htd.addFamily(HColumnDescriptor.new(makeColumnName(arg)))
+        else
+          raise TypeError.new(arg.class.to_s + " of " + arg.to_s + " is not of Hash type") \
+            unless arg.instance_of? Hash
+          htd.addFamily(hcd(arg))
+        end
       end
       @admin.createTable(htd)
       @formatter.header()
@@ -90,6 +124,18 @@
       @formatter.footer(now)
     end
 
+    # Make a legal column name from the passed String.
+    # Check the string ends in a colon; if not, add one.
+    def makeColumnName(arg)
+      index = arg.index(':')
+      if not index
+        # Add a colon.  If already a colon, it's in the right place,
+        # or an exception will come up out of the addFamily
+        arg << ':'
+      end
+      arg
+    end
+
     def hcd(arg)
       # Return a new HColumnDescriptor made of passed args
       # TODO: This is brittle code.
@@ -102,27 +148,204 @@
       name = arg[NAME]
       raise ArgumentError.new("Column family " + arg + " must have a name") \
         unless name
-      # Check the family name for colon.  Add it if missing.
-      index = name.index(':')
-      if not index
-        # Add a colon.  If already a colon, its in the right place,
-        # or an exception will come up out of the addFamily
-        name << ':'
-      end
+      name = makeColumnName(name)
       # TODO: What encoding are Strings in jruby?
       return HColumnDescriptor.new(name.to_java_bytes,
         # JRuby uses longs for ints. Need to convert.  Also constants are String
-        arg[MAX_VERSIONS]? arg[MAX_VERSIONS]: HColumnDescriptor::DEFAULT_MAX_VERSIONS,
-        arg[COMPRESSION]? HColumnDescriptor::CompressionType::valueOf(arg[COMPRESSION]):
+        arg[VERSIONS]? arg[VERSIONS]: HColumnDescriptor::DEFAULT_VERSIONS,
+        arg[HColumnDescriptor::COMPRESSION]? HColumnDescriptor::CompressionType::valueOf(arg[HColumnDescriptor::COMPRESSION]):
          HColumnDescriptor::DEFAULT_COMPRESSION,
-        arg[IN_MEMORY]? arg[IN_MEMORY]: HColumnDescriptor::DEFAULT_IN_MEMORY,
-        arg[BLOCKCACHE]? arg[BLOCKCACHE]: HColumnDescriptor::DEFAULT_BLOCKCACHE,
-        arg[MAX_LENGTH]? arg[MAX_LENGTH]: HColumnDescriptor::DEFAULT_MAX_LENGTH,
-        arg[TTL]? arg[TTL]: HColumnDescriptor::DEFAULT_TTL,
-        arg[BLOOMFILTER]? arg[BLOOMFILTER]: HColumnDescriptor::DEFAULT_BLOOMFILTER)
+        arg[HColumnDescriptor::IN_MEMORY]? arg[HColumnDescriptor::IN_MEMORY]: HColumnDescriptor::DEFAULT_IN_MEMORY,
+        arg[HColumnDescriptor::BLOCKCACHE]? arg[HColumnDescriptor::BLOCKCACHE]: HColumnDescriptor::DEFAULT_BLOCKCACHE,
+        arg[HColumnDescriptor::LENGTH]? arg[HColumnDescriptor::LENGTH]: HColumnDescriptor::DEFAULT_LENGTH,
+        arg[HColumnDescriptor::TTL]? arg[HColumnDescriptor::TTL]: HColumnDescriptor::DEFAULT_TTL,
+        arg[HColumnDescriptor::BLOOMFILTER]? arg[HColumnDescriptor::BLOOMFILTER]: HColumnDescriptor::DEFAULT_BLOOMFILTER)
     end
   end
 
+  # Wrapper for org.apache.hadoop.hbase.client.HTable
   class Table
+    def initialize(configuration, tableName, formatter)
+      @table = HTable.new(configuration, tableName)
+      @formatter = formatter
+    end
+
+    # Delete a cell
+    def delete(row, column, timestamp = nil)
+      now = Time.now 
+      bu = nil
+      if timestamp
+        bu = BatchUpdate.new(row, timestamp)
+      else
+        bu = BatchUpdate.new(row)
+      end
+      bu.delete(column)
+      @table.commit(bu)
+      @formatter.header()
+      @formatter.footer(now)
+    end
+
+    def deleteall(row, column = nil, timestamp = HConstants::LATEST_TIMESTAMP)
+      now = Time.now 
+      @table.deleteAll(row, column, timestamp)
+      @formatter.header()
+      @formatter.footer(now)
+    end
+
+    def deletefc(row, column_family, timestamp = HConstants::LATEST_TIMESTAMP)
+      now = Time.now 
+      @table.deleteFamily(row, column_family, timestamp)
+      @formatter.header()
+      @formatter.footer(now)
+    end
+
+    def scan(columns, args = {})
+      now = Time.now 
+      if not columns or columns.length < 1
+        raise ArgumentError.new("Must supply an array of columns to scan")
+      end
+      cs = columns.to_java(java.lang.String)
+      limit = -1
+      if args == nil or args.length <= 0
+        s = @table.getScanner(cs)
+      else
+        limit = args["LIMIT"] || -1 
+        filter = args["FILTER"] || nil
+        startrow = args["STARTROW"] || ""
+        stoprow = args["STOPROW"] || nil
+        timestamp = args["TIMESTAMP"] || HConstants::LATEST_TIMESTAMP
+        if stoprow
+          s = @table.getScanner(cs, startrow, stoprow, timestamp)
+        else
+          s = @table.getScanner(cs, startrow, timestamp, filter) 
+        end
+      end 
+      count = 0
+      @formatter.header(["Row", "Column+Cell"])
+      i = s.iterator()
+      while i.hasNext()
+        r = i.next()
+        row = String.from_java_bytes r.getRow()
+        for k, v in r
+          column = String.from_java_bytes k
+          cell = v.toString()
+          @formatter.row([row, "column=%s, %s" % [column, cell]])
+        end
+        count += 1
+        if limit != -1 and count >= limit
+          break
+        end
+      end
+      @formatter.footer(now)
+    end
+
+    def put(row, column, value, timestamp = nil)
+      now = Time.now 
+      bu = nil
+      if timestamp
+        bu = BatchUpdate.new(row, timestamp)
+      else
+        bu = BatchUpdate.new(row)
+      end
+      bu.put(column, value.to_java_bytes)
+      @table.commit(bu)
+      @formatter.header()
+      @formatter.footer(now)
+    end
+  
+    # Get from table
+    def get(row, args = {})
+      now = Time.now 
+      result = nil
+      if args == nil or args.length == 0
+        result = @table.getRow(row.to_java_bytes)
+      else
+        # It's a hash.
+        columns = args[COLUMN] 
+        if columns == nil
+          # Maybe they used the COLUMNS key
+          columns = args[COLUMNS]
+        end
+        if columns == nil
+          # May have passed TIMESTAMP and row only; wants all columns from ts.
+          ts = args[TIMESTAMP] 
+          if not ts
+            raise ArgumentError.new("Failed parse of " + args.to_s + ", " + args.class.to_s)
+          end
+          result = @table.getRow(row.to_java_bytes, ts)
+        else
+          # Columns are non-nil
+          if columns.class == String
+            # Single column
+            result = @table.get(row, columns,
+              args[TIMESTAMP]? args[TIMESTAMP]: HConstants::LATEST_TIMESTAMP,
+              args[VERSIONS]? args[VERSIONS]: 1)
+          elsif columns.class == Array
+            result = @table.getRow(row, columns.to_java(:string),
+              args[TIMESTAMP]? args[TIMESTAMP]: HConstants::LATEST_TIMESTAMP)
+          else
+            raise ArgumentError.new("Failed parse column argument type " +
+              args.to_s + ", " + args.class.to_s)
+          end
+        end
+      end
+      # Print out results.  Result can be Cell or RowResult.
+      h = nil
+      if result.instance_of? RowResult
+        h = String.from_java_bytes result.getRow()
+        @formatter.header(["Column", "Cell"])
+        if result
+          for column, cell in result
+            v = String.from_java_bytes cell.getValue()
+            ts = cell.getTimestamp()
+            @formatter.row([(String.from_java_bytes column), cell.toString()])
+          end
+        end
+      else
+        # Presume Cells
+        @formatter.header()
+        if result 
+          for c in result
+            @formatter.row([c.toString()])
+          end
+        end
+      end
+      @formatter.footer(now)
+    end
+  end
+
+  # Do a bit of testing.
+  # To run this test, do: ./bin/hbase org.jruby.Main bin/HBase.rb
+  if $0 == __FILE__
+    # Add this directory to LOAD_PATH; presumption is that Formatter module
+    # sits beside this one.  Then load it up.
+    $LOAD_PATH.unshift File.dirname($PROGRAM_NAME)
+    require 'Formatter'
+    # Make a console formatter
+    formatter = Formatter::Console.new(STDOUT)
+    # Now add in java and hbase classes
+    configuration = HBaseConfiguration.new()
+    admin = Admin.new(configuration, formatter)
+    # Create a table; drop old one if exists first.
+    TESTTABLE = "HBase_rb_testtable"
+    begin
+      admin.disable(TESTTABLE)
+      admin.drop(TESTTABLE)
+    rescue org.apache.hadoop.hbase.TableNotFoundException
+      # Just suppress not found exception
+    end
+    admin.create(TESTTABLE, [{NAME => 'x', VERSIONS => 5}])
+    # Presume it exists.  If it doesn't, next items will fail.
+    table = Table.new(configuration, TESTTABLE, formatter) 
+    for i in 1..10
+      table.put('x', 'x:%d' % i, 'x%d' % i)
+    end
+    table.get('x', {COLUMN => 'x:1'})
+    if formatter.rowCount() != 1
+      raise IOError.new("Failed first put")
+    end
+    table.scan(['x:'])
+    admin.disable(TESTTABLE)
+    admin.drop(TESTTABLE)
   end
 end

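A note on the Table#get dispatch above: it switches on the shape of the args
dictionary.  A sketch of the accepted forms from JRuby (rows, values, and the
timestamp are made up; COLUMN, COLUMNS, TIMESTAMP, VERSIONS, STARTROW, and
LIMIT are the HBase module constants that hirb.rb promotes into scope):

    t = HBase::Table.new(configuration, 't1', formatter)
    t.get('r1')                                     # whole row, latest cells
    t.get('r1', {TIMESTAMP => 1213742285000})       # whole row as of a timestamp
    t.get('r1', {COLUMN => 'f1:a', VERSIONS => 3})  # one column, up to 3 versions
    t.get('r1', {COLUMNS => ['f1:a', 'f1:b']})      # an explicit column list
    t.scan(['f1:'], {STARTROW => 'r1', LIMIT => 10})
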
Modified: hadoop/hbase/trunk/bin/hirb.rb
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/bin/hirb.rb?rev=668880&r1=668879&r2=668880&view=diff
==============================================================================
--- hadoop/hbase/trunk/bin/hirb.rb (original)
+++ hadoop/hbase/trunk/bin/hirb.rb Tue Jun 17 16:58:05 2008
@@ -8,17 +8,11 @@
 # has to time out.
 # TODO: Add support for listing and manipulating catalog tables, etc.
 # TODO: Fix 'irb: warn: can't alias help from irb_help.' in banner message
+# TODO: Encoding; need to know how to go from ruby String to UTF-8 bytes
 
 # Run the java magic include and import basic HBase types that will help ease
 # hbase hacking.
 include Java
-import org.apache.hadoop.hbase.HBaseConfiguration
-import org.apache.hadoop.hbase.client.HTable
-import org.apache.hadoop.hbase.client.HBaseAdmin
-import org.apache.hadoop.hbase.HColumnDescriptor
-import org.apache.hadoop.hbase.HConstants
-import org.apache.hadoop.hbase.HTableDescriptor
-import org.apache.hadoop.hbase.io.BatchUpdate
 
 # Some goodies for hirb. Should these be left up to the user's discretion?
 require 'irb/completion'
@@ -63,8 +57,12 @@
 end
 
 # Setup the HBase module.  Create a configuration.  If a master, set it.
-@configuration = HBaseConfiguration.new()
+# Turn off retries in hbase and ipc.  Human doesn't want to wait on N retries.
+@configuration = org.apache.hadoop.hbase.HBaseConfiguration.new()
 @configuration.set("hbase.master", master) if master
+@configuration.setInt("hbase.client.retries.number", 3)
+@configuration.setInt("ipc.client.connect.max.retries", 3)
+
 # Do lazy create of admin because if we are pointed at bad master, it will hang
 # shell on startup trying to connect.
 @admin = nil
@@ -81,8 +79,9 @@
     end
   end
 end
-promoteConstants(HColumnDescriptor.constants)
-promoteConstants(HTableDescriptor.constants)
+promoteConstants(org.apache.hadoop.hbase.HColumnDescriptor.constants)
+promoteConstants(org.apache.hadoop.hbase.HTableDescriptor.constants)
+promoteConstants(HBase.constants)
 
 # Start of the hbase shell commands.
 
@@ -96,48 +95,86 @@
   # TODO: Add help to the commands themselves rather than keep it distinct
   h  = <<HERE
 HBASE SHELL COMMANDS:
- alter     Alter column family schema in a table.  Pass table name and a
-           dictionary specifying the new column family schema. Dictionaries
-           are described below in the GENERAL NOTES section.  Dictionary must
-           include name of column family to alter.  For example, to change
-           the 'f1' column family in table 't1' to have a MAX_VERSIONS of 5,
-           do:
-
-           hbase> alter 't1', {NAME => 'f1', MAX_VERSIONS => 5}
-
- create    Create table; pass a table name, a dictionary of specifications per
-           column family, and optionally, named parameters of table options.
-           Dictionaries are described below in the GENERAL NOTES section. Named
-           parameters are like dictionary elements with uppercase names
-           (constants) as keys and a '=>' key/value delimiter.  Parameters are
-           comma-delimited.  For example, to create a table named 't1' with an
-           alternate maximum region size and a single family named 'f1' with an
-           alternate maximum number of cells and 'record' compression, type:
-
-           hbase> create 't1' {NAME => 'f1', MAX_VERSIONS => 5, \
-               COMPRESSION => 'RECORD'}, REGION_SIZE => 1024
-
-           For compression types, pass one of 'NONE', 'RECORD', or 'BLOCK'
-
- describe  Describe the named table. Outputs the table and family descriptors
- drop      Drop the named table.  Table must first be disabled
- disable   Disable the named table: e.g. "disable 't1'<RETURN>"
+ alter     Alter column family schema;  pass table name and a dictionary
+           specifying new column family schema. Dictionaries are described
+           below in the GENERAL NOTES section.  Dictionary must include name
+           of column family to alter.  For example, to change the 'f1' column
+           family in table 't1' from defaults to instead keep a maximum of 5
+           cell VERSIONS, do:
+
+           hbase> alter 't1', {NAME => 'f1', VERSIONS => 5}
+
+ create    Create table; pass table name, a dictionary of specifications per
+           column family, and optionally a dictionary of table configuration.
+           Dictionaries are described below in the GENERAL NOTES section.
+           For example, to create a table named 't1' with a single family named
+           'f1' with an alternate maximum number of cells, type:
+
+           hbase> create 't1', {NAME => 'f1', VERSIONS => 5}
+
+ describe  Describe the named table: e.g. "hbase> describe 't1'"
+
+ delete    Put a delete cell value at specified table/row/column and optionally
+           timestamp coordinates.  Deletes must match the deleted cell's
+           coordinates exactly.  When scanning, a delete cell suppresses older
+           versions. Takes arguments like 'put' described below
+ 
+ deleteall Delete all cells; pass a table name, row and optionally, a column
+           and timestamp
+
+ deletefc  Delete all cells in the named column family.  Pass table name and family
+
+ drop      Drop the named table. Table must first be disabled
+
+ disable   Disable the named table: e.g. "hbase> disable 't1'"
+
  enable    Enable the named table
- exists    Does the named table exist? e.g. "exists 't1'<RETURN>"
- exit      Exit the shell
- list      List all tables
+
+ exists    Does the named table exist? e.g. "hbase> exists 't1'"
+
+ exit      Type "hbase> exit" to leave the HBase Shell
+
+ get       Get row or cell contents; pass table name, row, and optionally
+           a dictionary of column(s), timestamp and versions.  Examples:
+
+           hbase> get 't1', 'r1'
+           hbase> get 't1', 'r1', {COLUMN => 'c1'}
+           hbase> get 't1', 'r1', {COLUMN => ['c1', 'c2', 'c3']}
+           hbase> get 't1', 'r1', {TIMESTAMP => ts1, VERSIONS => 4}
+
+ list      List all tables in hbase
+
+ put       Put a cell value at specified table/row/column and optionally
+           timestamp coordinates.  To put a cell value into table 't1' at
+           row 'r1' under column 'c1' marked with the time 'ts1', do:
+
+           hbase> put 't1', 'r1', 'c1', 'value', ts1
+
+ scan      Scan a table; pass table name and an array of column names.
+           Optionally, pass a dictionary of options that includes one or more
+           of the following: LIMIT, FILTER, STARTROW, STOPROW, and TIMESTAMP.
+           For example, to scan columns 'c1' and 'c2' in table 't1', returning
+           10 rows only:
+
+           hbase> scan 't1', ['c1', 'c2'], {LIMIT => 10}
+
  version   Output this HBase version
 
 GENERAL NOTES:
 Quote all names in the hbase shell such as table and column names.  Don't
-forget commas delimiting command parameters. Dictionaries of configuration used
-in the creation and alteration of tables are ruby-style Hashes. They look like
-this: { 'key1' => 'value1', 'key2' => 'value2', ...}.  They are opened and
-closed with curley-braces.  Key/values are delimited by the '=>' character
-combination.  Usually keys are predefined constants such as NAME, MAX_VERSIONS,
-COMPRESSION, MAX_LENGTH, TTL, etc.  Constants do not need to be quoted.  Type
+forget that commas delimit command parameters.  Type <RETURN> after entering a
+command to run it.  Dictionaries of configuration used in the creation and
+alteration of tables are ruby Hashes. They look like this:
+
+  {'key1' => 'value1', 'key2' => 'value2', ...}
+
+They are opened and closed with curly braces.  Key/values are delimited by
+the '=>' character combination.  Usually keys are predefined constants such as
+NAME, VERSIONS, COMPRESSION, etc.  Constants do not need to be quoted.  Type
 'Object.constants' to see a (messy) list of all constants in the environment.
-See http://wiki.apache.org/hadoop/Hbase/Shell for more on the HBase Shell.
+
+This HBase shell is the JRuby IRB with the above HBase-specific commands added.
+For more on the HBase Shell, see http://wiki.apache.org/hadoop/Hbase/Shell
 HERE
   puts h
 end
@@ -156,16 +193,21 @@
   @admin
 end
 
-def create(table_name, *args)
-  admin().create(table_name, args)
+def table(table)
+  # Create new one each time
+  HBase::Table.new(@configuration, table, @formatter)
+end
+
+def create(table, *args)
+  admin().create(table, args)
 end
 
-def drop(table_name)
-  admin().drop(table_name)
+def drop(table)
+  admin().drop(table)
 end
 
-def alter(table_name, args)
-  admin().alter(table_name, args) 
+def alter(table, args)
+  admin().alter(table, args) 
 end
 
 # Administration
@@ -174,38 +216,48 @@
   admin().list()
 end
 
-def describe(table_name)
-  admin().describe(table_name)
+def describe(table)
+  admin().describe(table)
 end
   
-def enable(table_name)
-  admin().enable(table_name)
+def enable(table)
+  admin().enable(table)
 end
 
-def disable(table_name)
-  admin().disable(table_name)
+def disable(table)
+  admin().disable(table)
 end
 
-def exists(table_name)
-  admin().exists(table_name)
+def exists(table)
+  admin().exists(table)
 end
   
 # CRUD
   
-def get(table_name, row_key, *args)
-  puts "Not implemented yet"
+def get(table, row, args = {})
+  table(table).get(row, args)
 end
 
-def put(table_name, row_key, *args)
-  puts "Not implemented yet"
+def put(table, row, column, value, timestamp = nil)
+  table(table).put(row, column, value, timestamp)
 end
   
-def scan(table_name, start_key, end_key, *args)
-  puts "Not implemented yet"
+def scan(table, columns, args = {})
+  table(table).scan(columns, args)
 end
   
-def delete(table_name, row_key, *args)
-  puts "Not implemented yet"
+def delete(table, row, column, timestamp = nil)
+  table(table).delete(row, column, timestamp)
+end
+
+def deleteall(table, row, column = nil,
+    timestamp = org.apache.hadoop.hbase.HConstants::LATEST_TIMESTAMP)
+  table(table).deleteall(row, column, timestamp)
+end
+
+def deletefc(table, row, column_family,
+    timestamp = org.apache.hadoop.hbase.HConstants::LATEST_TIMESTAMP)
+  table(table).deletefc(row, column_family, timestamp)
 end
 
 # Output a banner message that tells users where to go for help
@@ -226,9 +278,9 @@
     # @EXTEND_COMMANDS.each{|x| puts x if x[0] == :irb_help}
   end
 
+  # Subclass of IRB so can intercept methods
   class HIRB < Irb
-    # Subclass irb so can intercept methods
-
+    
     def output_value
       # Suppress output if last_value is 'nil'
       # Otherwise, when user types help, get ugly 'nil'

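Taken together, the command functions above turn hirb into a usable shell.  A
short session sketch drawn from the help text (table, family, and values are
made up):

    hbase> create 't1', {NAME => 'f1', VERSIONS => 5}
    hbase> put 't1', 'r1', 'f1:a', 'v1'
    hbase> get 't1', 'r1', {COLUMN => 'f1:a'}
    hbase> scan 't1', ['f1:'], {LIMIT => 10}
    hbase> disable 't1'
    hbase> drop 't1'
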
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HColumnDescriptor.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HColumnDescriptor.java?rev=668880&r1=668879&r2=668880&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HColumnDescriptor.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HColumnDescriptor.java Tue Jun 17 16:58:05 2008
@@ -57,12 +57,10 @@
   }
 
   // Defines for jruby/shell
-  public static final String NAME = "NAME";
-  public static final String MAX_VERSIONS = "MAX_VERSIONS";
   public static final String COMPRESSION = "COMPRESSION";
   public static final String IN_MEMORY = "IN_MEMORY";
   public static final String BLOCKCACHE = "BLOCKCACHE";
-  public static final String MAX_LENGTH = "MAX_LENGTH";
+  public static final String LENGTH = "LENGTH";
   public static final String TTL = "TTL";
   public static final String BLOOMFILTER = "BLOOMFILTER";
   public static final String FOREVER = "FOREVER";
@@ -76,12 +74,12 @@
   /**
    * Default number of versions of a record to keep.
    */
-  public static final int DEFAULT_MAX_VERSIONS = 3;
+  public static final int DEFAULT_VERSIONS = 3;
 
   /**
    * Default maximum cell length.
    */
-  public static final int DEFAULT_MAX_LENGTH = Integer.MAX_VALUE;
+  public static final int DEFAULT_LENGTH = Integer.MAX_VALUE;
 
   /**
    * Default setting for whether to serve from memory or not.
@@ -94,11 +92,6 @@
   public static final boolean DEFAULT_BLOCKCACHE = false;
 
   /**
-   * Default maximum length of cell contents.
-   */
-  public static final int DEFAULT_MAX_VALUE_LENGTH = Integer.MAX_VALUE;
-
-  /**
    * Default time to live of cell contents.
    */
   public static final int DEFAULT_TTL = HConstants.FOREVER;
@@ -111,7 +104,7 @@
   // Column family name
   private byte [] name;
   // Number of versions to keep
-  private int maxVersions = DEFAULT_MAX_VERSIONS;
+  private int maxVersions = DEFAULT_VERSIONS;
   // Compression setting if any
   private CompressionType compressionType = DEFAULT_COMPRESSION;
   // Serve reads from in-memory cache
@@ -119,7 +112,7 @@
   // Serve reads from in-memory block cache
   private boolean blockCacheEnabled = DEFAULT_BLOCKCACHE;
   // Maximum value size
-  private int maxValueLength = DEFAULT_MAX_LENGTH;
+  private int maxValueLength = DEFAULT_LENGTH;
   // Time to live of cell contents, in seconds from last timestamp
   private int timeToLive = DEFAULT_TTL;
   // True if bloom filter was specified
@@ -163,7 +156,7 @@
   public HColumnDescriptor(final byte [] columnName) {
     this (columnName == null || columnName.length <= 0?
       HConstants.EMPTY_BYTE_ARRAY: columnName,
-      DEFAULT_MAX_VERSIONS, DEFAULT_COMPRESSION, DEFAULT_IN_MEMORY,
+      DEFAULT_VERSIONS, DEFAULT_COMPRESSION, DEFAULT_IN_MEMORY,
       DEFAULT_BLOCKCACHE, 
       Integer.MAX_VALUE, DEFAULT_TTL,
       DEFAULT_BLOOMFILTER);
@@ -311,12 +304,12 @@
   /** {@inheritDoc} */
   @Override
   public String toString() {
-    return "{" + NAME + " => '" + Bytes.toString(name) +
-      "', " + MAX_VERSIONS + " => " + maxVersions +
+    return "{" + HConstants.NAME + " => '" + Bytes.toString(name) +
+      "', " + HConstants.VERSIONS + " => " + maxVersions +
       ", " + COMPRESSION + " => '" + this.compressionType +
       "', " + IN_MEMORY + " => " + inMemory +
       ", " + BLOCKCACHE + " => " + blockCacheEnabled +
-      ", " + MAX_LENGTH + " => " + maxValueLength +
+      ", " + LENGTH + " => " + maxValueLength +
       ", " + TTL + " => " +
           (timeToLive == HConstants.FOREVER ? "FOREVER" : 
               Integer.toString(timeToLive)) +

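The constant renames here (MAX_VERSIONS to VERSIONS, MAX_LENGTH to LENGTH,
with NAME and VERSIONS moving to HConstants) track the shell's dictionary
keys.  For reference, a sketch of the full constructor that HBase.rb's hcd()
calls, written with the new names from JRuby (the family name is made up):

    HColumnDescriptor.new('f1:'.to_java_bytes,
      HColumnDescriptor::DEFAULT_VERSIONS,
      HColumnDescriptor::DEFAULT_COMPRESSION,
      HColumnDescriptor::DEFAULT_IN_MEMORY,
      HColumnDescriptor::DEFAULT_BLOCKCACHE,
      HColumnDescriptor::DEFAULT_LENGTH,
      HColumnDescriptor::DEFAULT_TTL,
      HColumnDescriptor::DEFAULT_BLOOMFILTER)
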
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HConstants.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HConstants.java?rev=668880&r1=668879&r2=668880&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HConstants.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HConstants.java Tue Jun 17 16:58:05 2008
@@ -219,5 +219,7 @@
   public static final String HBASE_CLIENT_RETRIES_NUMBER_KEY =
     "hbase.client.retries.number";
   public static final int DEFAULT_CLIENT_RETRIES = 5;
-  
+
+  public static final String NAME = "NAME";
+  public static final String VERSIONS = "VERSIONS";
 }
\ No newline at end of file

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HRegionInfo.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HRegionInfo.java?rev=668880&r1=668879&r2=668880&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HRegionInfo.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HRegionInfo.java Tue Jun 17 16:58:05 2008
@@ -339,7 +339,7 @@
    */
   @Override
   public String toString() {
-    return "REGION => {" + HColumnDescriptor.NAME + " => '" +
+    return "REGION => {" + HConstants.NAME + " => '" +
       this.regionNameStr +
       "', STARTKEY => '" +
       Bytes.toString(this.startKey) + "', ENDKEY => '" +

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/HConnectionManager.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/HConnectionManager.java?rev=668880&r1=668879&r2=668880&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/HConnectionManager.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/HConnectionManager.java Tue Jun 17 16:58:05 2008
@@ -410,8 +410,7 @@
             metaLocation.getRegionInfo().getRegionName(), metaKey);
 
           if (regionInfoRow == null) {
-            throw new TableNotFoundException("Table '" +
-              Bytes.toString(tableName) + "' does not exist.");
+            throw new TableNotFoundException(Bytes.toString(tableName));
           }
 
           Cell value = regionInfoRow.get(COL_REGIONINFO);
@@ -433,7 +432,7 @@
 
           if (regionInfo.isOffline()) {
             throw new RegionOfflineException("region offline: " + 
-              regionInfo.getRegionName());
+              regionInfo.getRegionNameAsString());
           }
           
           String serverAddress = 

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/HTable.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/HTable.java?rev=668880&r1=668879&r2=668880&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/HTable.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/HTable.java Tue Jun 17 16:58:05 2008
@@ -1155,24 +1155,27 @@
    *
    * @param row The row to operate on
    * @param family The column family to match
+   * @param timestamp Timestamp to match
    * @throws IOException
    */  
-  public void deleteFamily(final Text row, final Text family) throws IOException{
-    deleteFamily(row.getBytes(), family.getBytes(),
-      HConstants.LATEST_TIMESTAMP);
+  public void deleteFamily(final Text row, final Text family,
+      final long timestamp)
+  throws IOException{
+    deleteFamily(row.getBytes(), family.getBytes(), timestamp);
   }
-  
+
   /**
    * Delete all cells for a row with matching column family at all timestamps.
    *
    * @param row The row to operate on
    * @param family The column family to match
+   * @param timestamp Timestamp to match
    * @throws IOException
    */  
-  public void deleteFamily(final String row, final String family)
+  public void deleteFamily(final String row, final String family,
+      final long timestamp)
   throws IOException{
-    deleteFamily(Bytes.toBytes(row), Bytes.toBytes(family),
-      HConstants.LATEST_TIMESTAMP);
+    deleteFamily(Bytes.toBytes(row), Bytes.toBytes(family), timestamp);
   }
 
   /**

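With the no-timestamp String/Text overloads gone, callers must say which
timestamp they mean; the HBase.rb deletefc wrapper above passes one
explicitly.  A one-line sketch from JRuby (row and family are made up, table
is an HTable instance):

    table.deleteFamily('r1', 'f1:', HConstants::LATEST_TIMESTAMP)
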
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/BatchOperation.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/BatchOperation.java?rev=668880&r1=668879&r2=668880&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/BatchOperation.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/BatchOperation.java Tue Jun 17 16:58:05 2008
@@ -45,7 +45,7 @@
    * Default constructor
    */
   public BatchOperation() {
-    this(null);
+    this((byte [])null);
   }
 
   /**
@@ -57,6 +57,23 @@
   }
 
   /**
+   * Creates a DELETE batch operation.
+   * @param column column name
+   */
+  public BatchOperation(final String column) {
+    this(Bytes.toBytes(column), null);
+  }
+
+  /**
+   * Create a batch operation.
+   * @param column column name
+   * @param value column value.  If non-null, this is a PUT operation.
+   */
+  public BatchOperation(final String column, String value) {
+    this(Bytes.toBytes(column), Bytes.toBytes(value));
+  }
+
+  /**
    * Create a batch operation.
    * @param column column name
    * @param value column value.  If non-null, this is a PUT operation.

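The new String constructors mirror the byte[] ones: a nil value makes the
operation a DELETE, a non-null value a PUT.  A sketch from JRuby (column and
value are made up):

    del = BatchOperation.new('f1:a')        # nil value: DELETE of f1:a
    put = BatchOperation.new('f1:a', 'v1')  # non-null value: PUT of 'v1'
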
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/Cell.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/Cell.java?rev=668880&r1=668879&r2=668880&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/Cell.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/Cell.java Tue Jun 17 16:58:05 2008
@@ -41,7 +41,16 @@
     value = null;
     timestamp = 0;
   }
-  
+
+  /**
+   * Create a new Cell with a given value and timestamp. Used by HStore.
+   * @param value
+   * @param timestamp
+   */
+  public Cell(String value, long timestamp) {
+    this(Bytes.toBytes(value), timestamp);
+  }
+
   /**
    * Create a new Cell with a given value and timestamp. Used by HStore.
    * @param value

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/TableOperation.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/TableOperation.java?rev=668880&r1=668879&r2=668880&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/TableOperation.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/TableOperation.java Tue Jun 17 16:58:05 2008
@@ -124,7 +124,7 @@
       }
 
       if (!tableExists) {
-        throw new TableNotFoundException(tableName + " does not exist");
+        throw new TableNotFoundException(Bytes.toString(tableName));
       }
 
       postProcessMeta(m, server);

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HStore.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HStore.java?rev=668880&r1=668879&r2=668880&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HStore.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HStore.java Tue Jun 17 16:58:05 2008
@@ -472,7 +472,7 @@
       Path p = datfiles[i].getPath();
       // If does not have sympathetic info file, delete.
       if (!mapfiles.contains(fs.makeQualified(p))) {
-        fs.delete(p, false);
+        fs.delete(p, true);
       }
     }
     return results;

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/NoSuchColumnFamilyException.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/NoSuchColumnFamilyException.java?rev=668880&r1=668879&r2=668880&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/NoSuchColumnFamilyException.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/NoSuchColumnFamilyException.java Tue Jun 17 16:58:05 2008
@@ -19,12 +19,12 @@
  */
 package org.apache.hadoop.hbase.regionserver;
 
-import java.io.IOException;
+import org.apache.hadoop.hbase.DoNotRetryIOException;
 
 /**
  * Thrown if request for nonexistent column family.
  */
-public class NoSuchColumnFamilyException extends IOException {
+public class NoSuchColumnFamilyException extends DoNotRetryIOException {
   public NoSuchColumnFamilyException() {
     super();
   }

Modified: hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/TestBloomFilters.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/TestBloomFilters.java?rev=668880&r1=668879&r2=668880&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/TestBloomFilters.java (original)
+++ hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/TestBloomFilters.java Tue Jun 17 16:58:05 2008
@@ -173,7 +173,7 @@
             HColumnDescriptor.CompressionType.NONE,   // no compression
             HColumnDescriptor.DEFAULT_IN_MEMORY,      // not in memory
             HColumnDescriptor.DEFAULT_BLOCKCACHE,
-            HColumnDescriptor.DEFAULT_MAX_VALUE_LENGTH,
+            HColumnDescriptor.DEFAULT_LENGTH,
             HColumnDescriptor.DEFAULT_TTL,
             bloomFilter
         )
@@ -239,7 +239,7 @@
             HColumnDescriptor.CompressionType.NONE,   // no compression
             HColumnDescriptor.DEFAULT_IN_MEMORY,      // not in memory
             HColumnDescriptor.DEFAULT_BLOCKCACHE,
-            HColumnDescriptor.DEFAULT_MAX_VALUE_LENGTH,
+            HColumnDescriptor.DEFAULT_LENGTH,
             HColumnDescriptor.DEFAULT_TTL,
             bloomFilter
         )

