Author: stack
Date: Fri Oct  8 18:03:03 2010
New Revision: 1005936

URL: http://svn.apache.org/viewvc?rev=1005936&view=rev
Log:
HBASE-3092 Replace deprecated 'new HBaseConfiguration(...)' calls

Modified:
    hbase/trunk/CHANGES.txt
    hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapred/HRegionPartitioner.java
    hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapred/RowCounter.java
    hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapred/TableInputFormat.java
    hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapred/TableInputFormatBase.java
    hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapred/TableMapReduceUtil.java
    hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapred/TableOutputFormat.java
    hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.java
    hbase/trunk/src/test/java/org/apache/hadoop/hbase/HBaseTestCase.java
    hbase/trunk/src/test/java/org/apache/hadoop/hbase/MapFilePerformanceEvaluation.java
    hbase/trunk/src/test/java/org/apache/hadoop/hbase/io/TestHbaseObjectWritable.java
    hbase/trunk/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTimeRangeMapRed.java
    hbase/trunk/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java

Modified: hbase/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hbase/trunk/CHANGES.txt?rev=1005936&r1=1005935&r2=1005936&view=diff
==============================================================================
--- hbase/trunk/CHANGES.txt (original)
+++ hbase/trunk/CHANGES.txt Fri Oct  8 18:03:03 2010
@@ -981,6 +981,8 @@ Release 0.21.0 - Unreleased
    HBASE-2917  Reseek directly to next row (Pranav Khaitan)
    HBASE-2907  [rest/stargate] Improve error response when trying to create a
                scanner on a nonexistant table
+   HBASE-3092  Replace deprecated "new HBaseConfiguration(...)" calls
+               (Lars Francke)
 
   NEW FEATURES
    HBASE-1961  HBase EC2 scripts

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapred/HRegionPartitioner.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapred/HRegionPartitioner.java?rev=1005936&r1=1005935&r2=1005936&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapred/HRegionPartitioner.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapred/HRegionPartitioner.java Fri Oct  8 18:03:03 2010
@@ -48,7 +48,7 @@ implements Partitioner<ImmutableBytesWri
 
   public void configure(JobConf job) {
     try {
-      this.table = new HTable(new HBaseConfiguration(job),
+      this.table = new HTable(HBaseConfiguration.create(job),
         job.get(TableOutputFormat.OUTPUT_TABLE));
     } catch (IOException e) {
       LOG.error(e);

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapred/RowCounter.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapred/RowCounter.java?rev=1005936&r1=1005935&r2=1005936&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapred/RowCounter.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapred/RowCounter.java Fri Oct  8 18:03:03 2010
@@ -130,8 +130,7 @@ public class RowCounter extends Configur
    * @throws Exception
    */
   public static void main(String[] args) throws Exception {
-    HBaseConfiguration c = new HBaseConfiguration();
-    int errCode = ToolRunner.run(c, new RowCounter(), args);
+    int errCode = ToolRunner.run(HBaseConfiguration.create(), new RowCounter(), args);
     System.exit(errCode);
   }
 }
\ No newline at end of file

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapred/TableInputFormat.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapred/TableInputFormat.java?rev=1005936&r1=1005935&r2=1005936&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapred/TableInputFormat.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapred/TableInputFormat.java Fri Oct  8 18:03:03 2010
@@ -55,7 +55,7 @@ public class TableInputFormat extends Ta
     }
     setInputColumns(m_cols);
     try {
-      setHTable(new HTable(new HBaseConfiguration(job), tableNames[0].getName()));
+      setHTable(new HTable(HBaseConfiguration.create(job), tableNames[0].getName()));
     } catch (Exception e) {
       LOG.error(StringUtils.stringifyException(e));
     }

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapred/TableInputFormatBase.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapred/TableInputFormatBase.java?rev=1005936&r1=1005935&r2=1005936&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapred/TableInputFormatBase.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapred/TableInputFormatBase.java Fri Oct  8 18:03:03 2010
@@ -45,7 +45,7 @@ import org.apache.hadoop.mapred.Reporter
  *   class ExampleTIF extends TableInputFormatBase implements JobConfigurable {
  *
  *     public void configure(JobConf job) {
- *       HTable exampleTable = new HTable(new HBaseConfiguration(job),
+ *       HTable exampleTable = new HTable(HBaseConfiguration.create(job),
  *         Bytes.toBytes("exampleTable"));
  *       // mandatory
  *       setHTable(exampleTable);

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapred/TableMapReduceUtil.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapred/TableMapReduceUtil.java?rev=1005936&r1=1005935&r2=1005936&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapred/TableMapReduceUtil.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapred/TableMapReduceUtil.java Fri Oct  8 18:03:03 2010
@@ -106,7 +106,7 @@ public class TableMapReduceUtil {
     job.setOutputValueClass(Put.class);
     if (partitioner == HRegionPartitioner.class) {
       job.setPartitionerClass(HRegionPartitioner.class);
-      HTable outputTable = new HTable(new HBaseConfiguration(job), table);
+      HTable outputTable = new HTable(HBaseConfiguration.create(job), table);
       int regions = outputTable.getRegionsInfo().size();
       if (job.getNumReduceTasks() > regions) {
         job.setNumReduceTasks(outputTable.getRegionsInfo().size());
@@ -127,7 +127,7 @@ public class TableMapReduceUtil {
    */
   public static void limitNumReduceTasks(String table, JobConf job)
   throws IOException {
-    HTable outputTable = new HTable(new HBaseConfiguration(job), table);
+    HTable outputTable = new HTable(HBaseConfiguration.create(job), table);
     int regions = outputTable.getRegionsInfo().size();
     if (job.getNumReduceTasks() > regions)
       job.setNumReduceTasks(regions);
@@ -143,7 +143,7 @@ public class TableMapReduceUtil {
    */
   public static void limitNumMapTasks(String table, JobConf job)
   throws IOException {
-    HTable outputTable = new HTable(new HBaseConfiguration(job), table);
+    HTable outputTable = new HTable(HBaseConfiguration.create(job), table);
     int regions = outputTable.getRegionsInfo().size();
     if (job.getNumMapTasks() > regions)
       job.setNumMapTasks(regions);
@@ -159,7 +159,7 @@ public class TableMapReduceUtil {
    */
   public static void setNumReduceTasks(String table, JobConf job)
   throws IOException {
-    HTable outputTable = new HTable(new HBaseConfiguration(job), table);
+    HTable outputTable = new HTable(HBaseConfiguration.create(job), table);
     int regions = outputTable.getRegionsInfo().size();
     job.setNumReduceTasks(regions);
   }
@@ -174,7 +174,7 @@ public class TableMapReduceUtil {
    */
   public static void setNumMapTasks(String table, JobConf job)
   throws IOException {
-    HTable outputTable = new HTable(new HBaseConfiguration(job), table);
+    HTable outputTable = new HTable(HBaseConfiguration.create(job), table);
     int regions = outputTable.getRegionsInfo().size();
     job.setNumMapTasks(regions);
   }

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapred/TableOutputFormat.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapred/TableOutputFormat.java?rev=1005936&r1=1005935&r2=1005936&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapred/TableOutputFormat.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapred/TableOutputFormat.java Fri Oct  8 18:03:03 2010
@@ -85,7 +85,7 @@ FileOutputFormat<ImmutableBytesWritable,
     String tableName = job.get(OUTPUT_TABLE);
     HTable table = null;
     try {
-      table = new HTable(new HBaseConfiguration(job), tableName);
+      table = new HTable(HBaseConfiguration.create(job), tableName);
     } catch(IOException e) {
       LOG.error(e);
       throw e;

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.java?rev=1005936&r1=1005935&r2=1005936&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.java Fri Oct  8 18:03:03 2010
@@ -49,7 +49,7 @@ import org.apache.hadoop.util.StringUtil
  *   class ExampleTIF extends TableInputFormatBase implements JobConfigurable {
  *
  *     public void configure(JobConf job) {
- *       HTable exampleTable = new HTable(new HBaseConfiguration(job),
+ *       HTable exampleTable = new HTable(HBaseConfiguration.create(job),
  *         Bytes.toBytes("exampleTable"));
  *       // mandatory
  *       setHTable(exampleTable);

Modified: hbase/trunk/src/test/java/org/apache/hadoop/hbase/HBaseTestCase.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/test/java/org/apache/hadoop/hbase/HBaseTestCase.java?rev=1005936&r1=1005935&r2=1005936&view=diff
==============================================================================
--- hbase/trunk/src/test/java/org/apache/hadoop/hbase/HBaseTestCase.java (original)
+++ hbase/trunk/src/test/java/org/apache/hadoop/hbase/HBaseTestCase.java Fri Oct  8 18:03:03 2010
@@ -31,6 +31,7 @@ import junit.framework.TestCase;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.client.Delete;
@@ -77,7 +78,7 @@ public abstract class HBaseTestCase exte
     initialize();
   }
 
-  public volatile HBaseConfiguration conf;
+  public volatile Configuration conf;
 
   /** constructor */
   public HBaseTestCase() {
@@ -94,7 +95,7 @@ public abstract class HBaseTestCase exte
   }
 
   private void init() {
-    conf = new HBaseConfiguration();
+    conf = HBaseConfiguration.create();
     try {
       START_KEY = new String(START_KEY_BYTES, HConstants.UTF8_ENCODING);
     } catch (UnsupportedEncodingException e) {
@@ -194,7 +195,7 @@ public abstract class HBaseTestCase exte
     HTableDescriptor htd = new HTableDescriptor(name);
     htd.addFamily(new HColumnDescriptor(fam1, versions,
       HColumnDescriptor.DEFAULT_COMPRESSION, false, false,
-      Integer.MAX_VALUE, HConstants.FOREVER,
+      Integer.MAX_VALUE, HConstants.FOREVER, 
       HColumnDescriptor.DEFAULT_BLOOMFILTER,
       HConstants.REPLICATION_SCOPE_LOCAL));
     htd.addFamily(new HColumnDescriptor(fam2, versions,
@@ -670,7 +671,7 @@ public abstract class HBaseTestCase exte
   }
 
   public static void assertByteEquals(byte[] expected,
-                                      byte[] actual) {
+                               byte[] actual) {
     if (Bytes.compareTo(expected, actual) != 0) {
       throw new AssertionFailedError("expected:<" +
       Bytes.toString(expected) + "> but was:<" +

Modified: hbase/trunk/src/test/java/org/apache/hadoop/hbase/MapFilePerformanceEvaluation.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/test/java/org/apache/hadoop/hbase/MapFilePerformanceEvaluation.java?rev=1005936&r1=1005935&r2=1005936&view=diff
==============================================================================
--- hbase/trunk/src/test/java/org/apache/hadoop/hbase/MapFilePerformanceEvaluation.java (original)
+++ hbase/trunk/src/test/java/org/apache/hadoop/hbase/MapFilePerformanceEvaluation.java Fri Oct  8 18:03:03 2010
@@ -40,7 +40,7 @@ import org.apache.hadoop.io.WritableComp
  * </p>
  */
 public class MapFilePerformanceEvaluation {
-  protected final HBaseConfiguration conf;
+  protected final Configuration conf;
   private static final int ROW_LENGTH = 10;
   private static final int ROW_COUNT = 100000;
 
@@ -50,7 +50,7 @@ public class MapFilePerformanceEvaluatio
   /**
    * @param c
    */
-  public MapFilePerformanceEvaluation(final HBaseConfiguration c) {
+  public MapFilePerformanceEvaluation(final Configuration c) {
     super();
     this.conf = c;
   }
@@ -343,7 +343,7 @@ public class MapFilePerformanceEvaluatio
    * @throws IOException
    */
   public static void main(String[] args) throws Exception {
-    new MapFilePerformanceEvaluation(new HBaseConfiguration()).
+    new MapFilePerformanceEvaluation(HBaseConfiguration.create()).
       runBenchmarks();
   }
 }

Modified: hbase/trunk/src/test/java/org/apache/hadoop/hbase/io/TestHbaseObjectWritable.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/test/java/org/apache/hadoop/hbase/io/TestHbaseObjectWritable.java?rev=1005936&r1=1005935&r2=1005936&view=diff
==============================================================================
--- hbase/trunk/src/test/java/org/apache/hadoop/hbase/io/TestHbaseObjectWritable.java (original)
+++ hbase/trunk/src/test/java/org/apache/hadoop/hbase/io/TestHbaseObjectWritable.java Fri Oct  8 18:03:03 2010
@@ -51,7 +51,7 @@ public class TestHbaseObjectWritable ext
 
   @SuppressWarnings("boxing")
   public void testReadObjectDataInputConfiguration() throws IOException {
-    HBaseConfiguration conf = new HBaseConfiguration();
+    Configuration conf = HBaseConfiguration.create();
     // Do primitive type
     final int COUNT = 101;
     assertTrue(doType(conf, COUNT, int.class).equals(COUNT));

Modified: hbase/trunk/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTimeRangeMapRed.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTimeRangeMapRed.java?rev=1005936&r1=1005935&r2=1005936&view=diff
==============================================================================
--- hbase/trunk/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTimeRangeMapRed.java (original)
+++ hbase/trunk/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTimeRangeMapRed.java Fri Oct  8 18:03:03 2010
@@ -129,7 +129,7 @@ public class TestTimeRangeMapRed extends
     public void setConf(Configuration configuration) {
       this.conf = configuration;
       try {
-        table = new HTable(new HBaseConfiguration(conf), TABLE_NAME);
+        table = new HTable(HBaseConfiguration.create(conf), TABLE_NAME);
       } catch (IOException e) {
         e.printStackTrace();
       }

Modified: hbase/trunk/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java?rev=1005936&r1=1005935&r2=1005936&view=diff
==============================================================================
--- hbase/trunk/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java (original)
+++ hbase/trunk/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java Fri Oct  8 18:03:03 2010
@@ -21,6 +21,7 @@ package org.apache.hadoop.hbase.regionse
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HBaseTestCase;
@@ -117,7 +118,7 @@ public class TestHRegion extends HBaseTe
   
//////////////////////////////////////////////////////////////////////////////
 
   public void testGetWhileRegionClose() throws IOException {
-    HBaseConfiguration hc = initSplit();
+    Configuration hc = initSplit();
     int numRows = 100;
     byte [][] families = {fam1, fam2, fam3};
 
@@ -1194,7 +1195,7 @@ public class TestHRegion extends HBaseTe
     byte [] tableName = Bytes.toBytes("testtable");
     byte [][] families = {fam1, fam2, fam3};
 
-    HBaseConfiguration hc = initSplit();
+    Configuration hc = initSplit();
     //Setting up region
     String method = this.getName();
     initHRegion(tableName, method, hc, families);
@@ -1257,7 +1258,7 @@ public class TestHRegion extends HBaseTe
   public void testMerge() throws IOException {
     byte [] tableName = Bytes.toBytes("testtable");
     byte [][] families = {fam1, fam2, fam3};
-    HBaseConfiguration hc = initSplit();
+    Configuration hc = initSplit();
     //Setting up region
     String method = this.getName();
     initHRegion(tableName, method, hc, families);
@@ -2171,7 +2172,7 @@ public class TestHRegion extends HBaseTe
     byte [] tableName = Bytes.toBytes("testtable");
     byte [][] families = {fam1, fam2, fam3};
 
-    HBaseConfiguration hc = initSplit();
+    Configuration hc = initSplit();
     //Setting up region
     String method = this.getName();
     initHRegion(tableName, method, hc, families);
@@ -2258,7 +2259,7 @@ public class TestHRegion extends HBaseTe
   public void testSplitRegion() throws IOException {
     byte [] tableName = Bytes.toBytes("testtable");
     byte [] qualifier = Bytes.toBytes("qualifier");
-    HBaseConfiguration hc = initSplit();
+    Configuration hc = initSplit();
     int numRows = 10;
     byte [][] families = {fam1, fam3};
 
@@ -2663,7 +2664,7 @@ public class TestHRegion extends HBaseTe
 
     //Setting up region
     String method = "testIndexesScanWithOneDeletedRow";
-    initHRegion(tableName, method, new HBaseConfiguration(), family);
+    initHRegion(tableName, method, HBaseConfiguration.create(), family);
 
     Put put = new Put(Bytes.toBytes(1L));
     put.add(family, qual1, 1L, Bytes.toBytes(1L));
@@ -2867,8 +2868,8 @@ public class TestHRegion extends HBaseTe
     }
   }
 
-  private HBaseConfiguration initSplit() {
-    HBaseConfiguration conf = new HBaseConfiguration();
+  private Configuration initSplit() {
+    Configuration conf = HBaseConfiguration.create();
     // Always compact if there is more than one store file.
     conf.setInt("hbase.hstore.compactionThreshold", 2);
 
@@ -2889,11 +2890,11 @@ public class TestHRegion extends HBaseTe
   private void initHRegion (byte [] tableName, String callingMethod,
     byte[] ... families)
   throws IOException {
-    initHRegion(tableName, callingMethod, new HBaseConfiguration(), families);
+    initHRegion(tableName, callingMethod, HBaseConfiguration.create(), families);
   }
 
   private void initHRegion (byte [] tableName, String callingMethod,
-    HBaseConfiguration conf, byte [] ... families)
+    Configuration conf, byte [] ... families)
   throws IOException{
     HTableDescriptor htd = new HTableDescriptor(tableName);
     for(byte [] family : families) {


Reply via email to