http://git-wip-us.apache.org/repos/asf/hbase-site/blob/505bbb2e/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/HRegionPartitioner.html
----------------------------------------------------------------------
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/HRegionPartitioner.html b/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/HRegionPartitioner.html
index ca4c22a..35fccf3 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/HRegionPartitioner.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/HRegionPartitioner.html
@@ -27,125 +27,124 @@
 package org.apache.hadoop.hbase.mapreduce;
 
 import java.io.IOException;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.yetus.audience.InterfaceAudience;
-import org.apache.hadoop.conf.Configurable;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.HBaseConfiguration;
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.Connection;
-import org.apache.hadoop.hbase.client.ConnectionFactory;
-import org.apache.hadoop.hbase.client.RegionLocator;
-import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
-import org.apache.hadoop.hbase.mapred.TableOutputFormat;
-import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.mapreduce.Partitioner;
-
-/**
- * This is used to partition the output keys into groups of keys.
- * Keys are grouped according to the regions that currently exist
- * so that each reducer fills a single region so load is distributed.
- *
- * <p>This class is not suitable as partitioner creating hfiles
- * for incremental bulk loads as region spread will likely change between time of
- * hfile creation and load time. See {@link org.apache.hadoop.hbase.tool.LoadIncrementalHFiles}
- * and <a href="http://hbase.apache.org/book.html#arch.bulk.load">Bulk Load</a>.</p>
- *
- * @param <KEY>  The type of the key.
- * @param <VALUE>  The type of the value.
- */
-@InterfaceAudience.Public
-public class HRegionPartitioner<KEY, VALUE>
-extends Partitioner<ImmutableBytesWritable, VALUE>
-implements Configurable {
-
-  private static final Log LOG = LogFactory.getLog(HRegionPartitioner.class);
-  private Configuration conf = null;
-  // Connection and locator are not cleaned up; they just die when partitioner is done.
-  private Connection connection;
-  private RegionLocator locator;
-  private byte[][] startKeys;
-
-  /**
-   * Gets the partition number for a given key (hence record) given the total
-   * number of partitions i.e. number of reduce-tasks for the job.
-   *
-   * <p>Typically a hash function on a all or a subset of the key.</p>
-   *
-   * @param key  The key to be partitioned.
-   * @param value  The entry value.
-   * @param numPartitions  The total number of partitions.
-   * @return The partition number for the <code>key</code>.
-   * @see org.apache.hadoop.mapreduce.Partitioner#getPartition(
-   *   java.lang.Object, java.lang.Object, int)
-   */
-  @Override
-  public int getPartition(ImmutableBytesWritable key,
-      VALUE value, int numPartitions) {
-    byte[] region = null;
-    // Only one region return 0
-    if (this.startKeys.length == 1){
-      return 0;
-    }
-    try {
-      // Not sure if this is cached after a split so we could have problems
-      // here if a region splits while mapping
-      region = this.locator.getRegionLocation(key.get()).getRegionInfo().getStartKey();
-    } catch (IOException e) {
-      LOG.error(e);
-    }
-    for (int i = 0; i < this.startKeys.length; i++){
-      if (Bytes.compareTo(region, this.startKeys[i]) == 0 ){
-        if (i >= numPartitions-1){
-          // cover if we have less reduces then regions.
-          return (Integer.toString(i).hashCode()
-              & Integer.MAX_VALUE) % numPartitions;
-        }
-        return i;
-      }
-    }
-    // if above fails to find start key that match we need to return something
-    return 0;
-  }
-
-  /**
-   * Returns the current configuration.
-   *
-   * @return The current configuration.
-   * @see org.apache.hadoop.conf.Configurable#getConf()
-   */
-  @Override
-  public Configuration getConf() {
-    return conf;
-  }
-
-  /**
-   * Sets the configuration. This is used to determine the start keys for the
-   * given table.
-   *
-   * @param configuration  The configuration to set.
-   * @see org.apache.hadoop.conf.Configurable#setConf(
-   *   org.apache.hadoop.conf.Configuration)
-   */
-  @Override
-  public void setConf(Configuration configuration) {
-    this.conf = HBaseConfiguration.create(configuration);
-    try {
-      this.connection = ConnectionFactory.createConnection(HBaseConfiguration.create(conf));
-      TableName tableName = TableName.valueOf(conf.get(TableOutputFormat.OUTPUT_TABLE));
-      this.locator = this.connection.getRegionLocator(tableName);
-    } catch (IOException e) {
-      LOG.error(e);
-    }
-    try {
-      this.startKeys = this.locator.getStartKeys();
-    } catch (IOException e) {
-      LOG.error(e);
-    }
-  }
-}
+import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.apache.hadoop.conf.Configurable;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.Connection;
+import org.apache.hadoop.hbase.client.ConnectionFactory;
+import org.apache.hadoop.hbase.client.RegionLocator;
+import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
+import org.apache.hadoop.hbase.mapred.TableOutputFormat;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.mapreduce.Partitioner;
+
+/**
+ * This is used to partition the output keys into groups of keys.
+ * Keys are grouped according to the regions that currently exist
+ * so that each reducer fills a single region so load is distributed.
+ *
+ * <p>This class is not suitable as partitioner creating hfiles
+ * for incremental bulk loads as region spread will likely change between time of
+ * hfile creation and load time. See {@link org.apache.hadoop.hbase.tool.LoadIncrementalHFiles}
+ * and <a href="http://hbase.apache.org/book.html#arch.bulk.load">Bulk Load</a>.</p>
+ *
+ * @param <KEY>  The type of the key.
+ * @param <VALUE>  The type of the value.
+ */
+@InterfaceAudience.Public
+public class HRegionPartitioner<KEY, VALUE>
+extends Partitioner<ImmutableBytesWritable, VALUE>
+implements Configurable {
+
+  private static final Logger LOG = LoggerFactory.getLogger(HRegionPartitioner.class);
+  private Configuration conf = null;
+  // Connection and locator are not cleaned up; they just die when partitioner is done.
+  private Connection connection;
+  private RegionLocator locator;
+  private byte[][] startKeys;
+
+  /**
+   * Gets the partition number for a given key (hence record) given the total
+   * number of partitions i.e. number of reduce-tasks for the job.
+   *
+   * <p>Typically a hash function on a all or a subset of the key.</p>
+   *
+   * @param key  The key to be partitioned.
+   * @param value  The entry value.
+   * @param numPartitions  The total number of partitions.
+   * @return The partition number for the <code>key</code>.
+   * @see org.apache.hadoop.mapreduce.Partitioner#getPartition(
+   *   java.lang.Object, java.lang.Object, int)
+   */
+  @Override
+  public int getPartition(ImmutableBytesWritable key,
+      VALUE value, int numPartitions) {
+    byte[] region = null;
+    // Only one region return 0
+    if (this.startKeys.length == 1){
+      return 0;
+    }
+    try {
+      // Not sure if this is cached after a split so we could have problems
+      // here if a region splits while mapping
+      region = this.locator.getRegionLocation(key.get()).getRegionInfo().getStartKey();
+    } catch (IOException e) {
+      LOG.error(e.toString(), e);
+    }
+    for (int i = 0; i < this.startKeys.length; i++){
+      if (Bytes.compareTo(region, this.startKeys[i]) == 0 ){
+        if (i >= numPartitions-1){
+          // cover if we have less reduces then regions.
+          return (Integer.toString(i).hashCode()
+              & Integer.MAX_VALUE) % numPartitions;
+        }
+        return i;
+      }
+    }
+    // if above fails to find start key that match we need to return something
+    return 0;
+  }
+
+  /**
+   * Returns the current configuration.
+   *
+   * @return The current configuration.
+   * @see org.apache.hadoop.conf.Configurable#getConf()
+   */
+  @Override
+  public Configuration getConf() {
+    return conf;
+  }
+
+  /**
+   * Sets the configuration. This is used to determine the start keys for the
+   * given table.
+   *
+   * @param configuration  The configuration to set.
+   * @see org.apache.hadoop.conf.Configurable#setConf(
+   *   org.apache.hadoop.conf.Configuration)
+   */
+  @Override
+  public void setConf(Configuration configuration) {
+    this.conf = HBaseConfiguration.create(configuration);
+    try {
+      this.connection = ConnectionFactory.createConnection(HBaseConfiguration.create(conf));
+      TableName tableName = TableName.valueOf(conf.get(TableOutputFormat.OUTPUT_TABLE));
+      this.locator = this.connection.getRegionLocator(tableName);
+    } catch (IOException e) {
+      LOG.error(e.toString(), e);
+    }
+    try {
+      this.startKeys = this.locator.getStartKeys();
+    } catch (IOException e) {
+      LOG.error(e.toString(), e);
+    }
+  }
+}
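Every file in this commit gets the same two-part migration: commons-logging's Log/LogFactory is swapped for SLF4J's Logger/LoggerFactory, and exception logging is adjusted because commons-logging's error(Object) accepted a bare exception while SLF4J's error(String, Throwable) wants an explicit message plus the throwable. That is why LOG.error(e) becomes LOG.error(e.toString(), e) above. A minimal sketch of the pattern, with the hypothetical class name MigrationSketch standing in for any of the migrated classes:

    import java.io.IOException;

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class MigrationSketch {
      // Before: private static final Log LOG = LogFactory.getLog(MigrationSketch.class);
      private static final Logger LOG = LoggerFactory.getLogger(MigrationSketch.class);

      void run() {
        try {
          throw new IOException("simulated failure");
        } catch (IOException e) {
          // Before: LOG.error(e); -- compiled only because commons-logging takes Object.
          // SLF4J requires a String message, so the commit passes e.toString() as the
          // message and e as the Throwable so the stack trace is still logged.
          LOG.error(e.toString(), e);
        }
      }
    }

Unrelated to logging, note how getPartition above maps overflow regions onto reducers with (hashCode() & Integer.MAX_VALUE) % numPartitions: masking off the sign bit keeps the result non-negative even when the hash is negative.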
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/505bbb2e/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/IdentityTableReducer.html
----------------------------------------------------------------------
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/IdentityTableReducer.html b/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/IdentityTableReducer.html
index c2c7ef8..1e20dab 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/IdentityTableReducer.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/IdentityTableReducer.html
@@ -28,9 +28,9 @@
 
 import java.io.IOException;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.yetus.audience.InterfaceAudience;
+import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hbase.client.Mutation;
 import org.apache.hadoop.io.Writable;
 
@@ -61,7 +61,7 @@
 extends TableReducer<Writable, Mutation, Writable> {
 
   @SuppressWarnings("unused")
-  private static final Log LOG = LogFactory.getLog(IdentityTableReducer.class);
+  private static final Logger LOG = LoggerFactory.getLogger(IdentityTableReducer.class);
 
   /**
    * Writes each given record, consisting of the row key and the given values,

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/505bbb2e/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/Import.html
----------------------------------------------------------------------
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/Import.html b/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/Import.html
index f48e20e..db707b0 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/Import.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/Import.html
@@ -41,52 +41,52 @@
 import java.util.TreeMap;
 import java.util.UUID;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.conf.Configured;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.Cell;
-import org.apache.hadoop.hbase.CellComparator;
-import org.apache.hadoop.hbase.CellUtil;
-import org.apache.hadoop.hbase.HBaseConfiguration;
-import org.apache.hadoop.hbase.PrivateCellUtil;
-import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.KeyValueUtil;
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.ZooKeeperConnectionException;
-import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
-import org.apache.yetus.audience.InterfaceAudience;
-import org.apache.hadoop.hbase.client.Admin;
-import org.apache.hadoop.hbase.client.Connection;
-import org.apache.hadoop.hbase.client.ConnectionFactory;
-import org.apache.hadoop.hbase.client.Delete;
-import org.apache.hadoop.hbase.client.Durability;
-import org.apache.hadoop.hbase.client.Mutation;
-import org.apache.hadoop.hbase.client.Put;
-import org.apache.hadoop.hbase.client.RegionLocator;
-import org.apache.hadoop.hbase.client.Result;
-import org.apache.hadoop.hbase.client.Table;
-import org.apache.hadoop.hbase.filter.Filter;
-import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
-import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.hbase.util.MapReduceCell;
-import org.apache.hadoop.hbase.zookeeper.ZKClusterId;
-import org.apache.hadoop.io.RawComparator;
-import org.apache.hadoop.io.WritableComparable;
-import org.apache.hadoop.io.WritableComparator;
-import org.apache.hadoop.mapreduce.Job;
-import org.apache.hadoop.mapreduce.Partitioner;
-import org.apache.hadoop.mapreduce.Reducer;
-import org.apache.hadoop.mapreduce.TaskCounter;
-import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
-import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
-import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
-import org.apache.hadoop.mapreduce.lib.partition.TotalOrderPartitioner;
-import org.apache.hadoop.util.Tool;
-import org.apache.hadoop.util.ToolRunner;
-import org.apache.zookeeper.KeeperException;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.conf.Configured;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellComparator;
+import org.apache.hadoop.hbase.CellUtil;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.PrivateCellUtil;
+import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.KeyValueUtil;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.ZooKeeperConnectionException;
+import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
+import org.apache.yetus.audience.InterfaceAudience;
+import org.apache.hadoop.hbase.client.Admin;
+import org.apache.hadoop.hbase.client.Connection;
+import org.apache.hadoop.hbase.client.ConnectionFactory;
+import org.apache.hadoop.hbase.client.Delete;
+import org.apache.hadoop.hbase.client.Durability;
+import org.apache.hadoop.hbase.client.Mutation;
+import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.client.RegionLocator;
+import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.filter.Filter;
+import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.MapReduceCell;
+import org.apache.hadoop.hbase.zookeeper.ZKClusterId;
+import org.apache.hadoop.io.RawComparator;
+import org.apache.hadoop.io.WritableComparable;
+import org.apache.hadoop.io.WritableComparator;
+import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.mapreduce.Partitioner;
+import org.apache.hadoop.mapreduce.Reducer;
+import org.apache.hadoop.mapreduce.TaskCounter;
+import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
+import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
+import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
+import org.apache.hadoop.mapreduce.lib.partition.TotalOrderPartitioner;
+import org.apache.hadoop.util.Tool;
+import org.apache.hadoop.util.ToolRunner;
+import org.apache.zookeeper.KeeperException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 
 /**
@@ -94,7 +94,7 @@
  */
 @InterfaceAudience.Public
 public class Import extends Configured implements Tool {
-  private static final Log LOG = LogFactory.getLog(Import.class);
+  private static final Logger LOG = LoggerFactory.getLogger(Import.class);
   final static String NAME = "import";
   public final static String CF_RENAME_PROP = "HBASE_IMPORTER_RENAME_CFS";
   public final static String BULK_OUTPUT_CONF_KEY = "import.bulk.output";
@@ -200,7 +200,7 @@
       extends TableMapper<CellWritableComparable, Cell> {
     private Map<byte[], byte[]> cfRenameMap;
     private Filter filter;
-    private static final Log LOG = LogFactory.getLog(CellImporter.class);
+    private static final Logger LOG = LoggerFactory.getLogger(CellImporter.class);
 
     /**
      * @param row  The current table row key.
@@ -264,7 +264,7 @@
   public static class CellImporter extends TableMapper<ImmutableBytesWritable, Cell> {
     private Map<byte[], byte[]> cfRenameMap;
     private Filter filter;
-    private static final Log LOG = LogFactory.getLog(CellImporter.class);
+    private static final Logger LOG = LoggerFactory.getLogger(CellImporter.class);
 
     /**
      * @param row  The current table row key.

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/505bbb2e/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/ImportTsv.html
----------------------------------------------------------------------
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/ImportTsv.html b/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/ImportTsv.html
index a31ffdc..416162e 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/ImportTsv.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/ImportTsv.html
@@ -35,19 +35,19 @@
 import java.util.Set;
 
 import org.apache.commons.lang3.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.conf.Configured;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.HBaseConfiguration;
-import org.apache.hadoop.hbase.HColumnDescriptor;
-import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.TableNotEnabledException;
-import org.apache.hadoop.hbase.TableNotFoundException;
-import org.apache.yetus.audience.InterfaceAudience;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.conf.Configured;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.TableNotEnabledException;
+import org.apache.hadoop.hbase.TableNotFoundException;
+import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.client.ConnectionFactory;
@@ -85,7 +85,7 @@
 @InterfaceAudience.Public
 public class ImportTsv extends Configured implements Tool {
 
-  protected static final Log LOG = LogFactory.getLog(ImportTsv.class);
+  protected static final Logger LOG = LoggerFactory.getLogger(ImportTsv.class);
 
   final static String NAME = "importtsv";
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/505bbb2e/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/MultiTableHFileOutputFormat.html
----------------------------------------------------------------------
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/MultiTableHFileOutputFormat.html b/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/MultiTableHFileOutputFormat.html
index 19b041e..7deaffb 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/MultiTableHFileOutputFormat.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/MultiTableHFileOutputFormat.html
@@ -26,9 +26,9 @@
 
 package org.apache.hadoop.hbase.mapreduce;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.yetus.audience.InterfaceAudience;
+import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hbase.client.TableDescriptor;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -55,7 +55,7 @@
 @InterfaceAudience.Public
 @VisibleForTesting
 public class MultiTableHFileOutputFormat extends HFileOutputFormat2 {
-  private static final Log LOG = LogFactory.getLog(MultiTableHFileOutputFormat.class);
+  private static final Logger LOG = LoggerFactory.getLogger(MultiTableHFileOutputFormat.class);
 
   /**
    * Creates a composite key to use as a mapper output key when using

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/505bbb2e/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatBase.html
----------------------------------------------------------------------
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatBase.html b/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatBase.html
index cdc47c2..acd0553 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatBase.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatBase.html
@@ -30,9 +30,9 @@
 import java.util.ArrayList;
 import java.util.List;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.yetus.audience.InterfaceAudience;
+import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HRegionLocation;
 import org.apache.hadoop.hbase.TableName;
@@ -63,7 +63,7 @@
 public abstract class MultiTableInputFormatBase extends
     InputFormat<ImmutableBytesWritable, Result> {
 
-  private static final Log LOG = LogFactory.getLog(MultiTableInputFormatBase.class);
+  private static final Logger LOG = LoggerFactory.getLogger(MultiTableInputFormatBase.class);
 
   /** Holds the set of scans used to define the input. */
   private List<Scan> scans;

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/505bbb2e/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/MultiTableOutputFormat.html
----------------------------------------------------------------------
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/MultiTableOutputFormat.html b/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/MultiTableOutputFormat.html
index d42894e..f55c994 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/MultiTableOutputFormat.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/MultiTableOutputFormat.html
@@ -30,9 +30,9 @@
 import java.util.HashMap;
 import java.util.Map;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.yetus.audience.InterfaceAudience;
+import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.TableName;
@@ -80,7 +80,7 @@
    */
  protected static class MultiTableRecordWriter extends
      RecordWriter<ImmutableBytesWritable, Mutation> {
-    private static final Log LOG = LogFactory.getLog(MultiTableRecordWriter.class);
+    private static final Logger LOG = LoggerFactory.getLogger(MultiTableRecordWriter.class);
     Connection connection;
     Map<ImmutableBytesWritable, BufferedMutator> mutatorMap = new HashMap<>();
     Configuration conf;

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/505bbb2e/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/PutCombiner.html
----------------------------------------------------------------------
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/PutCombiner.html b/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/PutCombiner.html
index 7daea21..db30d20 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/PutCombiner.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/PutCombiner.html
@@ -31,9 +31,9 @@
 import java.util.Map.Entry;
 import java.util.Map;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.yetus.audience.InterfaceAudience;
+import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValueUtil;
@@ -47,7 +47,7 @@
  */
 @InterfaceAudience.Public
 public class PutCombiner<K> extends Reducer<K, Put, K, Put> {
-  private static final Log LOG = LogFactory.getLog(PutCombiner.class);
+  private static final Logger LOG = LoggerFactory.getLogger(PutCombiner.class);
 
   @Override
   protected void reduce(K row, Iterable<Put> vals, Context context)

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/505bbb2e/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/ResultSerialization.html
----------------------------------------------------------------------
diff --git 
a/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/ResultSerialization.html 
b/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/ResultSerialization.html
index a9958db..b71480e 100644
--- 
a/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/ResultSerialization.html
+++ 
b/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/ResultSerialization.html
@@ -33,13 +33,13 @@
 <span class="sourceLineNo">025</span>import java.util.ArrayList;<a 
name="line.25"></a>
 <span class="sourceLineNo">026</span>import java.util.List;<a 
name="line.26"></a>
 <span class="sourceLineNo">027</span><a name="line.27"></a>
-<span class="sourceLineNo">028</span>import org.apache.commons.logging.Log;<a 
name="line.28"></a>
-<span class="sourceLineNo">029</span>import 
org.apache.commons.logging.LogFactory;<a name="line.29"></a>
-<span class="sourceLineNo">030</span>import 
org.apache.hadoop.conf.Configuration;<a name="line.30"></a>
-<span class="sourceLineNo">031</span>import 
org.apache.hadoop.conf.Configured;<a name="line.31"></a>
-<span class="sourceLineNo">032</span>import org.apache.hadoop.hbase.Cell;<a 
name="line.32"></a>
-<span class="sourceLineNo">033</span>import 
org.apache.hadoop.hbase.KeyValue;<a name="line.33"></a>
-<span class="sourceLineNo">034</span>import 
org.apache.yetus.audience.InterfaceAudience;<a name="line.34"></a>
+<span class="sourceLineNo">028</span>import 
org.apache.hadoop.conf.Configuration;<a name="line.28"></a>
+<span class="sourceLineNo">029</span>import 
org.apache.hadoop.conf.Configured;<a name="line.29"></a>
+<span class="sourceLineNo">030</span>import org.apache.hadoop.hbase.Cell;<a 
name="line.30"></a>
+<span class="sourceLineNo">031</span>import 
org.apache.hadoop.hbase.KeyValue;<a name="line.31"></a>
+<span class="sourceLineNo">032</span>import 
org.apache.yetus.audience.InterfaceAudience;<a name="line.32"></a>
+<span class="sourceLineNo">033</span>import org.slf4j.Logger;<a 
name="line.33"></a>
+<span class="sourceLineNo">034</span>import org.slf4j.LoggerFactory;<a 
name="line.34"></a>
 <span class="sourceLineNo">035</span>import 
org.apache.hadoop.hbase.client.Result;<a name="line.35"></a>
 <span class="sourceLineNo">036</span>import 
org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;<a name="line.36"></a>
 <span class="sourceLineNo">037</span>import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos;<a 
name="line.37"></a>
@@ -50,7 +50,7 @@
 <span class="sourceLineNo">042</span><a name="line.42"></a>
 <span class="sourceLineNo">043</span>@InterfaceAudience.Public<a 
name="line.43"></a>
 <span class="sourceLineNo">044</span>public class ResultSerialization extends 
Configured implements Serialization&lt;Result&gt; {<a name="line.44"></a>
-<span class="sourceLineNo">045</span>  private static final Log LOG = 
LogFactory.getLog(ResultSerialization.class);<a name="line.45"></a>
+<span class="sourceLineNo">045</span>  private static final Logger LOG = 
LoggerFactory.getLogger(ResultSerialization.class);<a name="line.45"></a>
 <span class="sourceLineNo">046</span>  // The following configuration property 
indicates import file format version.<a name="line.46"></a>
 <span class="sourceLineNo">047</span>  public static final String 
IMPORT_FORMAT_VER = "hbase.import.version";<a name="line.47"></a>
 <span class="sourceLineNo">048</span><a name="line.48"></a>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/505bbb2e/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/RowCounter.html
----------------------------------------------------------------------
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/RowCounter.html 
b/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/RowCounter.html
index a5879af..56ec3ac 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/RowCounter.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/RowCounter.html
@@ -30,11 +30,11 @@
 <span class="sourceLineNo">022</span>import java.util.List;<a 
name="line.22"></a>
 <span class="sourceLineNo">023</span>import java.util.ArrayList;<a 
name="line.23"></a>
 <span class="sourceLineNo">024</span><a name="line.24"></a>
-<span class="sourceLineNo">025</span>import org.apache.commons.logging.Log;<a 
name="line.25"></a>
-<span class="sourceLineNo">026</span>import 
org.apache.commons.logging.LogFactory;<a name="line.26"></a>
-<span class="sourceLineNo">027</span>import 
org.apache.commons.lang3.StringUtils;<a name="line.27"></a>
-<span class="sourceLineNo">028</span>import 
org.apache.hadoop.hbase.HConstants;<a name="line.28"></a>
-<span class="sourceLineNo">029</span>import 
org.apache.yetus.audience.InterfaceAudience;<a name="line.29"></a>
+<span class="sourceLineNo">025</span>import 
org.apache.commons.lang3.StringUtils;<a name="line.25"></a>
+<span class="sourceLineNo">026</span>import 
org.apache.hadoop.hbase.HConstants;<a name="line.26"></a>
+<span class="sourceLineNo">027</span>import 
org.apache.yetus.audience.InterfaceAudience;<a name="line.27"></a>
+<span class="sourceLineNo">028</span>import org.slf4j.Logger;<a 
name="line.28"></a>
+<span class="sourceLineNo">029</span>import org.slf4j.LoggerFactory;<a 
name="line.29"></a>
 <span class="sourceLineNo">030</span>import 
org.apache.hadoop.conf.Configuration;<a name="line.30"></a>
 <span class="sourceLineNo">031</span>import 
org.apache.hadoop.conf.Configured;<a name="line.31"></a>
 <span class="sourceLineNo">032</span>import 
org.apache.hadoop.hbase.HBaseConfiguration;<a name="line.32"></a>
@@ -58,7 +58,7 @@
 <span class="sourceLineNo">050</span>@InterfaceAudience.Public<a 
name="line.50"></a>
 <span class="sourceLineNo">051</span>public class RowCounter extends 
Configured implements Tool {<a name="line.51"></a>
 <span class="sourceLineNo">052</span><a name="line.52"></a>
-<span class="sourceLineNo">053</span>  private static final Log LOG = 
LogFactory.getLog(RowCounter.class);<a name="line.53"></a>
+<span class="sourceLineNo">053</span>  private static final Logger LOG = 
LoggerFactory.getLogger(RowCounter.class);<a name="line.53"></a>
 <span class="sourceLineNo">054</span><a name="line.54"></a>
 <span class="sourceLineNo">055</span>  /** Name of this 'program'. */<a 
name="line.55"></a>
 <span class="sourceLineNo">056</span>  static final String NAME = 
"rowcounter";<a name="line.56"></a>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/505bbb2e/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/SimpleTotalOrderPartitioner.html
----------------------------------------------------------------------
diff --git 
a/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/SimpleTotalOrderPartitioner.html
 
b/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/SimpleTotalOrderPartitioner.html
index e7c1b30..fe63405 100644
--- 
a/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/SimpleTotalOrderPartitioner.html
+++ 
b/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/SimpleTotalOrderPartitioner.html
@@ -26,9 +26,9 @@
 <span class="sourceLineNo">018</span> */<a name="line.18"></a>
 <span class="sourceLineNo">019</span>package 
org.apache.hadoop.hbase.mapreduce;<a name="line.19"></a>
 <span class="sourceLineNo">020</span><a name="line.20"></a>
-<span class="sourceLineNo">021</span>import org.apache.commons.logging.Log;<a 
name="line.21"></a>
-<span class="sourceLineNo">022</span>import 
org.apache.commons.logging.LogFactory;<a name="line.22"></a>
-<span class="sourceLineNo">023</span>import 
org.apache.yetus.audience.InterfaceAudience;<a name="line.23"></a>
+<span class="sourceLineNo">021</span>import 
org.apache.yetus.audience.InterfaceAudience;<a name="line.21"></a>
+<span class="sourceLineNo">022</span>import org.slf4j.Logger;<a 
name="line.22"></a>
+<span class="sourceLineNo">023</span>import org.slf4j.LoggerFactory;<a 
name="line.23"></a>
 <span class="sourceLineNo">024</span>import 
org.apache.hadoop.conf.Configurable;<a name="line.24"></a>
 <span class="sourceLineNo">025</span>import 
org.apache.hadoop.conf.Configuration;<a name="line.25"></a>
 <span class="sourceLineNo">026</span>import 
org.apache.hadoop.hbase.io.ImmutableBytesWritable;<a name="line.26"></a>
@@ -54,7 +54,7 @@
 <span class="sourceLineNo">046</span>@InterfaceAudience.Public<a 
name="line.46"></a>
 <span class="sourceLineNo">047</span>public class 
SimpleTotalOrderPartitioner&lt;VALUE&gt; extends 
Partitioner&lt;ImmutableBytesWritable, VALUE&gt;<a name="line.47"></a>
 <span class="sourceLineNo">048</span>implements Configurable {<a 
name="line.48"></a>
-<span class="sourceLineNo">049</span>  private final static Log LOG = 
LogFactory.getLog(SimpleTotalOrderPartitioner.class);<a name="line.49"></a>
+<span class="sourceLineNo">049</span>  private final static Logger LOG = 
LoggerFactory.getLogger(SimpleTotalOrderPartitioner.class);<a 
name="line.49"></a>
 <span class="sourceLineNo">050</span><a name="line.50"></a>
 <span class="sourceLineNo">051</span>  @Deprecated<a name="line.51"></a>
 <span class="sourceLineNo">052</span>  public static final String START = 
"hbase.simpletotalorder.start";<a name="line.52"></a>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/505bbb2e/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormat.html
----------------------------------------------------------------------
diff --git 
a/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormat.html 
b/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormat.html
index 27a7727..2ca8a6c 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormat.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormat.html
@@ -31,13 +31,13 @@
 <span class="sourceLineNo">023</span>import java.util.List;<a 
name="line.23"></a>
 <span class="sourceLineNo">024</span>import java.util.Locale;<a 
name="line.24"></a>
 <span class="sourceLineNo">025</span><a name="line.25"></a>
-<span class="sourceLineNo">026</span>import org.apache.commons.logging.Log;<a 
name="line.26"></a>
-<span class="sourceLineNo">027</span>import 
org.apache.commons.logging.LogFactory;<a name="line.27"></a>
-<span class="sourceLineNo">028</span>import 
org.apache.hadoop.conf.Configurable;<a name="line.28"></a>
-<span class="sourceLineNo">029</span>import 
org.apache.hadoop.conf.Configuration;<a name="line.29"></a>
-<span class="sourceLineNo">030</span>import 
org.apache.hadoop.hbase.CellUtil;<a name="line.30"></a>
-<span class="sourceLineNo">031</span>import 
org.apache.hadoop.hbase.TableName;<a name="line.31"></a>
-<span class="sourceLineNo">032</span>import 
org.apache.yetus.audience.InterfaceAudience;<a name="line.32"></a>
+<span class="sourceLineNo">026</span>import 
org.apache.hadoop.conf.Configurable;<a name="line.26"></a>
+<span class="sourceLineNo">027</span>import 
org.apache.hadoop.conf.Configuration;<a name="line.27"></a>
+<span class="sourceLineNo">028</span>import 
org.apache.hadoop.hbase.CellUtil;<a name="line.28"></a>
+<span class="sourceLineNo">029</span>import 
org.apache.hadoop.hbase.TableName;<a name="line.29"></a>
+<span class="sourceLineNo">030</span>import 
org.apache.yetus.audience.InterfaceAudience;<a name="line.30"></a>
+<span class="sourceLineNo">031</span>import org.slf4j.Logger;<a 
name="line.31"></a>
+<span class="sourceLineNo">032</span>import org.slf4j.LoggerFactory;<a 
name="line.32"></a>
 <span class="sourceLineNo">033</span>import 
org.apache.hadoop.hbase.client.Connection;<a name="line.33"></a>
 <span class="sourceLineNo">034</span>import 
org.apache.hadoop.hbase.client.ConnectionFactory;<a name="line.34"></a>
 <span class="sourceLineNo">035</span>import 
org.apache.hadoop.hbase.client.RegionLocator;<a name="line.35"></a>
@@ -57,7 +57,7 @@
 <span class="sourceLineNo">049</span>implements Configurable {<a 
name="line.49"></a>
 <span class="sourceLineNo">050</span><a name="line.50"></a>
 <span class="sourceLineNo">051</span>  @SuppressWarnings("hiding")<a 
name="line.51"></a>
-<span class="sourceLineNo">052</span>  private static final Log LOG = 
LogFactory.getLog(TableInputFormat.class);<a name="line.52"></a>
+<span class="sourceLineNo">052</span>  private static final Logger LOG = 
LoggerFactory.getLogger(TableInputFormat.class);<a name="line.52"></a>
 <span class="sourceLineNo">053</span><a name="line.53"></a>
 <span class="sourceLineNo">054</span>  /** Job parameter that specifies the 
input table. */<a name="line.54"></a>
 <span class="sourceLineNo">055</span>  public static final String INPUT_TABLE 
= "hbase.mapreduce.inputtable";<a name="line.55"></a>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/505bbb2e/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html
----------------------------------------------------------------------
diff --git 
a/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html 
b/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html
index be26fad..edf22fb 100644
--- 
a/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html
+++ 
b/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html
@@ -36,9 +36,9 @@
 <span class="sourceLineNo">028</span>import java.util.List;<a 
name="line.28"></a>
 <span class="sourceLineNo">029</span>import java.util.Map;<a 
name="line.29"></a>
 <span class="sourceLineNo">030</span><a name="line.30"></a>
-<span class="sourceLineNo">031</span>import org.apache.commons.logging.Log;<a 
name="line.31"></a>
-<span class="sourceLineNo">032</span>import 
org.apache.commons.logging.LogFactory;<a name="line.32"></a>
-<span class="sourceLineNo">033</span>import 
org.apache.yetus.audience.InterfaceAudience;<a name="line.33"></a>
+<span class="sourceLineNo">031</span>import 
org.apache.yetus.audience.InterfaceAudience;<a name="line.31"></a>
+<span class="sourceLineNo">032</span>import org.slf4j.Logger;<a 
name="line.32"></a>
+<span class="sourceLineNo">033</span>import org.slf4j.LoggerFactory;<a 
name="line.33"></a>
 <span class="sourceLineNo">034</span>import 
org.apache.hadoop.hbase.HConstants;<a name="line.34"></a>
 <span class="sourceLineNo">035</span>import 
org.apache.hadoop.hbase.HRegionLocation;<a name="line.35"></a>
 <span class="sourceLineNo">036</span>import 
org.apache.hadoop.hbase.TableName;<a name="line.36"></a>
@@ -117,7 +117,7 @@
 <span class="sourceLineNo">109</span>public abstract class 
TableInputFormatBase<a name="line.109"></a>
 <span class="sourceLineNo">110</span>    extends 
InputFormat&lt;ImmutableBytesWritable, Result&gt; {<a name="line.110"></a>
 <span class="sourceLineNo">111</span><a name="line.111"></a>
-<span class="sourceLineNo">112</span>  private static final Log LOG = 
LogFactory.getLog(TableInputFormatBase.class);<a name="line.112"></a>
+<span class="sourceLineNo">112</span>  private static final Logger LOG = 
LoggerFactory.getLogger(TableInputFormatBase.class);<a name="line.112"></a>
 <span class="sourceLineNo">113</span><a name="line.113"></a>
 <span class="sourceLineNo">114</span>  private static final String 
NOT_INITIALIZED = "The input format instance has not been properly " +<a 
name="line.114"></a>
 <span class="sourceLineNo">115</span>      "initialized. Ensure you call 
initializeTable either in your constructor or initialize " +<a 
name="line.115"></a>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/505bbb2e/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.html
----------------------------------------------------------------------
diff --git 
a/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.html 
b/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.html
index 299e41e..df73714 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.html
@@ -41,16 +41,16 @@
 <span class="sourceLineNo">033</span>import java.util.zip.ZipEntry;<a 
name="line.33"></a>
 <span class="sourceLineNo">034</span>import java.util.zip.ZipFile;<a 
name="line.34"></a>
 <span class="sourceLineNo">035</span><a name="line.35"></a>
-<span class="sourceLineNo">036</span>import org.apache.commons.logging.Log;<a 
name="line.36"></a>
-<span class="sourceLineNo">037</span>import 
org.apache.commons.logging.LogFactory;<a name="line.37"></a>
-<span class="sourceLineNo">038</span>import 
org.apache.hadoop.conf.Configuration;<a name="line.38"></a>
-<span class="sourceLineNo">039</span>import org.apache.hadoop.fs.FileSystem;<a 
name="line.39"></a>
-<span class="sourceLineNo">040</span>import org.apache.hadoop.fs.Path;<a 
name="line.40"></a>
-<span class="sourceLineNo">041</span>import 
org.apache.hadoop.hbase.HBaseConfiguration;<a name="line.41"></a>
-<span class="sourceLineNo">042</span>import 
org.apache.hadoop.hbase.HConstants;<a name="line.42"></a>
-<span class="sourceLineNo">043</span>import 
org.apache.hadoop.hbase.MetaTableAccessor;<a name="line.43"></a>
-<span class="sourceLineNo">044</span>import 
org.apache.hadoop.hbase.TableName;<a name="line.44"></a>
-<span class="sourceLineNo">045</span>import 
org.apache.yetus.audience.InterfaceAudience;<a name="line.45"></a>
+<span class="sourceLineNo">036</span>import 
org.apache.hadoop.conf.Configuration;<a name="line.36"></a>
+<span class="sourceLineNo">037</span>import org.apache.hadoop.fs.FileSystem;<a 
name="line.37"></a>
+<span class="sourceLineNo">038</span>import org.apache.hadoop.fs.Path;<a 
name="line.38"></a>
+<span class="sourceLineNo">039</span>import 
org.apache.hadoop.hbase.HBaseConfiguration;<a name="line.39"></a>
+<span class="sourceLineNo">040</span>import 
org.apache.hadoop.hbase.HConstants;<a name="line.40"></a>
+<span class="sourceLineNo">041</span>import 
org.apache.hadoop.hbase.MetaTableAccessor;<a name="line.41"></a>
+<span class="sourceLineNo">042</span>import 
org.apache.hadoop.hbase.TableName;<a name="line.42"></a>
+<span class="sourceLineNo">043</span>import 
org.apache.yetus.audience.InterfaceAudience;<a name="line.43"></a>
+<span class="sourceLineNo">044</span>import org.slf4j.Logger;<a 
name="line.44"></a>
+<span class="sourceLineNo">045</span>import org.slf4j.LoggerFactory;<a 
name="line.45"></a>
 <span class="sourceLineNo">046</span>import 
org.apache.hadoop.hbase.client.Connection;<a name="line.46"></a>
 <span class="sourceLineNo">047</span>import 
org.apache.hadoop.hbase.client.ConnectionFactory;<a name="line.47"></a>
 <span class="sourceLineNo">048</span>import 
org.apache.hadoop.hbase.client.Put;<a name="line.48"></a>
@@ -78,7 +78,7 @@
 <span class="sourceLineNo">070</span>@SuppressWarnings({ "rawtypes", 
"unchecked" })<a name="line.70"></a>
 <span class="sourceLineNo">071</span>@InterfaceAudience.Public<a 
name="line.71"></a>
 <span class="sourceLineNo">072</span>public class TableMapReduceUtil {<a 
name="line.72"></a>
-<span class="sourceLineNo">073</span>  private static final Log LOG = 
LogFactory.getLog(TableMapReduceUtil.class);<a name="line.73"></a>
+<span class="sourceLineNo">073</span>  private static final Logger LOG = 
LoggerFactory.getLogger(TableMapReduceUtil.class);<a name="line.73"></a>
 <span class="sourceLineNo">074</span><a name="line.74"></a>
 <span class="sourceLineNo">075</span>  /**<a name="line.75"></a>
 <span class="sourceLineNo">076</span>   * Use this before submitting a 
TableMap job. It will appropriately set up<a name="line.76"></a>
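
The javadoc fragment above introduces initTableMapperJob, the usual entry point that wires the table, scan, input format, and mapper output types into a job. A sketch with an illustrative table and a pass-through mapper:

    import java.io.IOException;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.client.Result;
    import org.apache.hadoop.hbase.client.Scan;
    import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
    import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
    import org.apache.hadoop.hbase.mapreduce.TableMapper;
    import org.apache.hadoop.mapreduce.Job;

    public class MapperJobSetup {
      // TableMapper fixes the input types to (ImmutableBytesWritable, Result).
      static class PassThroughMapper extends TableMapper<ImmutableBytesWritable, Result> {
        @Override
        protected void map(ImmutableBytesWritable row, Result value, Context context)
            throws IOException, InterruptedException {
          context.write(row, value);
        }
      }

      public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        Job job = Job.getInstance(conf, "table-mapper-demo");
        Scan scan = new Scan();
        scan.setCaching(500);        // larger scanner batches for MapReduce throughput
        scan.setCacheBlocks(false);  // keep MR scans out of the region server block cache
        TableMapReduceUtil.initTableMapperJob("mytable", scan, PassThroughMapper.class,
            ImmutableBytesWritable.class, Result.class, job);
        System.exit(job.waitForCompletion(true) ? 0 : 1);
      }
    }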
