Repository: hbase-site
Updated Branches:
  refs/heads/asf-site c44efafc1 -> d0f1a9f6b


http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d0f1a9f6/testdevapidocs/src-html/org/apache/hadoop/hbase/mapred/TestTableSnapshotInputFormat.html
----------------------------------------------------------------------
diff --git a/testdevapidocs/src-html/org/apache/hadoop/hbase/mapred/TestTableSnapshotInputFormat.html b/testdevapidocs/src-html/org/apache/hadoop/hbase/mapred/TestTableSnapshotInputFormat.html
index d255b0f..cb83fd6 100644
--- a/testdevapidocs/src-html/org/apache/hadoop/hbase/mapred/TestTableSnapshotInputFormat.html
+++ b/testdevapidocs/src-html/org/apache/hadoop/hbase/mapred/TestTableSnapshotInputFormat.html
@@ -26,272 +26,295 @@
 <span class="sourceLineNo">018</span><a name="line.18"></a>
 <span class="sourceLineNo">019</span>package org.apache.hadoop.hbase.mapred;<a 
name="line.19"></a>
 <span class="sourceLineNo">020</span><a name="line.20"></a>
-<span class="sourceLineNo">021</span>import static org.mockito.Mockito.mock;<a 
name="line.21"></a>
-<span class="sourceLineNo">022</span><a name="line.22"></a>
-<span class="sourceLineNo">023</span>import org.apache.hadoop.fs.Path;<a 
name="line.23"></a>
-<span class="sourceLineNo">024</span>import 
org.apache.hadoop.hbase.HBaseTestingUtility;<a name="line.24"></a>
-<span class="sourceLineNo">025</span>import 
org.apache.hadoop.hbase.HConstants;<a name="line.25"></a>
-<span class="sourceLineNo">026</span>import 
org.apache.hadoop.hbase.testclassification.LargeTests;<a name="line.26"></a>
-<span class="sourceLineNo">027</span>import 
org.apache.hadoop.hbase.TableName;<a name="line.27"></a>
-<span class="sourceLineNo">028</span>import 
org.apache.hadoop.hbase.client.Result;<a name="line.28"></a>
-<span class="sourceLineNo">029</span>import 
org.apache.hadoop.hbase.io.ImmutableBytesWritable;<a name="line.29"></a>
-<span class="sourceLineNo">030</span>import 
org.apache.hadoop.hbase.mapreduce.TableSnapshotInputFormatTestBase;<a 
name="line.30"></a>
-<span class="sourceLineNo">031</span>import 
org.apache.hadoop.hbase.testclassification.VerySlowMapReduceTests;<a 
name="line.31"></a>
-<span class="sourceLineNo">032</span>import 
org.apache.hadoop.hbase.util.Bytes;<a name="line.32"></a>
-<span class="sourceLineNo">033</span>import 
org.apache.hadoop.hbase.util.RegionSplitter;<a name="line.33"></a>
-<span class="sourceLineNo">034</span>import 
org.apache.hadoop.io.NullWritable;<a name="line.34"></a>
-<span class="sourceLineNo">035</span>import 
org.apache.hadoop.mapred.InputSplit;<a name="line.35"></a>
-<span class="sourceLineNo">036</span>import 
org.apache.hadoop.mapred.JobClient;<a name="line.36"></a>
-<span class="sourceLineNo">037</span>import 
org.apache.hadoop.mapred.JobConf;<a name="line.37"></a>
-<span class="sourceLineNo">038</span>import 
org.apache.hadoop.mapred.MapReduceBase;<a name="line.38"></a>
-<span class="sourceLineNo">039</span>import 
org.apache.hadoop.mapred.OutputCollector;<a name="line.39"></a>
-<span class="sourceLineNo">040</span>import 
org.apache.hadoop.mapred.RecordReader;<a name="line.40"></a>
-<span class="sourceLineNo">041</span>import 
org.apache.hadoop.mapred.Reducer;<a name="line.41"></a>
-<span class="sourceLineNo">042</span>import 
org.apache.hadoop.mapred.Reporter;<a name="line.42"></a>
-<span class="sourceLineNo">043</span>import 
org.apache.hadoop.mapred.RunningJob;<a name="line.43"></a>
-<span class="sourceLineNo">044</span>import 
org.apache.hadoop.mapred.lib.NullOutputFormat;<a name="line.44"></a>
-<span class="sourceLineNo">045</span>import org.junit.Assert;<a 
name="line.45"></a>
-<span class="sourceLineNo">046</span>import org.junit.Rule;<a 
name="line.46"></a>
-<span class="sourceLineNo">047</span>import org.junit.Test;<a 
name="line.47"></a>
-<span class="sourceLineNo">048</span>import 
org.junit.experimental.categories.Category;<a name="line.48"></a>
-<span class="sourceLineNo">049</span>import org.junit.rules.TestName;<a 
name="line.49"></a>
-<span class="sourceLineNo">050</span><a name="line.50"></a>
-<span class="sourceLineNo">051</span>import java.io.IOException;<a 
name="line.51"></a>
-<span class="sourceLineNo">052</span>import java.util.Iterator;<a 
name="line.52"></a>
-<span class="sourceLineNo">053</span><a name="line.53"></a>
-<span class="sourceLineNo">054</span>@Category({VerySlowMapReduceTests.class, 
LargeTests.class})<a name="line.54"></a>
-<span class="sourceLineNo">055</span>public class TestTableSnapshotInputFormat 
extends TableSnapshotInputFormatTestBase {<a name="line.55"></a>
-<span class="sourceLineNo">056</span><a name="line.56"></a>
-<span class="sourceLineNo">057</span>  private static final byte[] aaa = 
Bytes.toBytes("aaa");<a name="line.57"></a>
-<span class="sourceLineNo">058</span>  private static final byte[] after_zzz = 
Bytes.toBytes("zz{"); // 'z' + 1 =&gt; '{'<a name="line.58"></a>
-<span class="sourceLineNo">059</span>  private static final String COLUMNS =<a 
name="line.59"></a>
-<span class="sourceLineNo">060</span>    Bytes.toString(FAMILIES[0]) + " " + 
Bytes.toString(FAMILIES[1]);<a name="line.60"></a>
-<span class="sourceLineNo">061</span><a name="line.61"></a>
-<span class="sourceLineNo">062</span>  @Rule<a name="line.62"></a>
-<span class="sourceLineNo">063</span>  public TestName name = new 
TestName();<a name="line.63"></a>
-<span class="sourceLineNo">064</span><a name="line.64"></a>
-<span class="sourceLineNo">065</span>  @Override<a name="line.65"></a>
-<span class="sourceLineNo">066</span>  protected byte[] getStartRow() {<a 
name="line.66"></a>
-<span class="sourceLineNo">067</span>    return aaa;<a name="line.67"></a>
-<span class="sourceLineNo">068</span>  }<a name="line.68"></a>
-<span class="sourceLineNo">069</span><a name="line.69"></a>
-<span class="sourceLineNo">070</span>  @Override<a name="line.70"></a>
-<span class="sourceLineNo">071</span>  protected byte[] getEndRow() {<a 
name="line.71"></a>
-<span class="sourceLineNo">072</span>    return after_zzz;<a 
name="line.72"></a>
-<span class="sourceLineNo">073</span>  }<a name="line.73"></a>
-<span class="sourceLineNo">074</span><a name="line.74"></a>
-<span class="sourceLineNo">075</span>  static class TestTableSnapshotMapper 
extends MapReduceBase<a name="line.75"></a>
-<span class="sourceLineNo">076</span>      implements 
TableMap&lt;ImmutableBytesWritable, NullWritable&gt; {<a name="line.76"></a>
-<span class="sourceLineNo">077</span>    @Override<a name="line.77"></a>
-<span class="sourceLineNo">078</span>    public void 
map(ImmutableBytesWritable key, Result value,<a name="line.78"></a>
-<span class="sourceLineNo">079</span>        
OutputCollector&lt;ImmutableBytesWritable, NullWritable&gt; collector, Reporter 
reporter)<a name="line.79"></a>
-<span class="sourceLineNo">080</span>        throws IOException {<a 
name="line.80"></a>
-<span class="sourceLineNo">081</span>      verifyRowFromMap(key, value);<a 
name="line.81"></a>
-<span class="sourceLineNo">082</span>      collector.collect(key, 
NullWritable.get());<a name="line.82"></a>
-<span class="sourceLineNo">083</span>    }<a name="line.83"></a>
-<span class="sourceLineNo">084</span>  }<a name="line.84"></a>
-<span class="sourceLineNo">085</span><a name="line.85"></a>
-<span class="sourceLineNo">086</span>  public static class 
TestTableSnapshotReducer extends MapReduceBase<a name="line.86"></a>
-<span class="sourceLineNo">087</span>      implements 
Reducer&lt;ImmutableBytesWritable, NullWritable, NullWritable, NullWritable&gt; 
{<a name="line.87"></a>
-<span class="sourceLineNo">088</span>    HBaseTestingUtility.SeenRowTracker 
rowTracker =<a name="line.88"></a>
-<span class="sourceLineNo">089</span>      new 
HBaseTestingUtility.SeenRowTracker(aaa, after_zzz);<a name="line.89"></a>
-<span class="sourceLineNo">090</span><a name="line.90"></a>
-<span class="sourceLineNo">091</span>    @Override<a name="line.91"></a>
-<span class="sourceLineNo">092</span>    public void 
reduce(ImmutableBytesWritable key, Iterator&lt;NullWritable&gt; values,<a 
name="line.92"></a>
-<span class="sourceLineNo">093</span>        OutputCollector&lt;NullWritable, 
NullWritable&gt; collector, Reporter reporter)<a name="line.93"></a>
-<span class="sourceLineNo">094</span>        throws IOException {<a 
name="line.94"></a>
-<span class="sourceLineNo">095</span>      rowTracker.addRow(key.get());<a 
name="line.95"></a>
-<span class="sourceLineNo">096</span>    }<a name="line.96"></a>
-<span class="sourceLineNo">097</span><a name="line.97"></a>
-<span class="sourceLineNo">098</span>    @Override<a name="line.98"></a>
-<span class="sourceLineNo">099</span>    public void close() {<a 
name="line.99"></a>
-<span class="sourceLineNo">100</span>      rowTracker.validate();<a 
name="line.100"></a>
-<span class="sourceLineNo">101</span>    }<a name="line.101"></a>
-<span class="sourceLineNo">102</span>  }<a name="line.102"></a>
-<span class="sourceLineNo">103</span><a name="line.103"></a>
-<span class="sourceLineNo">104</span>  @Test<a name="line.104"></a>
-<span class="sourceLineNo">105</span>  public void 
testInitTableSnapshotMapperJobConfig() throws Exception {<a name="line.105"></a>
-<span class="sourceLineNo">106</span>    setupCluster();<a name="line.106"></a>
-<span class="sourceLineNo">107</span>    final TableName tableName = 
TableName.valueOf(name.getMethodName());<a name="line.107"></a>
-<span class="sourceLineNo">108</span>    String snapshotName = "foo";<a 
name="line.108"></a>
-<span class="sourceLineNo">109</span><a name="line.109"></a>
-<span class="sourceLineNo">110</span>    try {<a name="line.110"></a>
-<span class="sourceLineNo">111</span>      createTableAndSnapshot(UTIL, 
tableName, snapshotName, getStartRow(), getEndRow(), 1);<a name="line.111"></a>
-<span class="sourceLineNo">112</span>      JobConf job = new 
JobConf(UTIL.getConfiguration());<a name="line.112"></a>
-<span class="sourceLineNo">113</span>      Path tmpTableDir = 
UTIL.getDataTestDirOnTestFS(snapshotName);<a name="line.113"></a>
-<span class="sourceLineNo">114</span><a name="line.114"></a>
-<span class="sourceLineNo">115</span>      
TableMapReduceUtil.initTableSnapshotMapJob(snapshotName,<a name="line.115"></a>
-<span class="sourceLineNo">116</span>        COLUMNS, 
TestTableSnapshotMapper.class, ImmutableBytesWritable.class,<a 
name="line.116"></a>
-<span class="sourceLineNo">117</span>        NullWritable.class, job, false, 
tmpTableDir);<a name="line.117"></a>
-<span class="sourceLineNo">118</span><a name="line.118"></a>
-<span class="sourceLineNo">119</span>      // TODO: would be better to examine 
directly the cache instance that results from this<a name="line.119"></a>
-<span class="sourceLineNo">120</span>      // config. Currently this is not 
possible because BlockCache initialization is static.<a name="line.120"></a>
-<span class="sourceLineNo">121</span>      Assert.assertEquals(<a 
name="line.121"></a>
-<span class="sourceLineNo">122</span>        "Snapshot job should be 
configured for default LruBlockCache.",<a name="line.122"></a>
-<span class="sourceLineNo">123</span>        
HConstants.HFILE_BLOCK_CACHE_SIZE_DEFAULT,<a name="line.123"></a>
-<span class="sourceLineNo">124</span>        
job.getFloat(HConstants.HFILE_BLOCK_CACHE_SIZE_KEY, -1), 0.01);<a 
name="line.124"></a>
-<span class="sourceLineNo">125</span>      Assert.assertEquals(<a 
name="line.125"></a>
-<span class="sourceLineNo">126</span>        "Snapshot job should not use 
BucketCache.",<a name="line.126"></a>
-<span class="sourceLineNo">127</span>        0, 
job.getFloat("hbase.bucketcache.size", -1), 0.01);<a name="line.127"></a>
-<span class="sourceLineNo">128</span>    } finally {<a name="line.128"></a>
-<span class="sourceLineNo">129</span>      
UTIL.getAdmin().deleteSnapshot(snapshotName);<a name="line.129"></a>
-<span class="sourceLineNo">130</span>      UTIL.deleteTable(tableName);<a 
name="line.130"></a>
-<span class="sourceLineNo">131</span>      tearDownCluster();<a 
name="line.131"></a>
-<span class="sourceLineNo">132</span>    }<a name="line.132"></a>
-<span class="sourceLineNo">133</span>  }<a name="line.133"></a>
-<span class="sourceLineNo">134</span><a name="line.134"></a>
-<span class="sourceLineNo">135</span>  // TODO: mapred does not support 
limiting input range by startrow, endrow.<a name="line.135"></a>
-<span class="sourceLineNo">136</span>  // Thus the following tests must 
override parameter verification.<a name="line.136"></a>
-<span class="sourceLineNo">137</span><a name="line.137"></a>
-<span class="sourceLineNo">138</span>  @Test<a name="line.138"></a>
-<span class="sourceLineNo">139</span>  @Override<a name="line.139"></a>
-<span class="sourceLineNo">140</span>  public void 
testWithMockedMapReduceMultiRegion() throws Exception {<a name="line.140"></a>
-<span class="sourceLineNo">141</span>    testWithMockedMapReduce(UTIL, 
"testWithMockedMapReduceMultiRegion", 10, 1, 10);<a name="line.141"></a>
-<span class="sourceLineNo">142</span>  }<a name="line.142"></a>
-<span class="sourceLineNo">143</span><a name="line.143"></a>
-<span class="sourceLineNo">144</span>  @Test<a name="line.144"></a>
-<span class="sourceLineNo">145</span>  @Override<a name="line.145"></a>
-<span class="sourceLineNo">146</span>  public void 
testWithMapReduceMultiRegion() throws Exception {<a name="line.146"></a>
-<span class="sourceLineNo">147</span>    testWithMapReduce(UTIL, 
"testWithMapReduceMultiRegion", 10, 1, 10, false);<a name="line.147"></a>
-<span class="sourceLineNo">148</span>  }<a name="line.148"></a>
-<span class="sourceLineNo">149</span><a name="line.149"></a>
-<span class="sourceLineNo">150</span>  @Test<a name="line.150"></a>
-<span class="sourceLineNo">151</span>  @Override<a name="line.151"></a>
-<span class="sourceLineNo">152</span>  // run the MR job while HBase is 
offline<a name="line.152"></a>
-<span class="sourceLineNo">153</span>  public void 
testWithMapReduceAndOfflineHBaseMultiRegion() throws Exception {<a 
name="line.153"></a>
-<span class="sourceLineNo">154</span>    testWithMapReduce(UTIL, 
"testWithMapReduceAndOfflineHBaseMultiRegion", 10, 1, 10, true);<a 
name="line.154"></a>
-<span class="sourceLineNo">155</span>  }<a name="line.155"></a>
-<span class="sourceLineNo">156</span><a name="line.156"></a>
-<span class="sourceLineNo">157</span>  @Override<a name="line.157"></a>
-<span class="sourceLineNo">158</span>  public void 
testRestoreSnapshotDoesNotCreateBackRefLinksInit(TableName tableName,<a 
name="line.158"></a>
-<span class="sourceLineNo">159</span>      String snapshotName, Path 
tmpTableDir) throws Exception {<a name="line.159"></a>
-<span class="sourceLineNo">160</span>    JobConf job = new 
JobConf(UTIL.getConfiguration());<a name="line.160"></a>
-<span class="sourceLineNo">161</span>    
TableMapReduceUtil.initTableSnapshotMapJob(snapshotName,<a name="line.161"></a>
-<span class="sourceLineNo">162</span>      COLUMNS, 
TestTableSnapshotMapper.class, ImmutableBytesWritable.class,<a 
name="line.162"></a>
-<span class="sourceLineNo">163</span>      NullWritable.class, job, false, 
tmpTableDir);<a name="line.163"></a>
-<span class="sourceLineNo">164</span>  }<a name="line.164"></a>
-<span class="sourceLineNo">165</span><a name="line.165"></a>
-<span class="sourceLineNo">166</span>  @Override<a name="line.166"></a>
-<span class="sourceLineNo">167</span>  protected void 
testWithMockedMapReduce(HBaseTestingUtility util, String snapshotName,<a 
name="line.167"></a>
-<span class="sourceLineNo">168</span>      int numRegions, int 
numSplitsPerRegion, int expectedNumSplits) throws Exception {<a 
name="line.168"></a>
-<span class="sourceLineNo">169</span>    setupCluster();<a name="line.169"></a>
-<span class="sourceLineNo">170</span>    final TableName tableName = 
TableName.valueOf(name.getMethodName());<a name="line.170"></a>
-<span class="sourceLineNo">171</span>    try {<a name="line.171"></a>
-<span class="sourceLineNo">172</span>      createTableAndSnapshot(<a 
name="line.172"></a>
-<span class="sourceLineNo">173</span>        util, tableName, snapshotName, 
getStartRow(), getEndRow(), numRegions);<a name="line.173"></a>
-<span class="sourceLineNo">174</span><a name="line.174"></a>
-<span class="sourceLineNo">175</span>      JobConf job = new 
JobConf(util.getConfiguration());<a name="line.175"></a>
-<span class="sourceLineNo">176</span>      Path tmpTableDir = 
util.getDataTestDirOnTestFS(snapshotName);<a name="line.176"></a>
-<span class="sourceLineNo">177</span><a name="line.177"></a>
-<span class="sourceLineNo">178</span>      if (numSplitsPerRegion &gt; 1) {<a 
name="line.178"></a>
-<span class="sourceLineNo">179</span>        
TableMapReduceUtil.initTableSnapshotMapJob(snapshotName,<a name="line.179"></a>
-<span class="sourceLineNo">180</span>                COLUMNS, 
TestTableSnapshotMapper.class, ImmutableBytesWritable.class,<a 
name="line.180"></a>
-<span class="sourceLineNo">181</span>                NullWritable.class, job, 
false, tmpTableDir, new RegionSplitter.UniformSplit(),<a name="line.181"></a>
-<span class="sourceLineNo">182</span>                numSplitsPerRegion);<a 
name="line.182"></a>
-<span class="sourceLineNo">183</span>      } else {<a name="line.183"></a>
-<span class="sourceLineNo">184</span>        
TableMapReduceUtil.initTableSnapshotMapJob(snapshotName,<a name="line.184"></a>
-<span class="sourceLineNo">185</span>                COLUMNS, 
TestTableSnapshotMapper.class, ImmutableBytesWritable.class,<a 
name="line.185"></a>
-<span class="sourceLineNo">186</span>                NullWritable.class, job, 
false, tmpTableDir);<a name="line.186"></a>
-<span class="sourceLineNo">187</span>      }<a name="line.187"></a>
-<span class="sourceLineNo">188</span><a name="line.188"></a>
-<span class="sourceLineNo">189</span>      // mapred doesn't support start and 
end keys? o.O<a name="line.189"></a>
-<span class="sourceLineNo">190</span>      verifyWithMockedMapReduce(job, 
numRegions, expectedNumSplits, getStartRow(), getEndRow());<a 
name="line.190"></a>
-<span class="sourceLineNo">191</span><a name="line.191"></a>
-<span class="sourceLineNo">192</span>    } finally {<a name="line.192"></a>
-<span class="sourceLineNo">193</span>      
util.getAdmin().deleteSnapshot(snapshotName);<a name="line.193"></a>
-<span class="sourceLineNo">194</span>      util.deleteTable(tableName);<a 
name="line.194"></a>
-<span class="sourceLineNo">195</span>      tearDownCluster();<a 
name="line.195"></a>
-<span class="sourceLineNo">196</span>    }<a name="line.196"></a>
-<span class="sourceLineNo">197</span>  }<a name="line.197"></a>
-<span class="sourceLineNo">198</span><a name="line.198"></a>
-<span class="sourceLineNo">199</span>  private void 
verifyWithMockedMapReduce(JobConf job, int numRegions, int expectedNumSplits,<a 
name="line.199"></a>
-<span class="sourceLineNo">200</span>      byte[] startRow, byte[] stopRow) 
throws IOException, InterruptedException {<a name="line.200"></a>
-<span class="sourceLineNo">201</span>    TableSnapshotInputFormat tsif = new 
TableSnapshotInputFormat();<a name="line.201"></a>
-<span class="sourceLineNo">202</span>    InputSplit[] splits = 
tsif.getSplits(job, 0);<a name="line.202"></a>
-<span class="sourceLineNo">203</span><a name="line.203"></a>
-<span class="sourceLineNo">204</span>    
Assert.assertEquals(expectedNumSplits, splits.length);<a name="line.204"></a>
-<span class="sourceLineNo">205</span><a name="line.205"></a>
-<span class="sourceLineNo">206</span>    HBaseTestingUtility.SeenRowTracker 
rowTracker =<a name="line.206"></a>
-<span class="sourceLineNo">207</span>      new 
HBaseTestingUtility.SeenRowTracker(startRow, stopRow);<a name="line.207"></a>
-<span class="sourceLineNo">208</span><a name="line.208"></a>
-<span class="sourceLineNo">209</span>    for (int i = 0; i &lt; splits.length; 
i++) {<a name="line.209"></a>
-<span class="sourceLineNo">210</span>      // validate input split<a 
name="line.210"></a>
-<span class="sourceLineNo">211</span>      InputSplit split = splits[i];<a 
name="line.211"></a>
-<span class="sourceLineNo">212</span>      Assert.assertTrue(split instanceof 
TableSnapshotInputFormat.TableSnapshotRegionSplit);<a name="line.212"></a>
+<span class="sourceLineNo">021</span>import static 
org.apache.hadoop.hbase.mapreduce.TableSnapshotInputFormatImpl.SNAPSHOT_INPUTFORMAT_LOCALITY_ENABLED_DEFAULT;<a
 name="line.21"></a>
+<span class="sourceLineNo">022</span>import static org.mockito.Mockito.mock;<a 
name="line.22"></a>
+<span class="sourceLineNo">023</span><a name="line.23"></a>
+<span class="sourceLineNo">024</span>import org.apache.hadoop.fs.Path;<a 
name="line.24"></a>
+<span class="sourceLineNo">025</span>import 
org.apache.hadoop.hbase.HBaseTestingUtility;<a name="line.25"></a>
+<span class="sourceLineNo">026</span>import 
org.apache.hadoop.hbase.HConstants;<a name="line.26"></a>
+<span class="sourceLineNo">027</span>import 
org.apache.hadoop.hbase.testclassification.LargeTests;<a name="line.27"></a>
+<span class="sourceLineNo">028</span>import 
org.apache.hadoop.hbase.TableName;<a name="line.28"></a>
+<span class="sourceLineNo">029</span>import 
org.apache.hadoop.hbase.client.Result;<a name="line.29"></a>
+<span class="sourceLineNo">030</span>import 
org.apache.hadoop.hbase.io.ImmutableBytesWritable;<a name="line.30"></a>
+<span class="sourceLineNo">031</span>import 
org.apache.hadoop.hbase.mapreduce.TableSnapshotInputFormatTestBase;<a 
name="line.31"></a>
+<span class="sourceLineNo">032</span>import 
org.apache.hadoop.hbase.testclassification.VerySlowMapReduceTests;<a 
name="line.32"></a>
+<span class="sourceLineNo">033</span>import 
org.apache.hadoop.hbase.util.Bytes;<a name="line.33"></a>
+<span class="sourceLineNo">034</span>import 
org.apache.hadoop.hbase.util.RegionSplitter;<a name="line.34"></a>
+<span class="sourceLineNo">035</span>import 
org.apache.hadoop.io.NullWritable;<a name="line.35"></a>
+<span class="sourceLineNo">036</span>import 
org.apache.hadoop.mapred.InputSplit;<a name="line.36"></a>
+<span class="sourceLineNo">037</span>import 
org.apache.hadoop.mapred.JobClient;<a name="line.37"></a>
+<span class="sourceLineNo">038</span>import 
org.apache.hadoop.mapred.JobConf;<a name="line.38"></a>
+<span class="sourceLineNo">039</span>import 
org.apache.hadoop.mapred.MapReduceBase;<a name="line.39"></a>
+<span class="sourceLineNo">040</span>import 
org.apache.hadoop.mapred.OutputCollector;<a name="line.40"></a>
+<span class="sourceLineNo">041</span>import 
org.apache.hadoop.mapred.RecordReader;<a name="line.41"></a>
+<span class="sourceLineNo">042</span>import 
org.apache.hadoop.mapred.Reducer;<a name="line.42"></a>
+<span class="sourceLineNo">043</span>import 
org.apache.hadoop.mapred.Reporter;<a name="line.43"></a>
+<span class="sourceLineNo">044</span>import 
org.apache.hadoop.mapred.RunningJob;<a name="line.44"></a>
+<span class="sourceLineNo">045</span>import 
org.apache.hadoop.mapred.lib.NullOutputFormat;<a name="line.45"></a>
+<span class="sourceLineNo">046</span>import org.junit.Assert;<a 
name="line.46"></a>
+<span class="sourceLineNo">047</span>import org.junit.Rule;<a 
name="line.47"></a>
+<span class="sourceLineNo">048</span>import org.junit.Test;<a 
name="line.48"></a>
+<span class="sourceLineNo">049</span>import 
org.junit.experimental.categories.Category;<a name="line.49"></a>
+<span class="sourceLineNo">050</span>import org.junit.rules.TestName;<a 
name="line.50"></a>
+<span class="sourceLineNo">051</span><a name="line.51"></a>
+<span class="sourceLineNo">052</span>import java.io.IOException;<a 
name="line.52"></a>
+<span class="sourceLineNo">053</span>import java.util.Iterator;<a 
name="line.53"></a>
+<span class="sourceLineNo">054</span><a name="line.54"></a>
+<span class="sourceLineNo">055</span>@Category({VerySlowMapReduceTests.class, 
LargeTests.class})<a name="line.55"></a>
+<span class="sourceLineNo">056</span>public class TestTableSnapshotInputFormat 
extends TableSnapshotInputFormatTestBase {<a name="line.56"></a>
+<span class="sourceLineNo">057</span><a name="line.57"></a>
+<span class="sourceLineNo">058</span>  private static final byte[] aaa = 
Bytes.toBytes("aaa");<a name="line.58"></a>
+<span class="sourceLineNo">059</span>  private static final byte[] after_zzz = 
Bytes.toBytes("zz{"); // 'z' + 1 =&gt; '{'<a name="line.59"></a>
+<span class="sourceLineNo">060</span>  private static final String COLUMNS =<a 
name="line.60"></a>
+<span class="sourceLineNo">061</span>    Bytes.toString(FAMILIES[0]) + " " + 
Bytes.toString(FAMILIES[1]);<a name="line.61"></a>
+<span class="sourceLineNo">062</span><a name="line.62"></a>
+<span class="sourceLineNo">063</span>  @Rule<a name="line.63"></a>
+<span class="sourceLineNo">064</span>  public TestName name = new 
TestName();<a name="line.64"></a>
+<span class="sourceLineNo">065</span><a name="line.65"></a>
+<span class="sourceLineNo">066</span>  @Override<a name="line.66"></a>
+<span class="sourceLineNo">067</span>  protected byte[] getStartRow() {<a 
name="line.67"></a>
+<span class="sourceLineNo">068</span>    return aaa;<a name="line.68"></a>
+<span class="sourceLineNo">069</span>  }<a name="line.69"></a>
+<span class="sourceLineNo">070</span><a name="line.70"></a>
+<span class="sourceLineNo">071</span>  @Override<a name="line.71"></a>
+<span class="sourceLineNo">072</span>  protected byte[] getEndRow() {<a 
name="line.72"></a>
+<span class="sourceLineNo">073</span>    return after_zzz;<a 
name="line.73"></a>
+<span class="sourceLineNo">074</span>  }<a name="line.74"></a>
+<span class="sourceLineNo">075</span><a name="line.75"></a>
+<span class="sourceLineNo">076</span>  static class TestTableSnapshotMapper 
extends MapReduceBase<a name="line.76"></a>
+<span class="sourceLineNo">077</span>      implements 
TableMap&lt;ImmutableBytesWritable, NullWritable&gt; {<a name="line.77"></a>
+<span class="sourceLineNo">078</span>    @Override<a name="line.78"></a>
+<span class="sourceLineNo">079</span>    public void 
map(ImmutableBytesWritable key, Result value,<a name="line.79"></a>
+<span class="sourceLineNo">080</span>        
OutputCollector&lt;ImmutableBytesWritable, NullWritable&gt; collector, Reporter 
reporter)<a name="line.80"></a>
+<span class="sourceLineNo">081</span>        throws IOException {<a 
name="line.81"></a>
+<span class="sourceLineNo">082</span>      verifyRowFromMap(key, value);<a 
name="line.82"></a>
+<span class="sourceLineNo">083</span>      collector.collect(key, 
NullWritable.get());<a name="line.83"></a>
+<span class="sourceLineNo">084</span>    }<a name="line.84"></a>
+<span class="sourceLineNo">085</span>  }<a name="line.85"></a>
+<span class="sourceLineNo">086</span><a name="line.86"></a>
+<span class="sourceLineNo">087</span>  public static class 
TestTableSnapshotReducer extends MapReduceBase<a name="line.87"></a>
+<span class="sourceLineNo">088</span>      implements 
Reducer&lt;ImmutableBytesWritable, NullWritable, NullWritable, NullWritable&gt; 
{<a name="line.88"></a>
+<span class="sourceLineNo">089</span>    HBaseTestingUtility.SeenRowTracker 
rowTracker =<a name="line.89"></a>
+<span class="sourceLineNo">090</span>      new 
HBaseTestingUtility.SeenRowTracker(aaa, after_zzz);<a name="line.90"></a>
+<span class="sourceLineNo">091</span><a name="line.91"></a>
+<span class="sourceLineNo">092</span>    @Override<a name="line.92"></a>
+<span class="sourceLineNo">093</span>    public void 
reduce(ImmutableBytesWritable key, Iterator&lt;NullWritable&gt; values,<a 
name="line.93"></a>
+<span class="sourceLineNo">094</span>        OutputCollector&lt;NullWritable, 
NullWritable&gt; collector, Reporter reporter)<a name="line.94"></a>
+<span class="sourceLineNo">095</span>        throws IOException {<a 
name="line.95"></a>
+<span class="sourceLineNo">096</span>      rowTracker.addRow(key.get());<a 
name="line.96"></a>
+<span class="sourceLineNo">097</span>    }<a name="line.97"></a>
+<span class="sourceLineNo">098</span><a name="line.98"></a>
+<span class="sourceLineNo">099</span>    @Override<a name="line.99"></a>
+<span class="sourceLineNo">100</span>    public void close() {<a 
name="line.100"></a>
+<span class="sourceLineNo">101</span>      rowTracker.validate();<a 
name="line.101"></a>
+<span class="sourceLineNo">102</span>    }<a name="line.102"></a>
+<span class="sourceLineNo">103</span>  }<a name="line.103"></a>
+<span class="sourceLineNo">104</span><a name="line.104"></a>
+<span class="sourceLineNo">105</span>  @Test<a name="line.105"></a>
+<span class="sourceLineNo">106</span>  public void 
testInitTableSnapshotMapperJobConfig() throws Exception {<a name="line.106"></a>
+<span class="sourceLineNo">107</span>    setupCluster();<a name="line.107"></a>
+<span class="sourceLineNo">108</span>    final TableName tableName = 
TableName.valueOf(name.getMethodName());<a name="line.108"></a>
+<span class="sourceLineNo">109</span>    String snapshotName = "foo";<a 
name="line.109"></a>
+<span class="sourceLineNo">110</span><a name="line.110"></a>
+<span class="sourceLineNo">111</span>    try {<a name="line.111"></a>
+<span class="sourceLineNo">112</span>      createTableAndSnapshot(UTIL, 
tableName, snapshotName, getStartRow(), getEndRow(), 1);<a name="line.112"></a>
+<span class="sourceLineNo">113</span>      JobConf job = new 
JobConf(UTIL.getConfiguration());<a name="line.113"></a>
+<span class="sourceLineNo">114</span>      Path tmpTableDir = 
UTIL.getDataTestDirOnTestFS(snapshotName);<a name="line.114"></a>
+<span class="sourceLineNo">115</span><a name="line.115"></a>
+<span class="sourceLineNo">116</span>      
TableMapReduceUtil.initTableSnapshotMapJob(snapshotName,<a name="line.116"></a>
+<span class="sourceLineNo">117</span>        COLUMNS, 
TestTableSnapshotMapper.class, ImmutableBytesWritable.class,<a 
name="line.117"></a>
+<span class="sourceLineNo">118</span>        NullWritable.class, job, false, 
tmpTableDir);<a name="line.118"></a>
+<span class="sourceLineNo">119</span><a name="line.119"></a>
+<span class="sourceLineNo">120</span>      // TODO: would be better to examine 
directly the cache instance that results from this<a name="line.120"></a>
+<span class="sourceLineNo">121</span>      // config. Currently this is not 
possible because BlockCache initialization is static.<a name="line.121"></a>
+<span class="sourceLineNo">122</span>      Assert.assertEquals(<a 
name="line.122"></a>
+<span class="sourceLineNo">123</span>        "Snapshot job should be 
configured for default LruBlockCache.",<a name="line.123"></a>
+<span class="sourceLineNo">124</span>        
HConstants.HFILE_BLOCK_CACHE_SIZE_DEFAULT,<a name="line.124"></a>
+<span class="sourceLineNo">125</span>        
job.getFloat(HConstants.HFILE_BLOCK_CACHE_SIZE_KEY, -1), 0.01);<a 
name="line.125"></a>
+<span class="sourceLineNo">126</span>      Assert.assertEquals(<a 
name="line.126"></a>
+<span class="sourceLineNo">127</span>        "Snapshot job should not use 
BucketCache.",<a name="line.127"></a>
+<span class="sourceLineNo">128</span>        0, 
job.getFloat("hbase.bucketcache.size", -1), 0.01);<a name="line.128"></a>
+<span class="sourceLineNo">129</span>    } finally {<a name="line.129"></a>
+<span class="sourceLineNo">130</span>      
UTIL.getAdmin().deleteSnapshot(snapshotName);<a name="line.130"></a>
+<span class="sourceLineNo">131</span>      UTIL.deleteTable(tableName);<a 
name="line.131"></a>
+<span class="sourceLineNo">132</span>      tearDownCluster();<a 
name="line.132"></a>
+<span class="sourceLineNo">133</span>    }<a name="line.133"></a>
+<span class="sourceLineNo">134</span>  }<a name="line.134"></a>
+<span class="sourceLineNo">135</span><a name="line.135"></a>
+<span class="sourceLineNo">136</span>  // TODO: mapred does not support 
limiting input range by startrow, endrow.<a name="line.136"></a>
+<span class="sourceLineNo">137</span>  // Thus the following tests must 
override parameter verification.<a name="line.137"></a>
+<span class="sourceLineNo">138</span><a name="line.138"></a>
+<span class="sourceLineNo">139</span>  @Test<a name="line.139"></a>
+<span class="sourceLineNo">140</span>  @Override<a name="line.140"></a>
+<span class="sourceLineNo">141</span>  public void 
testWithMockedMapReduceMultiRegion() throws Exception {<a name="line.141"></a>
+<span class="sourceLineNo">142</span>    testWithMockedMapReduce(<a 
name="line.142"></a>
+<span class="sourceLineNo">143</span>        UTIL, 
"testWithMockedMapReduceMultiRegion", 10, 1, 10, true);<a name="line.143"></a>
+<span class="sourceLineNo">144</span>        // It does not matter whether 
true or false is given to setLocalityEnabledTo,<a name="line.144"></a>
+<span class="sourceLineNo">145</span>        // because it is not read in 
testWithMockedMapReduce().<a name="line.145"></a>
+<span class="sourceLineNo">146</span>  }<a name="line.146"></a>
+<span class="sourceLineNo">147</span><a name="line.147"></a>
+<span class="sourceLineNo">148</span>  @Test<a name="line.148"></a>
+<span class="sourceLineNo">149</span>  @Override<a name="line.149"></a>
+<span class="sourceLineNo">150</span>  public void 
testWithMapReduceMultiRegion() throws Exception {<a name="line.150"></a>
+<span class="sourceLineNo">151</span>    testWithMapReduce(UTIL, 
"testWithMapReduceMultiRegion", 10, 1, 10, false);<a name="line.151"></a>
+<span class="sourceLineNo">152</span>  }<a name="line.152"></a>
+<span class="sourceLineNo">153</span><a name="line.153"></a>
+<span class="sourceLineNo">154</span>  @Test<a name="line.154"></a>
+<span class="sourceLineNo">155</span>  @Override<a name="line.155"></a>
+<span class="sourceLineNo">156</span>  // run the MR job while HBase is 
offline<a name="line.156"></a>
+<span class="sourceLineNo">157</span>  public void 
testWithMapReduceAndOfflineHBaseMultiRegion() throws Exception {<a 
name="line.157"></a>
+<span class="sourceLineNo">158</span>    testWithMapReduce(UTIL, 
"testWithMapReduceAndOfflineHBaseMultiRegion", 10, 1, 10, true);<a 
name="line.158"></a>
+<span class="sourceLineNo">159</span>  }<a name="line.159"></a>
+<span class="sourceLineNo">160</span><a name="line.160"></a>
+<span class="sourceLineNo">161</span>  @Override<a name="line.161"></a>
+<span class="sourceLineNo">162</span>  public void 
testRestoreSnapshotDoesNotCreateBackRefLinksInit(TableName tableName,<a 
name="line.162"></a>
+<span class="sourceLineNo">163</span>      String snapshotName, Path 
tmpTableDir) throws Exception {<a name="line.163"></a>
+<span class="sourceLineNo">164</span>    JobConf job = new 
JobConf(UTIL.getConfiguration());<a name="line.164"></a>
+<span class="sourceLineNo">165</span>    
TableMapReduceUtil.initTableSnapshotMapJob(snapshotName,<a name="line.165"></a>
+<span class="sourceLineNo">166</span>      COLUMNS, 
TestTableSnapshotMapper.class, ImmutableBytesWritable.class,<a 
name="line.166"></a>
+<span class="sourceLineNo">167</span>      NullWritable.class, job, false, 
tmpTableDir);<a name="line.167"></a>
+<span class="sourceLineNo">168</span>  }<a name="line.168"></a>
+<span class="sourceLineNo">169</span><a name="line.169"></a>
+<span class="sourceLineNo">170</span>  @Override<a name="line.170"></a>
+<span class="sourceLineNo">171</span>  protected void 
testWithMockedMapReduce(HBaseTestingUtility util, String snapshotName,<a 
name="line.171"></a>
+<span class="sourceLineNo">172</span>      int numRegions, int 
numSplitsPerRegion, int expectedNumSplits, boolean setLocalityEnabledTo)<a 
name="line.172"></a>
+<span class="sourceLineNo">173</span>      throws Exception {<a 
name="line.173"></a>
+<span class="sourceLineNo">174</span>    setupCluster();<a name="line.174"></a>
+<span class="sourceLineNo">175</span>    final TableName tableName = 
TableName.valueOf(name.getMethodName());<a name="line.175"></a>
+<span class="sourceLineNo">176</span>    try {<a name="line.176"></a>
+<span class="sourceLineNo">177</span>      createTableAndSnapshot(<a 
name="line.177"></a>
+<span class="sourceLineNo">178</span>        util, tableName, snapshotName, 
getStartRow(), getEndRow(), numRegions);<a name="line.178"></a>
+<span class="sourceLineNo">179</span><a name="line.179"></a>
+<span class="sourceLineNo">180</span>      JobConf job = new 
JobConf(util.getConfiguration());<a name="line.180"></a>
+<span class="sourceLineNo">181</span>      // setLocalityEnabledTo is ignored 
no matter what is specified, so as to test the case that<a name="line.181"></a>
+<span class="sourceLineNo">182</span>      // 
SNAPSHOT_INPUTFORMAT_LOCALITY_ENABLED_KEY is not explicitly specified<a 
name="line.182"></a>
+<span class="sourceLineNo">183</span>      // and the default value is 
taken.<a name="line.183"></a>
+<span class="sourceLineNo">184</span>      Path tmpTableDir = 
util.getDataTestDirOnTestFS(snapshotName);<a name="line.184"></a>
+<span class="sourceLineNo">185</span><a name="line.185"></a>
+<span class="sourceLineNo">186</span>      if (numSplitsPerRegion &gt; 1) {<a 
name="line.186"></a>
+<span class="sourceLineNo">187</span>        
TableMapReduceUtil.initTableSnapshotMapJob(snapshotName,<a name="line.187"></a>
+<span class="sourceLineNo">188</span>                COLUMNS, 
TestTableSnapshotMapper.class, ImmutableBytesWritable.class,<a 
name="line.188"></a>
+<span class="sourceLineNo">189</span>                NullWritable.class, job, 
false, tmpTableDir, new RegionSplitter.UniformSplit(),<a name="line.189"></a>
+<span class="sourceLineNo">190</span>                numSplitsPerRegion);<a 
name="line.190"></a>
+<span class="sourceLineNo">191</span>      } else {<a name="line.191"></a>
+<span class="sourceLineNo">192</span>        
TableMapReduceUtil.initTableSnapshotMapJob(snapshotName,<a name="line.192"></a>
+<span class="sourceLineNo">193</span>                COLUMNS, 
TestTableSnapshotMapper.class, ImmutableBytesWritable.class,<a 
name="line.193"></a>
+<span class="sourceLineNo">194</span>                NullWritable.class, job, 
false, tmpTableDir);<a name="line.194"></a>
+<span class="sourceLineNo">195</span>      }<a name="line.195"></a>
+<span class="sourceLineNo">196</span><a name="line.196"></a>
+<span class="sourceLineNo">197</span>      // mapred doesn't support start and 
end keys? o.O<a name="line.197"></a>
+<span class="sourceLineNo">198</span>      verifyWithMockedMapReduce(job, 
numRegions, expectedNumSplits, getStartRow(), getEndRow());<a 
name="line.198"></a>
+<span class="sourceLineNo">199</span><a name="line.199"></a>
+<span class="sourceLineNo">200</span>    } finally {<a name="line.200"></a>
+<span class="sourceLineNo">201</span>      
util.getAdmin().deleteSnapshot(snapshotName);<a name="line.201"></a>
+<span class="sourceLineNo">202</span>      util.deleteTable(tableName);<a 
name="line.202"></a>
+<span class="sourceLineNo">203</span>      tearDownCluster();<a 
name="line.203"></a>
+<span class="sourceLineNo">204</span>    }<a name="line.204"></a>
+<span class="sourceLineNo">205</span>  }<a name="line.205"></a>
+<span class="sourceLineNo">206</span><a name="line.206"></a>
+<span class="sourceLineNo">207</span>  private void 
verifyWithMockedMapReduce(JobConf job, int numRegions, int expectedNumSplits,<a 
name="line.207"></a>
+<span class="sourceLineNo">208</span>      byte[] startRow, byte[] stopRow) 
throws IOException, InterruptedException {<a name="line.208"></a>
+<span class="sourceLineNo">209</span>    TableSnapshotInputFormat tsif = new 
TableSnapshotInputFormat();<a name="line.209"></a>
+<span class="sourceLineNo">210</span>    InputSplit[] splits = 
tsif.getSplits(job, 0);<a name="line.210"></a>
+<span class="sourceLineNo">211</span><a name="line.211"></a>
+<span class="sourceLineNo">212</span>    
Assert.assertEquals(expectedNumSplits, splits.length);<a name="line.212"></a>
 <span class="sourceLineNo">213</span><a name="line.213"></a>
-<span class="sourceLineNo">214</span>      // validate record reader<a 
name="line.214"></a>
-<span class="sourceLineNo">215</span>      OutputCollector collector = 
mock(OutputCollector.class);<a name="line.215"></a>
-<span class="sourceLineNo">216</span>      Reporter reporter = 
mock(Reporter.class);<a name="line.216"></a>
-<span class="sourceLineNo">217</span>      
RecordReader&lt;ImmutableBytesWritable, Result&gt; rr = 
tsif.getRecordReader(split, job, reporter);<a name="line.217"></a>
-<span class="sourceLineNo">218</span><a name="line.218"></a>
-<span class="sourceLineNo">219</span>      // validate we can read all the 
data back<a name="line.219"></a>
-<span class="sourceLineNo">220</span>      ImmutableBytesWritable key = 
rr.createKey();<a name="line.220"></a>
-<span class="sourceLineNo">221</span>      Result value = rr.createValue();<a 
name="line.221"></a>
-<span class="sourceLineNo">222</span>      while (rr.next(key, value)) {<a 
name="line.222"></a>
-<span class="sourceLineNo">223</span>        verifyRowFromMap(key, value);<a 
name="line.223"></a>
-<span class="sourceLineNo">224</span>        
rowTracker.addRow(key.copyBytes());<a name="line.224"></a>
-<span class="sourceLineNo">225</span>      }<a name="line.225"></a>
-<span class="sourceLineNo">226</span><a name="line.226"></a>
-<span class="sourceLineNo">227</span>      rr.close();<a name="line.227"></a>
-<span class="sourceLineNo">228</span>    }<a name="line.228"></a>
-<span class="sourceLineNo">229</span><a name="line.229"></a>
-<span class="sourceLineNo">230</span>    // validate all rows are seen<a 
name="line.230"></a>
-<span class="sourceLineNo">231</span>    rowTracker.validate();<a 
name="line.231"></a>
-<span class="sourceLineNo">232</span>  }<a name="line.232"></a>
-<span class="sourceLineNo">233</span><a name="line.233"></a>
-<span class="sourceLineNo">234</span>  @Override<a name="line.234"></a>
-<span class="sourceLineNo">235</span>  protected void 
testWithMapReduceImpl(HBaseTestingUtility util, TableName tableName,<a 
name="line.235"></a>
-<span class="sourceLineNo">236</span>      String snapshotName, Path tableDir, 
int numRegions, int numSplitsPerRegion, int expectedNumSplits,<a 
name="line.236"></a>
-<span class="sourceLineNo">237</span>      boolean shutdownCluster) throws 
Exception {<a name="line.237"></a>
-<span class="sourceLineNo">238</span>    doTestWithMapReduce(util, tableName, 
snapshotName, getStartRow(), getEndRow(), tableDir,<a name="line.238"></a>
-<span class="sourceLineNo">239</span>      numRegions, numSplitsPerRegion, 
expectedNumSplits, shutdownCluster);<a name="line.239"></a>
-<span class="sourceLineNo">240</span>  }<a name="line.240"></a>
+<span class="sourceLineNo">214</span>    HBaseTestingUtility.SeenRowTracker 
rowTracker =<a name="line.214"></a>
+<span class="sourceLineNo">215</span>      new 
HBaseTestingUtility.SeenRowTracker(startRow, stopRow);<a name="line.215"></a>
+<span class="sourceLineNo">216</span><a name="line.216"></a>
+<span class="sourceLineNo">217</span>    // 
SNAPSHOT_INPUTFORMAT_LOCALITY_ENABLED_KEY is not explicitly specified,<a 
name="line.217"></a>
+<span class="sourceLineNo">218</span>    // so the default value is taken.<a 
name="line.218"></a>
+<span class="sourceLineNo">219</span>    boolean localityEnabled = 
SNAPSHOT_INPUTFORMAT_LOCALITY_ENABLED_DEFAULT;<a name="line.219"></a>
+<span class="sourceLineNo">220</span><a name="line.220"></a>
+<span class="sourceLineNo">221</span>    for (int i = 0; i &lt; splits.length; 
i++) {<a name="line.221"></a>
+<span class="sourceLineNo">222</span>      // validate input split<a 
name="line.222"></a>
+<span class="sourceLineNo">223</span>      InputSplit split = splits[i];<a 
name="line.223"></a>
+<span class="sourceLineNo">224</span>      Assert.assertTrue(split instanceof 
TableSnapshotInputFormat.TableSnapshotRegionSplit);<a name="line.224"></a>
+<span class="sourceLineNo">225</span>      if (localityEnabled) {<a 
name="line.225"></a>
+<span class="sourceLineNo">226</span>        // When localityEnabled is true, 
meant to verify split.getLocations()<a name="line.226"></a>
+<span class="sourceLineNo">227</span>        // by the following statement:<a 
name="line.227"></a>
+<span class="sourceLineNo">228</span>        //   
Assert.assertTrue(split.getLocations() != null &amp;&amp; 
split.getLocations().length != 0);<a name="line.228"></a>
+<span class="sourceLineNo">229</span>        // However, getLocations() of 
some splits could return an empty array (length is 0),<a name="line.229"></a>
+<span class="sourceLineNo">230</span>        // so drop the verification on 
length.<a name="line.230"></a>
+<span class="sourceLineNo">231</span>        // TODO: investigate how to 
verify split.getLocations() when localityEnabled is true<a name="line.231"></a>
+<span class="sourceLineNo">232</span>        
Assert.assertTrue(split.getLocations() != null);<a name="line.232"></a>
+<span class="sourceLineNo">233</span>      } else {<a name="line.233"></a>
+<span class="sourceLineNo">234</span>        
Assert.assertTrue(split.getLocations() != null &amp;&amp; 
split.getLocations().length == 0);<a name="line.234"></a>
+<span class="sourceLineNo">235</span>      }<a name="line.235"></a>
+<span class="sourceLineNo">236</span><a name="line.236"></a>
+<span class="sourceLineNo">237</span>      // validate record reader<a 
name="line.237"></a>
+<span class="sourceLineNo">238</span>      OutputCollector collector = 
mock(OutputCollector.class);<a name="line.238"></a>
+<span class="sourceLineNo">239</span>      Reporter reporter = 
mock(Reporter.class);<a name="line.239"></a>
+<span class="sourceLineNo">240</span>      
RecordReader&lt;ImmutableBytesWritable, Result&gt; rr = 
tsif.getRecordReader(split, job, reporter);<a name="line.240"></a>
 <span class="sourceLineNo">241</span><a name="line.241"></a>
-<span class="sourceLineNo">242</span>  // this is also called by the 
IntegrationTestTableSnapshotInputFormat<a name="line.242"></a>
-<span class="sourceLineNo">243</span>  public static void 
doTestWithMapReduce(HBaseTestingUtility util, TableName tableName,<a 
name="line.243"></a>
-<span class="sourceLineNo">244</span>      String snapshotName, byte[] 
startRow, byte[] endRow, Path tableDir, int numRegions,<a name="line.244"></a>
-<span class="sourceLineNo">245</span>      int numSplitsPerRegion,int 
expectedNumSplits, boolean shutdownCluster) throws Exception {<a 
name="line.245"></a>
-<span class="sourceLineNo">246</span><a name="line.246"></a>
-<span class="sourceLineNo">247</span>    //create the table and snapshot<a 
name="line.247"></a>
-<span class="sourceLineNo">248</span>    createTableAndSnapshot(util, 
tableName, snapshotName, startRow, endRow, numRegions);<a name="line.248"></a>
+<span class="sourceLineNo">242</span>      // validate we can read all the 
data back<a name="line.242"></a>
+<span class="sourceLineNo">243</span>      ImmutableBytesWritable key = 
rr.createKey();<a name="line.243"></a>
+<span class="sourceLineNo">244</span>      Result value = rr.createValue();<a 
name="line.244"></a>
+<span class="sourceLineNo">245</span>      while (rr.next(key, value)) {<a 
name="line.245"></a>
+<span class="sourceLineNo">246</span>        verifyRowFromMap(key, value);<a 
name="line.246"></a>
+<span class="sourceLineNo">247</span>        
rowTracker.addRow(key.copyBytes());<a name="line.247"></a>
+<span class="sourceLineNo">248</span>      }<a name="line.248"></a>
 <span class="sourceLineNo">249</span><a name="line.249"></a>
-<span class="sourceLineNo">250</span>    if (shutdownCluster) {<a 
name="line.250"></a>
-<span class="sourceLineNo">251</span>      util.shutdownMiniHBaseCluster();<a 
name="line.251"></a>
-<span class="sourceLineNo">252</span>    }<a name="line.252"></a>
-<span class="sourceLineNo">253</span><a name="line.253"></a>
-<span class="sourceLineNo">254</span>    try {<a name="line.254"></a>
-<span class="sourceLineNo">255</span>      // create the job<a 
name="line.255"></a>
-<span class="sourceLineNo">256</span>      JobConf jobConf = new 
JobConf(util.getConfiguration());<a name="line.256"></a>
-<span class="sourceLineNo">257</span><a name="line.257"></a>
-<span class="sourceLineNo">258</span>      
jobConf.setJarByClass(util.getClass());<a name="line.258"></a>
-<span class="sourceLineNo">259</span>      
org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil.addDependencyJarsForClasses(jobConf,<a
 name="line.259"></a>
-<span class="sourceLineNo">260</span>        
TestTableSnapshotInputFormat.class);<a name="line.260"></a>
-<span class="sourceLineNo">261</span><a name="line.261"></a>
-<span class="sourceLineNo">262</span>      if(numSplitsPerRegion &gt; 1) {<a 
name="line.262"></a>
-<span class="sourceLineNo">263</span>        
TableMapReduceUtil.initTableSnapshotMapJob(snapshotName, COLUMNS,<a 
name="line.263"></a>
-<span class="sourceLineNo">264</span>                
TestTableSnapshotMapper.class, ImmutableBytesWritable.class,<a 
name="line.264"></a>
-<span class="sourceLineNo">265</span>                NullWritable.class, 
jobConf, true, tableDir, new RegionSplitter.UniformSplit(),<a 
name="line.265"></a>
-<span class="sourceLineNo">266</span>                numSplitsPerRegion);<a 
name="line.266"></a>
-<span class="sourceLineNo">267</span>      } else {<a name="line.267"></a>
-<span class="sourceLineNo">268</span>        
TableMapReduceUtil.initTableSnapshotMapJob(snapshotName, COLUMNS,<a 
name="line.268"></a>
-<span class="sourceLineNo">269</span>                
TestTableSnapshotMapper.class, ImmutableBytesWritable.class,<a 
name="line.269"></a>
-<span class="sourceLineNo">270</span>                NullWritable.class, 
jobConf, true, tableDir);<a name="line.270"></a>
-<span class="sourceLineNo">271</span>      }<a name="line.271"></a>
+<span class="sourceLineNo">250</span>      rr.close();<a name="line.250"></a>
+<span class="sourceLineNo">251</span>    }<a name="line.251"></a>
+<span class="sourceLineNo">252</span><a name="line.252"></a>
+<span class="sourceLineNo">253</span>    // validate all rows are seen<a 
name="line.253"></a>
+<span class="sourceLineNo">254</span>    rowTracker.validate();<a 
name="line.254"></a>
+<span class="sourceLineNo">255</span>  }<a name="line.255"></a>
+<span class="sourceLineNo">256</span><a name="line.256"></a>
+<span class="sourceLineNo">257</span>  @Override<a name="line.257"></a>
+<span class="sourceLineNo">258</span>  protected void 
testWithMapReduceImpl(HBaseTestingUtility util, TableName tableName,<a 
name="line.258"></a>
+<span class="sourceLineNo">259</span>      String snapshotName, Path tableDir, 
int numRegions, int numSplitsPerRegion, int expectedNumSplits,<a 
name="line.259"></a>
+<span class="sourceLineNo">260</span>      boolean shutdownCluster) throws 
Exception {<a name="line.260"></a>
+<span class="sourceLineNo">261</span>    doTestWithMapReduce(util, tableName, 
snapshotName, getStartRow(), getEndRow(), tableDir,<a name="line.261"></a>
+<span class="sourceLineNo">262</span>      numRegions, numSplitsPerRegion, 
expectedNumSplits, shutdownCluster);<a name="line.262"></a>
+<span class="sourceLineNo">263</span>  }<a name="line.263"></a>
+<span class="sourceLineNo">264</span><a name="line.264"></a>
+<span class="sourceLineNo">265</span>  // this is also called by the 
IntegrationTestTableSnapshotInputFormat<a name="line.265"></a>
+<span class="sourceLineNo">266</span>  public static void 
doTestWithMapReduce(HBaseTestingUtility util, TableName tableName,<a 
name="line.266"></a>
+<span class="sourceLineNo">267</span>      String snapshotName, byte[] 
startRow, byte[] endRow, Path tableDir, int numRegions,<a name="line.267"></a>
+<span class="sourceLineNo">268</span>      int numSplitsPerRegion,int 
expectedNumSplits, boolean shutdownCluster) throws Exception {<a 
name="line.268"></a>
+<span class="sourceLineNo">269</span><a name="line.269"></a>
+<span class="sourceLineNo">270</span>    //create the table and snapshot<a 
name="line.270"></a>
+<span class="sourceLineNo">271</span>    createTableAndSnapshot(util, 
tableName, snapshotName, startRow, endRow, numRegions);<a name="line.271"></a>
 <span class="sourceLineNo">272</span><a name="line.272"></a>
-<span class="sourceLineNo">273</span>      
jobConf.setReducerClass(TestTableSnapshotInputFormat.TestTableSnapshotReducer.class);<a
 name="line.273"></a>
-<span class="sourceLineNo">274</span>      jobConf.setNumReduceTasks(1);<a 
name="line.274"></a>
-<span class="sourceLineNo">275</span>      
jobConf.setOutputFormat(NullOutputFormat.class);<a name="line.275"></a>
+<span class="sourceLineNo">273</span>    if (shutdownCluster) {<a 
name="line.273"></a>
+<span class="sourceLineNo">274</span>      util.shutdownMiniHBaseCluster();<a 
name="line.274"></a>
+<span class="sourceLineNo">275</span>    }<a name="line.275"></a>
 <span class="sourceLineNo">276</span><a name="line.276"></a>
-<span class="sourceLineNo">277</span>      RunningJob job = 
JobClient.runJob(jobConf);<a name="line.277"></a>
-<span class="sourceLineNo">278</span>      
Assert.assertTrue(job.isSuccessful());<a name="line.278"></a>
-<span class="sourceLineNo">279</span>    } finally {<a name="line.279"></a>
-<span class="sourceLineNo">280</span>      if (!shutdownCluster) {<a 
name="line.280"></a>
-<span class="sourceLineNo">281</span>        
util.getAdmin().deleteSnapshot(snapshotName);<a name="line.281"></a>
-<span class="sourceLineNo">282</span>        util.deleteTable(tableName);<a 
name="line.282"></a>
-<span class="sourceLineNo">283</span>      }<a name="line.283"></a>
-<span class="sourceLineNo">284</span>    }<a name="line.284"></a>
-<span class="sourceLineNo">285</span>  }<a name="line.285"></a>
-<span class="sourceLineNo">286</span>}<a name="line.286"></a>
+<span class="sourceLineNo">277</span>    try {<a name="line.277"></a>
+<span class="sourceLineNo">278</span>      // create the job<a 
name="line.278"></a>
+<span class="sourceLineNo">279</span>      JobConf jobConf = new 
JobConf(util.getConfiguration());<a name="line.279"></a>
+<span class="sourceLineNo">280</span><a name="line.280"></a>
+<span class="sourceLineNo">281</span>      
jobConf.setJarByClass(util.getClass());<a name="line.281"></a>
+<span class="sourceLineNo">282</span>      
org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil.addDependencyJarsForClasses(jobConf,<a
 name="line.282"></a>
+<span class="sourceLineNo">283</span>        
TestTableSnapshotInputFormat.class);<a name="line.283"></a>
+<span class="sourceLineNo">284</span><a name="line.284"></a>
+<span class="sourceLineNo">285</span>      if(numSplitsPerRegion &gt; 1) {<a 
name="line.285"></a>
+<span class="sourceLineNo">286</span>        
TableMapReduceUtil.initTableSnapshotMapJob(snapshotName, COLUMNS,<a 
name="line.286"></a>
+<span class="sourceLineNo">287</span>                
TestTableSnapshotMapper.class, ImmutableBytesWritable.class,<a 
name="line.287"></a>
+<span class="sourceLineNo">288</span>                NullWritable.class, 
jobConf, true, tableDir, new RegionSplitter.UniformSplit(),<a 
name="line.288"></a>
+<span class="sourceLineNo">289</span>                numSplitsPerRegion);<a 
name="line.289"></a>
+<span class="sourceLineNo">290</span>      } else {<a name="line.290"></a>
+<span class="sourceLineNo">291</span>        
TableMapReduceUtil.initTableSnapshotMapJob(snapshotName, COLUMNS,<a 
name="line.291"></a>
+<span class="sourceLineNo">292</span>                
TestTableSnapshotMapper.class, ImmutableBytesWritable.class,<a 
name="line.292"></a>
+<span class="sourceLineNo">293</span>                NullWritable.class, 
jobConf, true, tableDir);<a name="line.293"></a>
+<span class="sourceLineNo">294</span>      }<a name="line.294"></a>
+<span class="sourceLineNo">295</span><a name="line.295"></a>
+<span class="sourceLineNo">296</span>      
jobConf.setReducerClass(TestTableSnapshotInputFormat.TestTableSnapshotReducer.class);<a
 name="line.296"></a>
+<span class="sourceLineNo">297</span>      jobConf.setNumReduceTasks(1);<a 
name="line.297"></a>
+<span class="sourceLineNo">298</span>      
jobConf.setOutputFormat(NullOutputFormat.class);<a name="line.298"></a>
+<span class="sourceLineNo">299</span><a name="line.299"></a>
+<span class="sourceLineNo">300</span>      RunningJob job = 
JobClient.runJob(jobConf);<a name="line.300"></a>
+<span class="sourceLineNo">301</span>      
Assert.assertTrue(job.isSuccessful());<a name="line.301"></a>
+<span class="sourceLineNo">302</span>    } finally {<a name="line.302"></a>
+<span class="sourceLineNo">303</span>      if (!shutdownCluster) {<a 
name="line.303"></a>
+<span class="sourceLineNo">304</span>        
util.getAdmin().deleteSnapshot(snapshotName);<a name="line.304"></a>
+<span class="sourceLineNo">305</span>        util.deleteTable(tableName);<a 
name="line.305"></a>
+<span class="sourceLineNo">306</span>      }<a name="line.306"></a>
+<span class="sourceLineNo">307</span>    }<a name="line.307"></a>
+<span class="sourceLineNo">308</span>  }<a name="line.308"></a>
+<span class="sourceLineNo">309</span>}<a name="line.309"></a>
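For context on the change above: the commit threads a new boolean parameter,
setLocalityEnabledTo, through testWithMockedMapReduce() and makes
verifyWithMockedMapReduce() assert split locations against the locality
default. Below is a minimal sketch (not part of the commit) of the
configuration round-trip the test leaves implicit. It assumes only the two
constants named in the diff, SNAPSHOT_INPUTFORMAT_LOCALITY_ENABLED_KEY and
SNAPSHOT_INPUTFORMAT_LOCALITY_ENABLED_DEFAULT; the helper class and method
names are hypothetical.

    import org.apache.hadoop.hbase.mapreduce.TableSnapshotInputFormatImpl;
    import org.apache.hadoop.mapred.JobConf;

    // Hypothetical helper illustrating how a job could pin locality
    // explicitly, versus the default-value path the updated test covers.
    public class SnapshotLocalitySketch {
      static void setLocality(JobConf job, boolean enabled) {
        job.setBoolean(
            TableSnapshotInputFormatImpl.SNAPSHOT_INPUTFORMAT_LOCALITY_ENABLED_KEY,
            enabled);
      }

      static boolean isLocalityEnabled(JobConf job) {
        // When the key is absent (as in the mocked test), the default applies.
        return job.getBoolean(
            TableSnapshotInputFormatImpl.SNAPSHOT_INPUTFORMAT_LOCALITY_ENABLED_KEY,
            TableSnapshotInputFormatImpl.SNAPSHOT_INPUTFORMAT_LOCALITY_ENABLED_DEFAULT);
      }
    }

Note the hedge already present in the committed comments: even with locality
enabled, InputSplit.getLocations() may legitimately return an empty array for
some splits, which is why the new assertion only checks for non-null.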
 
 
 
