http://git-wip-us.apache.org/repos/asf/hbase-site/blob/06efc31c/testdevapidocs/src-html/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScanBase.html ---------------------------------------------------------------------- diff --git a/testdevapidocs/src-html/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScanBase.html b/testdevapidocs/src-html/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScanBase.html index 9716afd..3253dec 100644 --- a/testdevapidocs/src-html/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScanBase.html +++ b/testdevapidocs/src-html/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScanBase.html @@ -31,319 +31,297 @@ <span class="sourceLineNo">023</span>import static org.junit.Assert.assertTrue;<a name="line.23"></a> <span class="sourceLineNo">024</span><a name="line.24"></a> <span class="sourceLineNo">025</span>import java.io.IOException;<a name="line.25"></a> -<span class="sourceLineNo">026</span>import java.util.*;<a name="line.26"></a> -<span class="sourceLineNo">027</span><a name="line.27"></a> -<span class="sourceLineNo">028</span>import org.apache.hadoop.conf.Configuration;<a name="line.28"></a> -<span class="sourceLineNo">029</span>import org.apache.hadoop.fs.Path;<a name="line.29"></a> -<span class="sourceLineNo">030</span>import org.apache.hadoop.hbase.HBaseTestingUtility;<a name="line.30"></a> -<span class="sourceLineNo">031</span>import org.apache.hadoop.hbase.TableName;<a name="line.31"></a> -<span class="sourceLineNo">032</span>import org.apache.hadoop.hbase.client.Result;<a name="line.32"></a> -<span class="sourceLineNo">033</span>import org.apache.hadoop.hbase.client.Scan;<a name="line.33"></a> -<span class="sourceLineNo">034</span>import org.apache.hadoop.hbase.client.Table;<a name="line.34"></a> -<span class="sourceLineNo">035</span>import org.apache.hadoop.hbase.io.ImmutableBytesWritable;<a name="line.35"></a> -<span class="sourceLineNo">036</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.36"></a> -<span class="sourceLineNo">037</span>import org.apache.hadoop.io.NullWritable;<a name="line.37"></a> -<span class="sourceLineNo">038</span>import org.apache.hadoop.mapred.JobConf;<a name="line.38"></a> -<span class="sourceLineNo">039</span>import org.apache.hadoop.mapreduce.InputSplit;<a name="line.39"></a> -<span class="sourceLineNo">040</span>import org.apache.hadoop.mapreduce.Job;<a name="line.40"></a> -<span class="sourceLineNo">041</span>import org.apache.hadoop.mapreduce.Reducer;<a name="line.41"></a> -<span class="sourceLineNo">042</span>import org.apache.hadoop.mapreduce.TaskCounter;<a name="line.42"></a> -<span class="sourceLineNo">043</span>import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;<a name="line.43"></a> -<span class="sourceLineNo">044</span>import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat;<a name="line.44"></a> -<span class="sourceLineNo">045</span>import org.junit.AfterClass;<a name="line.45"></a> -<span class="sourceLineNo">046</span>import org.junit.Assert;<a name="line.46"></a> -<span class="sourceLineNo">047</span>import org.junit.BeforeClass;<a name="line.47"></a> -<span class="sourceLineNo">048</span>import org.slf4j.Logger;<a name="line.48"></a> -<span class="sourceLineNo">049</span>import org.slf4j.LoggerFactory;<a name="line.49"></a> -<span class="sourceLineNo">050</span><a name="line.50"></a> -<span class="sourceLineNo">051</span><a name="line.51"></a> -<span class="sourceLineNo">052</span>/**<a name="line.52"></a> -<span class="sourceLineNo">053</span> * 
<p><a name="line.53"></a> -<span class="sourceLineNo">054</span> * Tests various scan start and stop row scenarios. This is set in a scan and<a name="line.54"></a> -<span class="sourceLineNo">055</span> * tested in a MapReduce job to see if that is handed over and done properly<a name="line.55"></a> -<span class="sourceLineNo">056</span> * too.<a name="line.56"></a> -<span class="sourceLineNo">057</span> * </p><a name="line.57"></a> -<span class="sourceLineNo">058</span> * <p><a name="line.58"></a> -<span class="sourceLineNo">059</span> * This test is broken into two parts in order to side-step the test timeout<a name="line.59"></a> -<span class="sourceLineNo">060</span> * period of 900, as documented in HBASE-8326.<a name="line.60"></a> -<span class="sourceLineNo">061</span> * </p><a name="line.61"></a> -<span class="sourceLineNo">062</span> */<a name="line.62"></a> -<span class="sourceLineNo">063</span>public abstract class TestTableInputFormatScanBase {<a name="line.63"></a> -<span class="sourceLineNo">064</span><a name="line.64"></a> -<span class="sourceLineNo">065</span> private static final Logger LOG = LoggerFactory.getLogger(TestTableInputFormatScanBase.class);<a name="line.65"></a> -<span class="sourceLineNo">066</span> static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();<a name="line.66"></a> -<span class="sourceLineNo">067</span><a name="line.67"></a> -<span class="sourceLineNo">068</span> static final TableName TABLE_NAME = TableName.valueOf("scantest");<a name="line.68"></a> -<span class="sourceLineNo">069</span> static final byte[][] INPUT_FAMILYS = {Bytes.toBytes("content1"), Bytes.toBytes("content2")};<a name="line.69"></a> -<span class="sourceLineNo">070</span> static final String KEY_STARTROW = "startRow";<a name="line.70"></a> -<span class="sourceLineNo">071</span> static final String KEY_LASTROW = "stpRow";<a name="line.71"></a> -<span class="sourceLineNo">072</span><a name="line.72"></a> -<span class="sourceLineNo">073</span> private static Table table = null;<a name="line.73"></a> -<span class="sourceLineNo">074</span><a name="line.74"></a> -<span class="sourceLineNo">075</span> @BeforeClass<a name="line.75"></a> -<span class="sourceLineNo">076</span> public static void setUpBeforeClass() throws Exception {<a name="line.76"></a> -<span class="sourceLineNo">077</span> // test intermittently fails under hadoop2 (2.0.2-alpha) if shortcircuit-read (scr) is on.<a name="line.77"></a> -<span class="sourceLineNo">078</span> // this turns it off for this test. 
TODO: Figure out why scr breaks recovery.<a name="line.78"></a> -<span class="sourceLineNo">079</span> System.setProperty("hbase.tests.use.shortcircuit.reads", "false");<a name="line.79"></a> -<span class="sourceLineNo">080</span><a name="line.80"></a> -<span class="sourceLineNo">081</span> // switch TIF to log at DEBUG level<a name="line.81"></a> -<span class="sourceLineNo">082</span> TEST_UTIL.enableDebug(TableInputFormat.class);<a name="line.82"></a> -<span class="sourceLineNo">083</span> TEST_UTIL.enableDebug(TableInputFormatBase.class);<a name="line.83"></a> -<span class="sourceLineNo">084</span> // start mini hbase cluster<a name="line.84"></a> -<span class="sourceLineNo">085</span> TEST_UTIL.startMiniCluster(3);<a name="line.85"></a> -<span class="sourceLineNo">086</span> // create and fill table<a name="line.86"></a> -<span class="sourceLineNo">087</span> table = TEST_UTIL.createMultiRegionTable(TABLE_NAME, INPUT_FAMILYS);<a name="line.87"></a> -<span class="sourceLineNo">088</span> TEST_UTIL.loadTable(table, INPUT_FAMILYS, null, false);<a name="line.88"></a> -<span class="sourceLineNo">089</span> }<a name="line.89"></a> -<span class="sourceLineNo">090</span><a name="line.90"></a> -<span class="sourceLineNo">091</span> @AfterClass<a name="line.91"></a> -<span class="sourceLineNo">092</span> public static void tearDownAfterClass() throws Exception {<a name="line.92"></a> -<span class="sourceLineNo">093</span> TEST_UTIL.shutdownMiniCluster();<a name="line.93"></a> -<span class="sourceLineNo">094</span> }<a name="line.94"></a> -<span class="sourceLineNo">095</span><a name="line.95"></a> -<span class="sourceLineNo">096</span> /**<a name="line.96"></a> -<span class="sourceLineNo">097</span> * Pass the key and value to reduce.<a name="line.97"></a> -<span class="sourceLineNo">098</span> */<a name="line.98"></a> -<span class="sourceLineNo">099</span> public static class ScanMapper<a name="line.99"></a> -<span class="sourceLineNo">100</span> extends TableMapper<ImmutableBytesWritable, ImmutableBytesWritable> {<a name="line.100"></a> -<span class="sourceLineNo">101</span><a name="line.101"></a> -<span class="sourceLineNo">102</span> /**<a name="line.102"></a> -<span class="sourceLineNo">103</span> * Pass the key and value to reduce.<a name="line.103"></a> -<span class="sourceLineNo">104</span> *<a name="line.104"></a> -<span class="sourceLineNo">105</span> * @param key The key, here "aaa", "aab" etc.<a name="line.105"></a> -<span class="sourceLineNo">106</span> * @param value The value is the same as the key.<a name="line.106"></a> -<span class="sourceLineNo">107</span> * @param context The task context.<a name="line.107"></a> -<span class="sourceLineNo">108</span> * @throws IOException When reading the rows fails.<a name="line.108"></a> -<span class="sourceLineNo">109</span> */<a name="line.109"></a> -<span class="sourceLineNo">110</span> @Override<a name="line.110"></a> -<span class="sourceLineNo">111</span> public void map(ImmutableBytesWritable key, Result value,<a name="line.111"></a> -<span class="sourceLineNo">112</span> Context context)<a name="line.112"></a> -<span class="sourceLineNo">113</span> throws IOException, InterruptedException {<a name="line.113"></a> -<span class="sourceLineNo">114</span> if (value.size() != 2) {<a name="line.114"></a> -<span class="sourceLineNo">115</span> throw new IOException("There should be two input columns");<a name="line.115"></a> -<span class="sourceLineNo">116</span> }<a name="line.116"></a> -<span class="sourceLineNo">117</span> Map<byte[], 
NavigableMap<byte[], NavigableMap<Long, byte[]>>><a name="line.117"></a> -<span class="sourceLineNo">118</span> cfMap = value.getMap();<a name="line.118"></a> -<span class="sourceLineNo">119</span><a name="line.119"></a> -<span class="sourceLineNo">120</span> if (!cfMap.containsKey(INPUT_FAMILYS[0]) || !cfMap.containsKey(INPUT_FAMILYS[1])) {<a name="line.120"></a> -<span class="sourceLineNo">121</span> throw new IOException("Wrong input columns. Missing: '" +<a name="line.121"></a> -<span class="sourceLineNo">122</span> Bytes.toString(INPUT_FAMILYS[0]) + "' or '" + Bytes.toString(INPUT_FAMILYS[1]) + "'.");<a name="line.122"></a> -<span class="sourceLineNo">123</span> }<a name="line.123"></a> -<span class="sourceLineNo">124</span><a name="line.124"></a> -<span class="sourceLineNo">125</span> String val0 = Bytes.toStringBinary(value.getValue(INPUT_FAMILYS[0], null));<a name="line.125"></a> -<span class="sourceLineNo">126</span> String val1 = Bytes.toStringBinary(value.getValue(INPUT_FAMILYS[1], null));<a name="line.126"></a> -<span class="sourceLineNo">127</span> LOG.info("map: key -> " + Bytes.toStringBinary(key.get()) +<a name="line.127"></a> -<span class="sourceLineNo">128</span> ", value -> (" + val0 + ", " + val1 + ")");<a name="line.128"></a> -<span class="sourceLineNo">129</span> context.write(key, key);<a name="line.129"></a> -<span class="sourceLineNo">130</span> }<a name="line.130"></a> -<span class="sourceLineNo">131</span> }<a name="line.131"></a> -<span class="sourceLineNo">132</span><a name="line.132"></a> -<span class="sourceLineNo">133</span> /**<a name="line.133"></a> -<span class="sourceLineNo">134</span> * Checks the last and first key seen against the scanner boundaries.<a name="line.134"></a> -<span class="sourceLineNo">135</span> */<a name="line.135"></a> -<span class="sourceLineNo">136</span> public static class ScanReducer<a name="line.136"></a> -<span class="sourceLineNo">137</span> extends Reducer<ImmutableBytesWritable, ImmutableBytesWritable,<a name="line.137"></a> -<span class="sourceLineNo">138</span> NullWritable, NullWritable> {<a name="line.138"></a> -<span class="sourceLineNo">139</span><a name="line.139"></a> -<span class="sourceLineNo">140</span> private String first = null;<a name="line.140"></a> -<span class="sourceLineNo">141</span> private String last = null;<a name="line.141"></a> -<span class="sourceLineNo">142</span><a name="line.142"></a> -<span class="sourceLineNo">143</span> protected void reduce(ImmutableBytesWritable key,<a name="line.143"></a> -<span class="sourceLineNo">144</span> Iterable<ImmutableBytesWritable> values, Context context)<a name="line.144"></a> -<span class="sourceLineNo">145</span> throws IOException ,InterruptedException {<a name="line.145"></a> -<span class="sourceLineNo">146</span> int count = 0;<a name="line.146"></a> -<span class="sourceLineNo">147</span> for (ImmutableBytesWritable value : values) {<a name="line.147"></a> -<span class="sourceLineNo">148</span> String val = Bytes.toStringBinary(value.get());<a name="line.148"></a> -<span class="sourceLineNo">149</span> LOG.info("reduce: key[" + count + "] -> " +<a name="line.149"></a> -<span class="sourceLineNo">150</span> Bytes.toStringBinary(key.get()) + ", value -> " + val);<a name="line.150"></a> -<span class="sourceLineNo">151</span> if (first == null) first = val;<a name="line.151"></a> -<span class="sourceLineNo">152</span> last = val;<a name="line.152"></a> -<span class="sourceLineNo">153</span> count++;<a name="line.153"></a> -<span 
class="sourceLineNo">154</span> }<a name="line.154"></a> -<span class="sourceLineNo">155</span> }<a name="line.155"></a> -<span class="sourceLineNo">156</span><a name="line.156"></a> -<span class="sourceLineNo">157</span> protected void cleanup(Context context)<a name="line.157"></a> -<span class="sourceLineNo">158</span> throws IOException, InterruptedException {<a name="line.158"></a> -<span class="sourceLineNo">159</span> Configuration c = context.getConfiguration();<a name="line.159"></a> -<span class="sourceLineNo">160</span> String startRow = c.get(KEY_STARTROW);<a name="line.160"></a> -<span class="sourceLineNo">161</span> String lastRow = c.get(KEY_LASTROW);<a name="line.161"></a> -<span class="sourceLineNo">162</span> LOG.info("cleanup: first -> \"" + first + "\", start row -> \"" + startRow + "\"");<a name="line.162"></a> -<span class="sourceLineNo">163</span> LOG.info("cleanup: last -> \"" + last + "\", last row -> \"" + lastRow + "\"");<a name="line.163"></a> -<span class="sourceLineNo">164</span> if (startRow != null && startRow.length() > 0) {<a name="line.164"></a> -<span class="sourceLineNo">165</span> assertEquals(startRow, first);<a name="line.165"></a> -<span class="sourceLineNo">166</span> }<a name="line.166"></a> -<span class="sourceLineNo">167</span> if (lastRow != null && lastRow.length() > 0) {<a name="line.167"></a> -<span class="sourceLineNo">168</span> assertEquals(lastRow, last);<a name="line.168"></a> -<span class="sourceLineNo">169</span> }<a name="line.169"></a> -<span class="sourceLineNo">170</span> }<a name="line.170"></a> -<span class="sourceLineNo">171</span><a name="line.171"></a> -<span class="sourceLineNo">172</span> }<a name="line.172"></a> -<span class="sourceLineNo">173</span><a name="line.173"></a> -<span class="sourceLineNo">174</span> /**<a name="line.174"></a> -<span class="sourceLineNo">175</span> * Tests an MR Scan initialized from properties set in the Configuration.<a name="line.175"></a> -<span class="sourceLineNo">176</span> *<a name="line.176"></a> -<span class="sourceLineNo">177</span> * @throws IOException<a name="line.177"></a> -<span class="sourceLineNo">178</span> * @throws ClassNotFoundException<a name="line.178"></a> -<span class="sourceLineNo">179</span> * @throws InterruptedException<a name="line.179"></a> -<span class="sourceLineNo">180</span> */<a name="line.180"></a> -<span class="sourceLineNo">181</span> protected void testScanFromConfiguration(String start, String stop, String last)<a name="line.181"></a> -<span class="sourceLineNo">182</span> throws IOException, InterruptedException, ClassNotFoundException {<a name="line.182"></a> -<span class="sourceLineNo">183</span> String jobName = "ScanFromConfig" + (start != null ? start.toUpperCase(Locale.ROOT) : "Empty") +<a name="line.183"></a> -<span class="sourceLineNo">184</span> "To" + (stop != null ? stop.toUpperCase(Locale.ROOT) : "Empty");<a name="line.184"></a> -<span class="sourceLineNo">185</span> Configuration c = new Configuration(TEST_UTIL.getConfiguration());<a name="line.185"></a> -<span class="sourceLineNo">186</span> c.set(TableInputFormat.INPUT_TABLE, TABLE_NAME.getNameAsString());<a name="line.186"></a> -<span class="sourceLineNo">187</span> c.set(TableInputFormat.SCAN_COLUMN_FAMILY, Bytes.toString(INPUT_FAMILYS[0]) + ", "<a name="line.187"></a> -<span class="sourceLineNo">188</span> + Bytes.toString(INPUT_FAMILYS[1]));<a name="line.188"></a> -<span class="sourceLineNo">189</span> c.set(KEY_STARTROW, start != null ? 
start : "");<a name="line.189"></a> -<span class="sourceLineNo">190</span> c.set(KEY_LASTROW, last != null ? last : "");<a name="line.190"></a> +<span class="sourceLineNo">026</span>import java.util.ArrayList;<a name="line.26"></a> +<span class="sourceLineNo">027</span>import java.util.List;<a name="line.27"></a> +<span class="sourceLineNo">028</span>import java.util.Locale;<a name="line.28"></a> +<span class="sourceLineNo">029</span>import java.util.Map;<a name="line.29"></a> +<span class="sourceLineNo">030</span>import java.util.NavigableMap;<a name="line.30"></a> +<span class="sourceLineNo">031</span>import org.apache.hadoop.conf.Configuration;<a name="line.31"></a> +<span class="sourceLineNo">032</span>import org.apache.hadoop.fs.Path;<a name="line.32"></a> +<span class="sourceLineNo">033</span>import org.apache.hadoop.hbase.HBaseTestingUtility;<a name="line.33"></a> +<span class="sourceLineNo">034</span>import org.apache.hadoop.hbase.TableName;<a name="line.34"></a> +<span class="sourceLineNo">035</span>import org.apache.hadoop.hbase.client.Result;<a name="line.35"></a> +<span class="sourceLineNo">036</span>import org.apache.hadoop.hbase.client.Scan;<a name="line.36"></a> +<span class="sourceLineNo">037</span>import org.apache.hadoop.hbase.client.Table;<a name="line.37"></a> +<span class="sourceLineNo">038</span>import org.apache.hadoop.hbase.io.ImmutableBytesWritable;<a name="line.38"></a> +<span class="sourceLineNo">039</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.39"></a> +<span class="sourceLineNo">040</span>import org.apache.hadoop.io.NullWritable;<a name="line.40"></a> +<span class="sourceLineNo">041</span>import org.apache.hadoop.mapred.JobConf;<a name="line.41"></a> +<span class="sourceLineNo">042</span>import org.apache.hadoop.mapreduce.InputSplit;<a name="line.42"></a> +<span class="sourceLineNo">043</span>import org.apache.hadoop.mapreduce.Job;<a name="line.43"></a> +<span class="sourceLineNo">044</span>import org.apache.hadoop.mapreduce.Reducer;<a name="line.44"></a> +<span class="sourceLineNo">045</span>import org.apache.hadoop.mapreduce.TaskCounter;<a name="line.45"></a> +<span class="sourceLineNo">046</span>import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;<a name="line.46"></a> +<span class="sourceLineNo">047</span>import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat;<a name="line.47"></a> +<span class="sourceLineNo">048</span>import org.junit.AfterClass;<a name="line.48"></a> +<span class="sourceLineNo">049</span>import org.junit.Assert;<a name="line.49"></a> +<span class="sourceLineNo">050</span>import org.junit.BeforeClass;<a name="line.50"></a> +<span class="sourceLineNo">051</span>import org.slf4j.Logger;<a name="line.51"></a> +<span class="sourceLineNo">052</span>import org.slf4j.LoggerFactory;<a name="line.52"></a> +<span class="sourceLineNo">053</span><a name="line.53"></a> +<span class="sourceLineNo">054</span><a name="line.54"></a> +<span class="sourceLineNo">055</span>/**<a name="line.55"></a> +<span class="sourceLineNo">056</span> * Tests various scan start and stop row scenarios. 
This is set in a scan and tested in a MapReduce<a name="line.56"></a> +<span class="sourceLineNo">057</span> * job to see if that is handed over and done properly too.<a name="line.57"></a> +<span class="sourceLineNo">058</span> */<a name="line.58"></a> +<span class="sourceLineNo">059</span>public abstract class TestTableInputFormatScanBase {<a name="line.59"></a> +<span class="sourceLineNo">060</span><a name="line.60"></a> +<span class="sourceLineNo">061</span> private static final Logger LOG = LoggerFactory.getLogger(TestTableInputFormatScanBase.class);<a name="line.61"></a> +<span class="sourceLineNo">062</span> static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();<a name="line.62"></a> +<span class="sourceLineNo">063</span><a name="line.63"></a> +<span class="sourceLineNo">064</span> static final TableName TABLE_NAME = TableName.valueOf("scantest");<a name="line.64"></a> +<span class="sourceLineNo">065</span> static final byte[][] INPUT_FAMILYS = {Bytes.toBytes("content1"), Bytes.toBytes("content2")};<a name="line.65"></a> +<span class="sourceLineNo">066</span> static final String KEY_STARTROW = "startRow";<a name="line.66"></a> +<span class="sourceLineNo">067</span> static final String KEY_LASTROW = "stpRow";<a name="line.67"></a> +<span class="sourceLineNo">068</span><a name="line.68"></a> +<span class="sourceLineNo">069</span> private static Table table = null;<a name="line.69"></a> +<span class="sourceLineNo">070</span><a name="line.70"></a> +<span class="sourceLineNo">071</span> @BeforeClass<a name="line.71"></a> +<span class="sourceLineNo">072</span> public static void setUpBeforeClass() throws Exception {<a name="line.72"></a> +<span class="sourceLineNo">073</span> // test intermittently fails under hadoop2 (2.0.2-alpha) if shortcircuit-read (scr) is on.<a name="line.73"></a> +<span class="sourceLineNo">074</span> // this turns it off for this test. 
TODO: Figure out why scr breaks recovery.<a name="line.74"></a> +<span class="sourceLineNo">075</span> System.setProperty("hbase.tests.use.shortcircuit.reads", "false");<a name="line.75"></a> +<span class="sourceLineNo">076</span><a name="line.76"></a> +<span class="sourceLineNo">077</span> // switch TIF to log at DEBUG level<a name="line.77"></a> +<span class="sourceLineNo">078</span> TEST_UTIL.enableDebug(TableInputFormat.class);<a name="line.78"></a> +<span class="sourceLineNo">079</span> TEST_UTIL.enableDebug(TableInputFormatBase.class);<a name="line.79"></a> +<span class="sourceLineNo">080</span> // start mini hbase cluster<a name="line.80"></a> +<span class="sourceLineNo">081</span> TEST_UTIL.startMiniCluster(3);<a name="line.81"></a> +<span class="sourceLineNo">082</span> // create and fill table<a name="line.82"></a> +<span class="sourceLineNo">083</span> table = TEST_UTIL.createMultiRegionTable(TABLE_NAME, INPUT_FAMILYS);<a name="line.83"></a> +<span class="sourceLineNo">084</span> TEST_UTIL.loadTable(table, INPUT_FAMILYS, null, false);<a name="line.84"></a> +<span class="sourceLineNo">085</span> }<a name="line.85"></a> +<span class="sourceLineNo">086</span><a name="line.86"></a> +<span class="sourceLineNo">087</span> @AfterClass<a name="line.87"></a> +<span class="sourceLineNo">088</span> public static void tearDownAfterClass() throws Exception {<a name="line.88"></a> +<span class="sourceLineNo">089</span> TEST_UTIL.shutdownMiniCluster();<a name="line.89"></a> +<span class="sourceLineNo">090</span> }<a name="line.90"></a> +<span class="sourceLineNo">091</span><a name="line.91"></a> +<span class="sourceLineNo">092</span> /**<a name="line.92"></a> +<span class="sourceLineNo">093</span> * Pass the key and value to reduce.<a name="line.93"></a> +<span class="sourceLineNo">094</span> */<a name="line.94"></a> +<span class="sourceLineNo">095</span> public static class ScanMapper<a name="line.95"></a> +<span class="sourceLineNo">096</span> extends TableMapper<ImmutableBytesWritable, ImmutableBytesWritable> {<a name="line.96"></a> +<span class="sourceLineNo">097</span><a name="line.97"></a> +<span class="sourceLineNo">098</span> /**<a name="line.98"></a> +<span class="sourceLineNo">099</span> * Pass the key and value to reduce.<a name="line.99"></a> +<span class="sourceLineNo">100</span> *<a name="line.100"></a> +<span class="sourceLineNo">101</span> * @param key The key, here "aaa", "aab" etc.<a name="line.101"></a> +<span class="sourceLineNo">102</span> * @param value The value is the same as the key.<a name="line.102"></a> +<span class="sourceLineNo">103</span> * @param context The task context.<a name="line.103"></a> +<span class="sourceLineNo">104</span> * @throws IOException When reading the rows fails.<a name="line.104"></a> +<span class="sourceLineNo">105</span> */<a name="line.105"></a> +<span class="sourceLineNo">106</span> @Override<a name="line.106"></a> +<span class="sourceLineNo">107</span> public void map(ImmutableBytesWritable key, Result value,<a name="line.107"></a> +<span class="sourceLineNo">108</span> Context context)<a name="line.108"></a> +<span class="sourceLineNo">109</span> throws IOException, InterruptedException {<a name="line.109"></a> +<span class="sourceLineNo">110</span> if (value.size() != 2) {<a name="line.110"></a> +<span class="sourceLineNo">111</span> throw new IOException("There should be two input columns");<a name="line.111"></a> +<span class="sourceLineNo">112</span> }<a name="line.112"></a> +<span class="sourceLineNo">113</span> Map<byte[], 
NavigableMap<byte[], NavigableMap<Long, byte[]>>><a name="line.113"></a> +<span class="sourceLineNo">114</span> cfMap = value.getMap();<a name="line.114"></a> +<span class="sourceLineNo">115</span><a name="line.115"></a> +<span class="sourceLineNo">116</span> if (!cfMap.containsKey(INPUT_FAMILYS[0]) || !cfMap.containsKey(INPUT_FAMILYS[1])) {<a name="line.116"></a> +<span class="sourceLineNo">117</span> throw new IOException("Wrong input columns. Missing: '" +<a name="line.117"></a> +<span class="sourceLineNo">118</span> Bytes.toString(INPUT_FAMILYS[0]) + "' or '" + Bytes.toString(INPUT_FAMILYS[1]) + "'.");<a name="line.118"></a> +<span class="sourceLineNo">119</span> }<a name="line.119"></a> +<span class="sourceLineNo">120</span><a name="line.120"></a> +<span class="sourceLineNo">121</span> String val0 = Bytes.toStringBinary(value.getValue(INPUT_FAMILYS[0], null));<a name="line.121"></a> +<span class="sourceLineNo">122</span> String val1 = Bytes.toStringBinary(value.getValue(INPUT_FAMILYS[1], null));<a name="line.122"></a> +<span class="sourceLineNo">123</span> LOG.info("map: key -> " + Bytes.toStringBinary(key.get()) +<a name="line.123"></a> +<span class="sourceLineNo">124</span> ", value -> (" + val0 + ", " + val1 + ")");<a name="line.124"></a> +<span class="sourceLineNo">125</span> context.write(key, key);<a name="line.125"></a> +<span class="sourceLineNo">126</span> }<a name="line.126"></a> +<span class="sourceLineNo">127</span> }<a name="line.127"></a> +<span class="sourceLineNo">128</span><a name="line.128"></a> +<span class="sourceLineNo">129</span> /**<a name="line.129"></a> +<span class="sourceLineNo">130</span> * Checks the last and first key seen against the scanner boundaries.<a name="line.130"></a> +<span class="sourceLineNo">131</span> */<a name="line.131"></a> +<span class="sourceLineNo">132</span> public static class ScanReducer<a name="line.132"></a> +<span class="sourceLineNo">133</span> extends Reducer<ImmutableBytesWritable, ImmutableBytesWritable,<a name="line.133"></a> +<span class="sourceLineNo">134</span> NullWritable, NullWritable> {<a name="line.134"></a> +<span class="sourceLineNo">135</span><a name="line.135"></a> +<span class="sourceLineNo">136</span> private String first = null;<a name="line.136"></a> +<span class="sourceLineNo">137</span> private String last = null;<a name="line.137"></a> +<span class="sourceLineNo">138</span><a name="line.138"></a> +<span class="sourceLineNo">139</span> protected void reduce(ImmutableBytesWritable key,<a name="line.139"></a> +<span class="sourceLineNo">140</span> Iterable<ImmutableBytesWritable> values, Context context)<a name="line.140"></a> +<span class="sourceLineNo">141</span> throws IOException ,InterruptedException {<a name="line.141"></a> +<span class="sourceLineNo">142</span> int count = 0;<a name="line.142"></a> +<span class="sourceLineNo">143</span> for (ImmutableBytesWritable value : values) {<a name="line.143"></a> +<span class="sourceLineNo">144</span> String val = Bytes.toStringBinary(value.get());<a name="line.144"></a> +<span class="sourceLineNo">145</span> LOG.info("reduce: key[" + count + "] -> " +<a name="line.145"></a> +<span class="sourceLineNo">146</span> Bytes.toStringBinary(key.get()) + ", value -> " + val);<a name="line.146"></a> +<span class="sourceLineNo">147</span> if (first == null) first = val;<a name="line.147"></a> +<span class="sourceLineNo">148</span> last = val;<a name="line.148"></a> +<span class="sourceLineNo">149</span> count++;<a name="line.149"></a> +<span 
class="sourceLineNo">150</span> }<a name="line.150"></a> +<span class="sourceLineNo">151</span> }<a name="line.151"></a> +<span class="sourceLineNo">152</span><a name="line.152"></a> +<span class="sourceLineNo">153</span> protected void cleanup(Context context)<a name="line.153"></a> +<span class="sourceLineNo">154</span> throws IOException, InterruptedException {<a name="line.154"></a> +<span class="sourceLineNo">155</span> Configuration c = context.getConfiguration();<a name="line.155"></a> +<span class="sourceLineNo">156</span> String startRow = c.get(KEY_STARTROW);<a name="line.156"></a> +<span class="sourceLineNo">157</span> String lastRow = c.get(KEY_LASTROW);<a name="line.157"></a> +<span class="sourceLineNo">158</span> LOG.info("cleanup: first -> \"" + first + "\", start row -> \"" + startRow + "\"");<a name="line.158"></a> +<span class="sourceLineNo">159</span> LOG.info("cleanup: last -> \"" + last + "\", last row -> \"" + lastRow + "\"");<a name="line.159"></a> +<span class="sourceLineNo">160</span> if (startRow != null && startRow.length() > 0) {<a name="line.160"></a> +<span class="sourceLineNo">161</span> assertEquals(startRow, first);<a name="line.161"></a> +<span class="sourceLineNo">162</span> }<a name="line.162"></a> +<span class="sourceLineNo">163</span> if (lastRow != null && lastRow.length() > 0) {<a name="line.163"></a> +<span class="sourceLineNo">164</span> assertEquals(lastRow, last);<a name="line.164"></a> +<span class="sourceLineNo">165</span> }<a name="line.165"></a> +<span class="sourceLineNo">166</span> }<a name="line.166"></a> +<span class="sourceLineNo">167</span><a name="line.167"></a> +<span class="sourceLineNo">168</span> }<a name="line.168"></a> +<span class="sourceLineNo">169</span><a name="line.169"></a> +<span class="sourceLineNo">170</span> /**<a name="line.170"></a> +<span class="sourceLineNo">171</span> * Tests an MR Scan initialized from properties set in the Configuration.<a name="line.171"></a> +<span class="sourceLineNo">172</span> */<a name="line.172"></a> +<span class="sourceLineNo">173</span> protected void testScanFromConfiguration(String start, String stop, String last)<a name="line.173"></a> +<span class="sourceLineNo">174</span> throws IOException, InterruptedException, ClassNotFoundException {<a name="line.174"></a> +<span class="sourceLineNo">175</span> String jobName = "ScanFromConfig" + (start != null ? start.toUpperCase(Locale.ROOT) : "Empty") +<a name="line.175"></a> +<span class="sourceLineNo">176</span> "To" + (stop != null ? stop.toUpperCase(Locale.ROOT) : "Empty");<a name="line.176"></a> +<span class="sourceLineNo">177</span> Configuration c = new Configuration(TEST_UTIL.getConfiguration());<a name="line.177"></a> +<span class="sourceLineNo">178</span> c.set(TableInputFormat.INPUT_TABLE, TABLE_NAME.getNameAsString());<a name="line.178"></a> +<span class="sourceLineNo">179</span> c.set(TableInputFormat.SCAN_COLUMN_FAMILY,<a name="line.179"></a> +<span class="sourceLineNo">180</span> Bytes.toString(INPUT_FAMILYS[0]) + ", " + Bytes.toString(INPUT_FAMILYS[1]));<a name="line.180"></a> +<span class="sourceLineNo">181</span> c.set(KEY_STARTROW, start != null ? start : "");<a name="line.181"></a> +<span class="sourceLineNo">182</span> c.set(KEY_LASTROW, last != null ? 
last : "");<a name="line.182"></a> +<span class="sourceLineNo">183</span><a name="line.183"></a> +<span class="sourceLineNo">184</span> if (start != null) {<a name="line.184"></a> +<span class="sourceLineNo">185</span> c.set(TableInputFormat.SCAN_ROW_START, start);<a name="line.185"></a> +<span class="sourceLineNo">186</span> }<a name="line.186"></a> +<span class="sourceLineNo">187</span><a name="line.187"></a> +<span class="sourceLineNo">188</span> if (stop != null) {<a name="line.188"></a> +<span class="sourceLineNo">189</span> c.set(TableInputFormat.SCAN_ROW_STOP, stop);<a name="line.189"></a> +<span class="sourceLineNo">190</span> }<a name="line.190"></a> <span class="sourceLineNo">191</span><a name="line.191"></a> -<span class="sourceLineNo">192</span> if (start != null) {<a name="line.192"></a> -<span class="sourceLineNo">193</span> c.set(TableInputFormat.SCAN_ROW_START, start);<a name="line.193"></a> -<span class="sourceLineNo">194</span> }<a name="line.194"></a> -<span class="sourceLineNo">195</span><a name="line.195"></a> -<span class="sourceLineNo">196</span> if (stop != null) {<a name="line.196"></a> -<span class="sourceLineNo">197</span> c.set(TableInputFormat.SCAN_ROW_STOP, stop);<a name="line.197"></a> -<span class="sourceLineNo">198</span> }<a name="line.198"></a> -<span class="sourceLineNo">199</span><a name="line.199"></a> -<span class="sourceLineNo">200</span> Job job = new Job(c, jobName);<a name="line.200"></a> -<span class="sourceLineNo">201</span> job.setMapperClass(ScanMapper.class);<a name="line.201"></a> -<span class="sourceLineNo">202</span> job.setReducerClass(ScanReducer.class);<a name="line.202"></a> -<span class="sourceLineNo">203</span> job.setMapOutputKeyClass(ImmutableBytesWritable.class);<a name="line.203"></a> -<span class="sourceLineNo">204</span> job.setMapOutputValueClass(ImmutableBytesWritable.class);<a name="line.204"></a> -<span class="sourceLineNo">205</span> job.setInputFormatClass(TableInputFormat.class);<a name="line.205"></a> -<span class="sourceLineNo">206</span> job.setNumReduceTasks(1);<a name="line.206"></a> -<span class="sourceLineNo">207</span> FileOutputFormat.setOutputPath(job, new Path(job.getJobName()));<a name="line.207"></a> -<span class="sourceLineNo">208</span> TableMapReduceUtil.addDependencyJars(job);<a name="line.208"></a> -<span class="sourceLineNo">209</span> assertTrue(job.waitForCompletion(true));<a name="line.209"></a> -<span class="sourceLineNo">210</span> }<a name="line.210"></a> -<span class="sourceLineNo">211</span><a name="line.211"></a> -<span class="sourceLineNo">212</span> /**<a name="line.212"></a> -<span class="sourceLineNo">213</span> * Tests a MR scan using specific start and stop rows.<a name="line.213"></a> -<span class="sourceLineNo">214</span> *<a name="line.214"></a> -<span class="sourceLineNo">215</span> * @throws IOException<a name="line.215"></a> -<span class="sourceLineNo">216</span> * @throws ClassNotFoundException<a name="line.216"></a> -<span class="sourceLineNo">217</span> * @throws InterruptedException<a name="line.217"></a> -<span class="sourceLineNo">218</span> */<a name="line.218"></a> -<span class="sourceLineNo">219</span> protected void testScan(String start, String stop, String last)<a name="line.219"></a> -<span class="sourceLineNo">220</span> throws IOException, InterruptedException, ClassNotFoundException {<a name="line.220"></a> -<span class="sourceLineNo">221</span> String jobName = "Scan" + (start != null ? 
start.toUpperCase(Locale.ROOT) : "Empty") +<a name="line.221"></a> -<span class="sourceLineNo">222</span> "To" + (stop != null ? stop.toUpperCase(Locale.ROOT) : "Empty");<a name="line.222"></a> -<span class="sourceLineNo">223</span> LOG.info("Before map/reduce startup - job " + jobName);<a name="line.223"></a> -<span class="sourceLineNo">224</span> Configuration c = new Configuration(TEST_UTIL.getConfiguration());<a name="line.224"></a> -<span class="sourceLineNo">225</span> Scan scan = new Scan();<a name="line.225"></a> -<span class="sourceLineNo">226</span> scan.addFamily(INPUT_FAMILYS[0]);<a name="line.226"></a> -<span class="sourceLineNo">227</span> scan.addFamily(INPUT_FAMILYS[1]);<a name="line.227"></a> -<span class="sourceLineNo">228</span> if (start != null) {<a name="line.228"></a> -<span class="sourceLineNo">229</span> scan.setStartRow(Bytes.toBytes(start));<a name="line.229"></a> -<span class="sourceLineNo">230</span> }<a name="line.230"></a> -<span class="sourceLineNo">231</span> c.set(KEY_STARTROW, start != null ? start : "");<a name="line.231"></a> -<span class="sourceLineNo">232</span> if (stop != null) {<a name="line.232"></a> -<span class="sourceLineNo">233</span> scan.setStopRow(Bytes.toBytes(stop));<a name="line.233"></a> -<span class="sourceLineNo">234</span> }<a name="line.234"></a> -<span class="sourceLineNo">235</span> c.set(KEY_LASTROW, last != null ? last : "");<a name="line.235"></a> -<span class="sourceLineNo">236</span> LOG.info("scan before: " + scan);<a name="line.236"></a> -<span class="sourceLineNo">237</span> Job job = new Job(c, jobName);<a name="line.237"></a> -<span class="sourceLineNo">238</span> TableMapReduceUtil.initTableMapperJob(<a name="line.238"></a> -<span class="sourceLineNo">239</span> TABLE_NAME, scan, ScanMapper.class,<a name="line.239"></a> -<span class="sourceLineNo">240</span> ImmutableBytesWritable.class, ImmutableBytesWritable.class, job);<a name="line.240"></a> -<span class="sourceLineNo">241</span> job.setReducerClass(ScanReducer.class);<a name="line.241"></a> -<span class="sourceLineNo">242</span> job.setNumReduceTasks(1); // one to get final "first" and "last" key<a name="line.242"></a> -<span class="sourceLineNo">243</span> FileOutputFormat.setOutputPath(job, new Path(job.getJobName()));<a name="line.243"></a> -<span class="sourceLineNo">244</span> LOG.info("Started " + job.getJobName());<a name="line.244"></a> -<span class="sourceLineNo">245</span> assertTrue(job.waitForCompletion(true));<a name="line.245"></a> -<span class="sourceLineNo">246</span> LOG.info("After map/reduce completion - job " + jobName);<a name="line.246"></a> -<span class="sourceLineNo">247</span> }<a name="line.247"></a> -<span class="sourceLineNo">248</span><a name="line.248"></a> -<span class="sourceLineNo">249</span><a name="line.249"></a> -<span class="sourceLineNo">250</span> /**<a name="line.250"></a> -<span class="sourceLineNo">251</span> * Tests Number of inputSplits for MR job when specify number of mappers for TableInputFormatXXX<a name="line.251"></a> -<span class="sourceLineNo">252</span> * This test does not run MR job<a name="line.252"></a> -<span class="sourceLineNo">253</span> *<a name="line.253"></a> -<span class="sourceLineNo">254</span> * @throws IOException<a name="line.254"></a> -<span class="sourceLineNo">255</span> * @throws ClassNotFoundException<a name="line.255"></a> -<span class="sourceLineNo">256</span> * @throws InterruptedException<a name="line.256"></a> -<span class="sourceLineNo">257</span> */<a name="line.257"></a> -<span 
class="sourceLineNo">258</span> public void testNumOfSplits(int splitsPerRegion, int expectedNumOfSplits) throws IOException,<a name="line.258"></a> -<span class="sourceLineNo">259</span> InterruptedException,<a name="line.259"></a> -<span class="sourceLineNo">260</span> ClassNotFoundException {<a name="line.260"></a> -<span class="sourceLineNo">261</span> String jobName = "TestJobForNumOfSplits";<a name="line.261"></a> -<span class="sourceLineNo">262</span> LOG.info("Before map/reduce startup - job " + jobName);<a name="line.262"></a> -<span class="sourceLineNo">263</span> Configuration c = new Configuration(TEST_UTIL.getConfiguration());<a name="line.263"></a> -<span class="sourceLineNo">264</span> Scan scan = new Scan();<a name="line.264"></a> -<span class="sourceLineNo">265</span> scan.addFamily(INPUT_FAMILYS[0]);<a name="line.265"></a> -<span class="sourceLineNo">266</span> scan.addFamily(INPUT_FAMILYS[1]);<a name="line.266"></a> -<span class="sourceLineNo">267</span> c.setInt("hbase.mapreduce.tableinput.mappers.per.region", splitsPerRegion);<a name="line.267"></a> -<span class="sourceLineNo">268</span> c.set(KEY_STARTROW, "");<a name="line.268"></a> -<span class="sourceLineNo">269</span> c.set(KEY_LASTROW, "");<a name="line.269"></a> -<span class="sourceLineNo">270</span> Job job = new Job(c, jobName);<a name="line.270"></a> -<span class="sourceLineNo">271</span> TableMapReduceUtil.initTableMapperJob(TABLE_NAME.getNameAsString(), scan, ScanMapper.class,<a name="line.271"></a> -<span class="sourceLineNo">272</span> ImmutableBytesWritable.class, ImmutableBytesWritable.class, job);<a name="line.272"></a> -<span class="sourceLineNo">273</span> TableInputFormat tif = new TableInputFormat();<a name="line.273"></a> -<span class="sourceLineNo">274</span> tif.setConf(job.getConfiguration());<a name="line.274"></a> -<span class="sourceLineNo">275</span> Assert.assertEquals(TABLE_NAME, table.getName());<a name="line.275"></a> -<span class="sourceLineNo">276</span> List<InputSplit> splits = tif.getSplits(job);<a name="line.276"></a> -<span class="sourceLineNo">277</span> Assert.assertEquals(expectedNumOfSplits, splits.size());<a name="line.277"></a> -<span class="sourceLineNo">278</span> }<a name="line.278"></a> -<span class="sourceLineNo">279</span><a name="line.279"></a> -<span class="sourceLineNo">280</span> /**<a name="line.280"></a> -<span class="sourceLineNo">281</span> * Run MR job to check the number of mapper = expectedNumOfSplits<a name="line.281"></a> -<span class="sourceLineNo">282</span> * @throws IOException<a name="line.282"></a> -<span class="sourceLineNo">283</span> * @throws InterruptedException<a name="line.283"></a> -<span class="sourceLineNo">284</span> * @throws ClassNotFoundException<a name="line.284"></a> -<span class="sourceLineNo">285</span> */<a name="line.285"></a> -<span class="sourceLineNo">286</span> public void testNumOfSplitsMR(int splitsPerRegion, int expectedNumOfSplits) throws IOException,<a name="line.286"></a> -<span class="sourceLineNo">287</span> InterruptedException,<a name="line.287"></a> -<span class="sourceLineNo">288</span> ClassNotFoundException {<a name="line.288"></a> -<span class="sourceLineNo">289</span> String jobName = "TestJobForNumOfSplits-MR";<a name="line.289"></a> -<span class="sourceLineNo">290</span> LOG.info("Before map/reduce startup - job " + jobName);<a name="line.290"></a> -<span class="sourceLineNo">291</span> JobConf c = new JobConf(TEST_UTIL.getConfiguration());<a name="line.291"></a> -<span class="sourceLineNo">292</span> Scan 
scan = new Scan();<a name="line.292"></a> -<span class="sourceLineNo">293</span> scan.addFamily(INPUT_FAMILYS[0]);<a name="line.293"></a> -<span class="sourceLineNo">294</span> scan.addFamily(INPUT_FAMILYS[1]);<a name="line.294"></a> -<span class="sourceLineNo">295</span> c.setInt("hbase.mapreduce.tableinput.mappers.per.region", splitsPerRegion);<a name="line.295"></a> -<span class="sourceLineNo">296</span> c.set(KEY_STARTROW, "");<a name="line.296"></a> -<span class="sourceLineNo">297</span> c.set(KEY_LASTROW, "");<a name="line.297"></a> -<span class="sourceLineNo">298</span> Job job = Job.getInstance(c, jobName);<a name="line.298"></a> -<span class="sourceLineNo">299</span> TableMapReduceUtil.initTableMapperJob(TABLE_NAME.getNameAsString(), scan, ScanMapper.class,<a name="line.299"></a> -<span class="sourceLineNo">300</span> ImmutableBytesWritable.class, ImmutableBytesWritable.class, job);<a name="line.300"></a> -<span class="sourceLineNo">301</span> job.setReducerClass(ScanReducer.class);<a name="line.301"></a> -<span class="sourceLineNo">302</span> job.setNumReduceTasks(1);<a name="line.302"></a> -<span class="sourceLineNo">303</span> job.setOutputFormatClass(NullOutputFormat.class);<a name="line.303"></a> -<span class="sourceLineNo">304</span> assertTrue("job failed!", job.waitForCompletion(true));<a name="line.304"></a> -<span class="sourceLineNo">305</span> // for some reason, hbase does not expose JobCounter.TOTAL_LAUNCHED_MAPS,<a name="line.305"></a> -<span class="sourceLineNo">306</span> // we use TaskCounter.SHUFFLED_MAPS to get total launched maps<a name="line.306"></a> -<span class="sourceLineNo">307</span> assertEquals("Saw the wrong count of mappers per region", expectedNumOfSplits,<a name="line.307"></a> -<span class="sourceLineNo">308</span> job.getCounters().findCounter(TaskCounter.SHUFFLED_MAPS).getValue());<a name="line.308"></a> -<span class="sourceLineNo">309</span> }<a name="line.309"></a> -<span class="sourceLineNo">310</span><a name="line.310"></a> -<span class="sourceLineNo">311</span> /**<a name="line.311"></a> -<span class="sourceLineNo">312</span> * Run MR job to test autobalance for setting number of mappers for TIF<a name="line.312"></a> -<span class="sourceLineNo">313</span> * This does not run real MR job<a name="line.313"></a> -<span class="sourceLineNo">314</span> */<a name="line.314"></a> -<span class="sourceLineNo">315</span> public void testAutobalanceNumOfSplit() throws IOException {<a name="line.315"></a> -<span class="sourceLineNo">316</span> // set up splits for testing<a name="line.316"></a> -<span class="sourceLineNo">317</span> List<InputSplit> splits = new ArrayList<>(5);<a name="line.317"></a> -<span class="sourceLineNo">318</span> int[] regionLen = {10, 20, 20, 40, 60};<a name="line.318"></a> -<span class="sourceLineNo">319</span> for (int i = 0; i < 5; i++) {<a name="line.319"></a> -<span class="sourceLineNo">320</span> InputSplit split = new TableSplit(TABLE_NAME, new Scan(),<a name="line.320"></a> -<span class="sourceLineNo">321</span> Bytes.toBytes(i), Bytes.toBytes(i + 1), "", "", regionLen[i] * 1048576);<a name="line.321"></a> -<span class="sourceLineNo">322</span> splits.add(split);<a name="line.322"></a> -<span class="sourceLineNo">323</span> }<a name="line.323"></a> -<span class="sourceLineNo">324</span> TableInputFormat tif = new TableInputFormat();<a name="line.324"></a> -<span class="sourceLineNo">325</span> List<InputSplit> res = tif.calculateAutoBalancedSplits(splits, 1073741824);<a name="line.325"></a> -<span 
class="sourceLineNo">326</span><a name="line.326"></a> -<span class="sourceLineNo">327</span> assertEquals("Saw the wrong number of splits", 5, res.size());<a name="line.327"></a> -<span class="sourceLineNo">328</span> TableSplit ts1 = (TableSplit) res.get(0);<a name="line.328"></a> -<span class="sourceLineNo">329</span> assertEquals("The first split end key should be", 2, Bytes.toInt(ts1.getEndRow()));<a name="line.329"></a> -<span class="sourceLineNo">330</span> TableSplit ts2 = (TableSplit) res.get(1);<a name="line.330"></a> -<span class="sourceLineNo">331</span> assertEquals("The second split regionsize should be", 20 * 1048576, ts2.getLength());<a name="line.331"></a> -<span class="sourceLineNo">332</span> TableSplit ts3 = (TableSplit) res.get(2);<a name="line.332"></a> -<span class="sourceLineNo">333</span> assertEquals("The third split start key should be", 3, Bytes.toInt(ts3.getStartRow()));<a name="line.333"></a> -<span class="sourceLineNo">334</span> TableSplit ts4 = (TableSplit) res.get(4);<a name="line.334"></a> -<span class="sourceLineNo">335</span> assertNotEquals("The seventh split start key should not be", 4, Bytes.toInt(ts4.getStartRow()));<a name="line.335"></a> -<span class="sourceLineNo">336</span> }<a name="line.336"></a> -<span class="sourceLineNo">337</span>}<a name="line.337"></a> -<span class="sourceLineNo">338</span><a name="line.338"></a> +<span class="sourceLineNo">192</span> Job job = Job.getInstance(c, jobName);<a name="line.192"></a> +<span class="sourceLineNo">193</span> job.setMapperClass(ScanMapper.class);<a name="line.193"></a> +<span class="sourceLineNo">194</span> job.setReducerClass(ScanReducer.class);<a name="line.194"></a> +<span class="sourceLineNo">195</span> job.setMapOutputKeyClass(ImmutableBytesWritable.class);<a name="line.195"></a> +<span class="sourceLineNo">196</span> job.setMapOutputValueClass(ImmutableBytesWritable.class);<a name="line.196"></a> +<span class="sourceLineNo">197</span> job.setInputFormatClass(TableInputFormat.class);<a name="line.197"></a> +<span class="sourceLineNo">198</span> job.setNumReduceTasks(1);<a name="line.198"></a> +<span class="sourceLineNo">199</span> FileOutputFormat.setOutputPath(job, new Path(job.getJobName()));<a name="line.199"></a> +<span class="sourceLineNo">200</span> TableMapReduceUtil.addDependencyJars(job);<a name="line.200"></a> +<span class="sourceLineNo">201</span> assertTrue(job.waitForCompletion(true));<a name="line.201"></a> +<span class="sourceLineNo">202</span> }<a name="line.202"></a> +<span class="sourceLineNo">203</span><a name="line.203"></a> +<span class="sourceLineNo">204</span> /**<a name="line.204"></a> +<span class="sourceLineNo">205</span> * Tests a MR scan using specific start and stop rows.<a name="line.205"></a> +<span class="sourceLineNo">206</span> */<a name="line.206"></a> +<span class="sourceLineNo">207</span> protected void testScan(String start, String stop, String last)<a name="line.207"></a> +<span class="sourceLineNo">208</span> throws IOException, InterruptedException, ClassNotFoundException {<a name="line.208"></a> +<span class="sourceLineNo">209</span> String jobName = "Scan" + (start != null ? start.toUpperCase(Locale.ROOT) : "Empty") + "To" +<a name="line.209"></a> +<span class="sourceLineNo">210</span> (stop != null ? 
stop.toUpperCase(Locale.ROOT) : "Empty");<a name="line.210"></a> +<span class="sourceLineNo">211</span> LOG.info("Before map/reduce startup - job " + jobName);<a name="line.211"></a> +<span class="sourceLineNo">212</span> Configuration c = new Configuration(TEST_UTIL.getConfiguration());<a name="line.212"></a> +<span class="sourceLineNo">213</span> Scan scan = new Scan();<a name="line.213"></a> +<span class="sourceLineNo">214</span> scan.addFamily(INPUT_FAMILYS[0]);<a name="line.214"></a> +<span class="sourceLineNo">215</span> scan.addFamily(INPUT_FAMILYS[1]);<a name="line.215"></a> +<span class="sourceLineNo">216</span> if (start != null) {<a name="line.216"></a> +<span class="sourceLineNo">217</span> scan.withStartRow(Bytes.toBytes(start));<a name="line.217"></a> +<span class="sourceLineNo">218</span> }<a name="line.218"></a> +<span class="sourceLineNo">219</span> c.set(KEY_STARTROW, start != null ? start : "");<a name="line.219"></a> +<span class="sourceLineNo">220</span> if (stop != null) {<a name="line.220"></a> +<span class="sourceLineNo">221</span> scan.withStopRow(Bytes.toBytes(stop));<a name="line.221"></a> +<span class="sourceLineNo">222</span> }<a name="line.222"></a> +<span class="sourceLineNo">223</span> c.set(KEY_LASTROW, last != null ? last : "");<a name="line.223"></a> +<span class="sourceLineNo">224</span> LOG.info("scan before: " + scan);<a name="line.224"></a> +<span class="sourceLineNo">225</span> Job job = Job.getInstance(c, jobName);<a name="line.225"></a> +<span class="sourceLineNo">226</span> TableMapReduceUtil.initTableMapperJob(TABLE_NAME, scan, ScanMapper.class,<a name="line.226"></a> +<span class="sourceLineNo">227</span> ImmutableBytesWritable.class, ImmutableBytesWritable.class, job);<a name="line.227"></a> +<span class="sourceLineNo">228</span> job.setReducerClass(ScanReducer.class);<a name="line.228"></a> +<span class="sourceLineNo">229</span> job.setNumReduceTasks(1); // one to get final "first" and "last" key<a name="line.229"></a> +<span class="sourceLineNo">230</span> FileOutputFormat.setOutputPath(job, new Path(job.getJobName()));<a name="line.230"></a> +<span class="sourceLineNo">231</span> LOG.info("Started " + job.getJobName());<a name="line.231"></a> +<span class="sourceLineNo">232</span> assertTrue(job.waitForCompletion(true));<a name="line.232"></a> +<span class="sourceLineNo">233</span> LOG.info("After map/reduce completion - job " + jobName);<a name="line.233"></a> +<span class="sourceLineNo">234</span> }<a name="line.234"></a> +<span class="sourceLineNo">235</span><a name="line.235"></a> +<span class="sourceLineNo">236</span><a name="line.236"></a> +<span class="sourceLineNo">237</span> /**<a name="line.237"></a> +<span class="sourceLineNo">238</span> * Tests Number of inputSplits for MR job when specify number of mappers for TableInputFormatXXX<a name="line.238"></a> +<span class="sourceLineNo">239</span> * This test does not run MR job<a name="line.239"></a> +<span class="sourceLineNo">240</span> */<a name="line.240"></a> +<span class="sourceLineNo">241</span> protected void testNumOfSplits(int splitsPerRegion, int expectedNumOfSplits)<a name="line.241"></a> +<span class="sourceLineNo">242</span> throws IOException, InterruptedException, ClassNotFoundException {<a name="line.242"></a> +<span class="sourceLineNo">243</span> String jobName = "TestJobForNumOfSplits";<a name="line.243"></a> +<span class="sourceLineNo">244</span> LOG.info("Before map/reduce startup - job " + jobName);<a name="line.244"></a> +<span class="sourceLineNo">245</span> 
Configuration c = new Configuration(TEST_UTIL.getConfiguration());<a name="line.245"></a> +<span class="sourceLineNo">246</span> Scan scan = new Scan();<a name="line.246"></a> +<span class="sourceLineNo">247</span> scan.addFamily(INPUT_FAMILYS[0]);<a name="line.247"></a> +<span class="sourceLineNo">248</span> scan.addFamily(INPUT_FAMILYS[1]);<a name="line.248"></a> +<span class="sourceLineNo">249</span> c.setInt("hbase.mapreduce.tableinput.mappers.per.region", splitsPerRegion);<a name="line.249"></a> +<span class="sourceLineNo">250</span> c.set(KEY_STARTROW, "");<a name="line.250"></a> +<span class="sourceLineNo">251</span> c.set(KEY_LASTROW, "");<a name="line.251"></a> +<span class="sourceLineNo">252</span> Job job = Job.getInstance(c, jobName);<a name="line.252"></a> +<span class="sourceLineNo">253</span> TableMapReduceUtil.initTableMapperJob(TABLE_NAME.getNameAsString(), scan, ScanMapper.class,<a name="line.253"></a> +<span class="sourceLineNo">254</span> ImmutableBytesWritable.class, ImmutableBytesWritable.class, job);<a name="line.254"></a> +<span class="sourceLineNo">255</span> TableInputFormat tif = new TableInputFormat();<a name="line.255"></a> +<span class="sourceLineNo">256</span> tif.setConf(job.getConfiguration());<a name="line.256"></a> +<span class="sourceLineNo">257</span> Assert.assertEquals(TABLE_NAME, table.getName());<a name="line.257"></a> +<span class="sourceLineNo">258</span> List<InputSplit> splits = tif.getSplits(job);<a name="line.258"></a> +<span class="sourceLineNo">259</span> Assert.assertEquals(expectedNumOfSplits, splits.size());<a name="line.259"></a> +<span class="sourceLineNo">260</span> }<a name="line.260"></a> +<span class="sourceLineNo">261</span><a name="line.261"></a> +<span class="sourceLineNo">262</span> /**<a name="line.262"></a> +<span class="sourceLineNo">263</span> * Run MR job to check the number of mapper = expectedNumOfSplits<a name="line.263"></a> +<span class="sourceLineNo">264</span> */<a name="line.264"></a> +<span class="sourceLineNo">265</span> protected void testNumOfSplitsMR(int splitsPerRegion, int expectedNumOfSplits)<a name="line.265"></a> +<span class="sourceLineNo">266</span> throws IOException, InterruptedException, ClassNotFoundException {<a name="line.266"></a> +<span class="sourceLineNo">267</span> String jobName = "TestJobForNumOfSplits-MR";<a name="line.267"></a> +<span class="sourceLineNo">268</span> LOG.info("Before map/reduce startup - job " + jobName);<a name="line.268"></a> +<span class="sourceLineNo">269</span> JobConf c = new JobConf(TEST_UTIL.getConfiguration());<a name="line.269"></a> +<span class="sourceLineNo">270</span> Scan scan = new Scan();<a name="line.270"></a> +<span class="sourceLineNo">271</span> scan.addFamily(INPUT_FAMILYS[0]);<a name="line.271"></a> +<span class="sourceLineNo">272</span> scan.addFamily(INPUT_FAMILYS[1]);<a name="line.272"></a> +<span class="sourceLineNo">273</span> c.setInt("hbase.mapreduce.tableinput.mappers.per.region", splitsPerRegion);<a name="line.273"></a> +<span class="sourceLineNo">274</span> c.set(KEY_STARTROW, "");<a name="line.274"></a> +<span class="sourceLineNo">275</span> c.set(KEY_LASTROW, "");<a name="line.275"></a> +<span class="sourceLineNo">276</span> Job job = Job.getInstance(c, jobName);<a name="line.276"></a> +<span class="sourceLineNo">277</span> TableMapReduceUtil.initTableMapperJob(TABLE_NAME.getNameAsString(), scan, ScanMapper.class,<a name="line.277"></a> +<span class="sourceLineNo">278</span> ImmutableBytesWritable.class, ImmutableBytesWritable.class, 
job);<a name="line.278"></a> +<span class="sourceLineNo">279</span> job.setReducerClass(ScanReducer.class);<a name="line.279"></a> +<span class="sourceLineNo">280</span> job.setNumReduceTasks(1);<a name="line.280"></a> +<span class="sourceLineNo">281</span> job.setOutputFormatClass(NullOutputFormat.class);<a name="line.281"></a> +<span class="sourceLineNo">282</span> assertTrue("job failed!", job.waitForCompletion(true));<a name="line.282"></a> +<span class="sourceLineNo">283</span> // for some reason, hbase does not expose JobCounter.TOTAL_LAUNCHED_MAPS,<a name="line.283"></a> +<span class="sourceLineNo">284</span> // we use TaskCounter.SHUFFLED_MAPS to get total launched maps<a name="line.284"></a> +<span class="sourceLineNo">285</span> assertEquals("Saw the wrong count of mappers per region", expectedNumOfSplits,<a name="line.285"></a> +<span class="sourceLineNo">286</span> job.getCounters().findCounter(TaskCounter.SHUFFLED_MAPS).getValue());<a name="line.286"></a> +<span class="sourceLineNo">287</span> }<a name="line.287"></a> +<span class="sourceLineNo">288</span><a name="line.288"></a> +<span class="sourceLineNo">289</span> /**<a name="line.289"></a> +<span class="sourceLineNo">290</span> * Run MR job to test autobalance for setting number of mappers for TIF This does not run real MR<a name="line.290"></a> +<span class="sourceLineNo">291</span> * job<a name="line.291"></a> +<span class="sourceLineNo">292</span> */<a name="line.292"></a> +<span class="sourceLineNo">293</span> protected void testAutobalanceNumOfSplit() throws IOException {<a name="line.293"></a> +<span class="sourceLineNo">294</span> // set up splits for testing<a name="line.294"></a> +<span class="sourceLineNo">295</span> List<InputSplit> splits = new ArrayList<>(5);<a name="line.295"></a> +<span class="sourceLineNo">296</span> int[] regionLen = { 10, 20, 20, 40, 60 };<a name="line.296"></a> +<span class="sourceLineNo">297</span> for (int i = 0; i < 5; i++) {<a name="line.297"></a> +<span class="sourceLineNo">298</span> InputSplit split = new TableSplit(TABLE_NAME, new Scan(), Bytes.toBytes(i),<a name="line.298"></a> +<span class="sourceLineNo">299</span> Bytes.toBytes(i + 1), "", "", regionLen[i] * 1048576);<a name="line.299"></a> +<span class="sourceLineNo">300</span> splits.add(split);<a name="line.300"></a> +<span class="sourceLineNo">301</span> }<a name="line.301"></a> +<span class="sourceLineNo">302</span> TableInputFormat tif = new TableInputFormat();<a name="line.302"></a> +<span class="sourceLineNo">303</span> List<InputSplit> res = tif.calculateAutoBalancedSplits(splits, 1073741824);<a name="line.303"></a> +<span class="sourceLineNo">304</span><a name="line.304"></a> +<span class="sourceLineNo">305</span> assertEquals("Saw the wrong number of splits", 5, res.size());<a name="line.305"></a> +<span class="sourceLineNo">306</span> TableSplit ts1 = (TableSplit) res.get(0);<a name="line.306"></a> +<span class="sourceLineNo">307</span> assertEquals("The first split end key should be", 2, Bytes.toInt(ts1.getEndRow()));<a name="line.307"></a> +<span class="sourceLineNo">308</span> TableSplit ts2 = (TableSplit) res.get(1);<a name="line.308"></a> +<span class="sourceLineNo">309</span> assertEquals("The second split regionsize should be", 20 * 1048576, ts2.getLength());<a name="line.309"></a> +<span class="sourceLineNo">310</span> TableSplit ts3 = (TableSplit) res.get(2);<a name="line.310"></a> +<span class="sourceLineNo">311</span> assertEquals("The third split start key should be", 3, 
Bytes.toInt(ts3.getStartRow()));<a name="line.311"></a> +<span class="sourceLineNo">312</span>    TableSplit ts4 = (TableSplit) res.get(4);<a name="line.312"></a> +<span class="sourceLineNo">313</span>    assertNotEquals("The fifth split start key should not be", 4, Bytes.toInt(ts4.getStartRow()));<a name="line.313"></a> +<span class="sourceLineNo">314</span>  }<a name="line.314"></a> +<span class="sourceLineNo">315</span>}<a name="line.315"></a> +<span class="sourceLineNo">316</span><a name="line.316"></a>
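
testNumOfSplits and testNumOfSplitsMR above both rely on TableInputFormat honoring hbase.mapreduce.tableinput.mappers.per.region: getSplits() should hand back splitsPerRegion splits for every region, whether the splits are only counted or driven through a real job and counted via TaskCounter.SHUFFLED_MAPS. A rough driver-side sketch of the same check outside the test harness follows; the table name, mapper choice and mappers-per-region value are placeholders, and a reachable cluster with the table already created is assumed.

import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.IdentityTableMapper;
import org.apache.hadoop.hbase.mapreduce.TableInputFormat;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.Job;

public class CountSplitsSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    // Hypothetical value: ask TableInputFormat for two mappers per region.
    conf.setInt("hbase.mapreduce.tableinput.mappers.per.region", 2);
    Job job = Job.getInstance(conf, "CountSplitsSketch");
    // "scantest" matches the table name used by the test base class; any existing table works.
    TableMapReduceUtil.initTableMapperJob("scantest", new Scan(), IdentityTableMapper.class,
        ImmutableBytesWritable.class, Result.class, job);
    TableInputFormat tif = new TableInputFormat();
    tif.setConf(job.getConfiguration());
    // Nothing is submitted; the input format is only asked how it would split the table.
    List<InputSplit> splits = tif.getSplits(job);
    System.out.println("splits = " + splits.size() + " (expect 2 x number of regions)");
  }
}

Nothing runs on the cluster here beyond region lookups, which is also all that testNumOfSplits itself checks.
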
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/06efc31c/testdevapidocs/src-html/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScanEmptyToAPP.html ---------------------------------------------------------------------- diff --git a/testdevapidocs/src-html/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScanEmptyToAPP.html b/testdevapidocs/src-html/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScanEmptyToAPP.html new file mode 100644 index 0000000..f35fea9 --- /dev/null +++ b/testdevapidocs/src-html/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScanEmptyToAPP.html @@ -0,0 +1,115 @@ +<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd"> +<html lang="en"> +<head> +<title>Source code</title> +<link rel="stylesheet" type="text/css" href="../../../../../../stylesheet.css" title="Style"> +</head> +<body> +<div class="sourceContainer"> +<pre><span class="sourceLineNo">001</span>/**<a name="line.1"></a> +<span class="sourceLineNo">002</span> * Licensed to the Apache Software Foundation (ASF) under one<a name="line.2"></a> +<span class="sourceLineNo">003</span> * or more contributor license agreements. See the NOTICE file<a name="line.3"></a> +<span class="sourceLineNo">004</span> * distributed with this work for additional information<a name="line.4"></a> +<span class="sourceLineNo">005</span> * regarding copyright ownership. The ASF licenses this file<a name="line.5"></a> +<span class="sourceLineNo">006</span> * to you under the Apache License, Version 2.0 (the<a name="line.6"></a> +<span class="sourceLineNo">007</span> * "License"); you may not use this file except in compliance<a name="line.7"></a> +<span class="sourceLineNo">008</span> * with the License. You may obtain a copy of the License at<a name="line.8"></a> +<span class="sourceLineNo">009</span> *<a name="line.9"></a> +<span class="sourceLineNo">010</span> * http://www.apache.org/licenses/LICENSE-2.0<a name="line.10"></a> +<span class="sourceLineNo">011</span> *<a name="line.11"></a> +<span class="sourceLineNo">012</span> * Unless required by applicable law or agreed to in writing, software<a name="line.12"></a> +<span class="sourceLineNo">013</span> * distributed under the License is distributed on an "AS IS" BASIS,<a name="line.13"></a> +<span class="sourceLineNo">014</span> * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.<a name="line.14"></a> +<span class="sourceLineNo">015</span> * See the License for the specific language governing permissions and<a name="line.15"></a> +<span class="sourceLineNo">016</span> * limitations under the License.<a name="line.16"></a> +<span class="sourceLineNo">017</span> */<a name="line.17"></a> +<span class="sourceLineNo">018</span>package org.apache.hadoop.hbase.mapreduce;<a name="line.18"></a> +<span class="sourceLineNo">019</span><a name="line.19"></a> +<span class="sourceLineNo">020</span>import java.io.IOException;<a name="line.20"></a> +<span class="sourceLineNo">021</span>import org.apache.hadoop.hbase.HBaseClassTestRule;<a name="line.21"></a> +<span class="sourceLineNo">022</span>import org.apache.hadoop.hbase.testclassification.MapReduceTests;<a name="line.22"></a> +<span class="sourceLineNo">023</span>import org.apache.hadoop.hbase.testclassification.MediumTests;<a name="line.23"></a> +<span class="sourceLineNo">024</span>import org.junit.ClassRule;<a name="line.24"></a> +<span class="sourceLineNo">025</span>import org.junit.Test;<a name="line.25"></a> +<span 
class="sourceLineNo">026</span>import org.junit.experimental.categories.Category;<a name="line.26"></a> +<span class="sourceLineNo">027</span><a name="line.27"></a> +<span class="sourceLineNo">028</span>@Category({ MapReduceTests.class, MediumTests.class })<a name="line.28"></a> +<span class="sourceLineNo">029</span>public class TestTableInputFormatScanEmptyToAPP extends TestTableInputFormatScanBase {<a name="line.29"></a> +<span class="sourceLineNo">030</span><a name="line.30"></a> +<span class="sourceLineNo">031</span> @ClassRule<a name="line.31"></a> +<span class="sourceLineNo">032</span> public static final HBaseClassTestRule CLASS_RULE =<a name="line.32"></a> +<span class="sourceLineNo">033</span> HBaseClassTestRule.forClass(TestTableInputFormatScanEmptyToAPP.class);<a name="line.33"></a> +<span class="sourceLineNo">034</span><a name="line.34"></a> +<span class="sourceLineNo">035</span> /**<a name="line.35"></a> +<span class="sourceLineNo">036</span> * Tests a MR scan using specific start and stop rows.<a name="line.36"></a> +<span class="sourceLineNo">037</span> */<a name="line.37"></a> +<span class="sourceLineNo">038</span> @Test<a name="line.38"></a> +<span class="sourceLineNo">039</span> public void testScanEmptyToAPP()<a name="line.39"></a> +<span class="sourceLineNo">040</span> throws IOException, InterruptedException, ClassNotFoundException {<a name="line.40"></a> +<span class="sourceLineNo">041</span> testScan(null, "app", "apo");<a name="line.41"></a> +<span class="sourceLineNo">042</span> }<a name="line.42"></a> +<span class="sourceLineNo">043</span>}<a name="line.43"></a> + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +</pre> +</div> +</body> +</html> http://git-wip-us.apache.org/repos/asf/hbase-site/blob/06efc31c/testdevapidocs/src-html/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScanEmptyToBBA.html ---------------------------------------------------------------------- diff --git a/testdevapidocs/src-html/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScanEmptyToBBA.html b/testdevapidocs/src-html/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScanEmptyToBBA.html new file mode 100644 index 0000000..b265ffc --- /dev/null +++ b/testdevapidocs/src-html/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScanEmptyToBBA.html @@ -0,0 +1,115 @@ +<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd"> +<html lang="en"> +<head> +<title>Source code</title> +<link rel="stylesheet" type="text/css" href="../../../../../../stylesheet.css" title="Style"> +</head> +<body> +<div class="sourceContainer"> +<pre><span class="sourceLineNo">001</span>/**<a name="line.1"></a> +<span class="sourceLineNo">002</span> * Licensed to the Apache Software Foundation (ASF) under one<a name="line.2"></a> +<span class="sourceLineNo">003</span> * or more contributor license agreements. See the NOTICE file<a name="line.3"></a> +<span class="sourceLineNo">004</span> * distributed with this work for additional information<a name="line.4"></a> +<span class="sourceLineNo">005</span> * regarding copyright ownership. The ASF licenses this file<a name="line.5"></a> +<span class="sourceLineNo">006</span> * to you under the Apache License, Version 2.0 (the<a name="line.6"></a> +<span class="sourceLineNo">007</span> * "License"); you may not use this file except in compliance<a name="line.7"></a> +<span class="sourceLineNo">008</span> * with the License. 
You may obtain a copy of the License at<a name="line.8"></a> +<span class="sourceLineNo">009</span> *<a name="line.9"></a> +<span class="sourceLineNo">010</span> * http://www.apache.org/licenses/LICENSE-2.0<a name="line.10"></a> +<span class="sourceLineNo">011</span> *<a name="line.11"></a> +<span class="sourceLineNo">012</span> * Unless required by applicable law or agreed to in writing, software<a name="line.12"></a> +<span class="sourceLineNo">013</span> * distributed under the License is distributed on an "AS IS" BASIS,<a name="line.13"></a> +<span class="sourceLineNo">014</span> * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.<a name="line.14"></a> +<span class="sourceLineNo">015</span> * See the License for the specific language governing permissions and<a name="line.15"></a> +<span class="sourceLineNo">016</span> * limitations under the License.<a name="line.16"></a> +<span class="sourceLineNo">017</span> */<a name="line.17"></a> +<span class="sourceLineNo">018</span>package org.apache.hadoop.hbase.mapreduce;<a name="line.18"></a> +<span class="sourceLineNo">019</span><a name="line.19"></a> +<span class="sourceLineNo">020</span>import java.io.IOException;<a name="line.20"></a> +<span class="sourceLineNo">021</span>import org.apache.hadoop.hbase.HBaseClassTestRule;<a name="line.21"></a> +<span class="sourceLineNo">022</span>import org.apache.hadoop.hbase.testclassification.MapReduceTests;<a name="line.22"></a> +<span class="sourceLineNo">023</span>import org.apache.hadoop.hbase.testclassification.MediumTests;<a name="line.23"></a> +<span class="sourceLineNo">024</span>import org.junit.ClassRule;<a name="line.24"></a> +<span class="sourceLineNo">025</span>import org.junit.Test;<a name="line.25"></a> +<span class="sourceLineNo">026</span>import org.junit.experimental.categories.Category;<a name="line.26"></a> +<span class="sourceLineNo">027</span><a name="line.27"></a> +<span class="sourceLineNo">028</span>@Category({ MapReduceTests.class, MediumTests.class })<a name="line.28"></a> +<span class="sourceLineNo">029</span>public class TestTableInputFormatScanEmptyToBBA extends TestTableInputFormatScanBase {<a name="line.29"></a> +<span class="sourceLineNo">030</span><a name="line.30"></a> +<span class="sourceLineNo">031</span> @ClassRule<a name="line.31"></a> +<span class="sourceLineNo">032</span> public static final HBaseClassTestRule CLASS_RULE =<a name="line.32"></a> +<span class="sourceLineNo">033</span> HBaseClassTestRule.forClass(TestTableInputFormatScanEmptyToBBA.class);<a name="line.33"></a> +<span class="sourceLineNo">034</span><a name="line.34"></a> +<span class="sourceLineNo">035</span> /**<a name="line.35"></a> +<span class="sourceLineNo">036</span> * Tests a MR scan using specific start and stop rows.<a name="line.36"></a> +<span class="sourceLineNo">037</span> */<a name="line.37"></a> +<span class="sourceLineNo">038</span> @Test<a name="line.38"></a> +<span class="sourceLineNo">039</span> public void testScanEmptyToBBA()<a name="line.39"></a> +<span class="sourceLineNo">040</span> throws IOException, InterruptedException, ClassNotFoundException {<a name="line.40"></a> +<span class="sourceLineNo">041</span> testScan(null, "bba", "baz");<a name="line.41"></a> +<span class="sourceLineNo">042</span> }<a name="line.42"></a> +<span class="sourceLineNo">043</span>}<a name="line.43"></a> + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +</pre> +</div> +</body> +</html> 
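
The call testScan(null, "bba", "baz") reads as: empty start row, stop row "bba", and "baz" as the last row the mappers are expected to see, which matches HBase scan semantics where the stop row is exclusive. The following standalone illustration of that boundary is a sketch only; it assumes a running cluster, the pre-populated scantest table, and the three-letter row fixture implied by the expected values.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

public class StopRowBoundarySketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    try (Connection conn = ConnectionFactory.createConnection(conf);
         Table table = conn.getTable(TableName.valueOf("scantest"));
         // Stop row is exclusive: rows >= "bba" are not returned.
         ResultScanner scanner = table.getScanner(new Scan().withStopRow(Bytes.toBytes("bba")))) {
      byte[] last = null;
      for (Result r : scanner) {
        last = r.getRow();
      }
      // With three-letter rows aaa..zzz, the last row seen should be "baz".
      System.out.println(last == null ? "empty" : Bytes.toString(last));
    }
  }
}
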
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/06efc31c/testdevapidocs/src-html/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScanEmptyToBBB.html ---------------------------------------------------------------------- diff --git a/testdevapidocs/src-html/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScanEmptyToBBB.html b/testdevapidocs/src-html/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScanEmptyToBBB.html new file mode 100644 index 0000000..2bb7f4c --- /dev/null +++ b/testdevapidocs/src-html/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScanEmptyToBBB.html @@ -0,0 +1,115 @@ +<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd"> +<html lang="en"> +<head> +<title>Source code</title> +<link rel="stylesheet" type="text/css" href="../../../../../../stylesheet.css" title="Style"> +</head> +<body> +<div class="sourceContainer"> +<pre><span class="sourceLineNo">001</span>/**<a name="line.1"></a> +<span class="sourceLineNo">002</span> * Licensed to the Apache Software Foundation (ASF) under one<a name="line.2"></a> +<span class="sourceLineNo">003</span> * or more contributor license agreements. See the NOTICE file<a name="line.3"></a> +<span class="sourceLineNo">004</span> * distributed with this work for additional information<a name="line.4"></a> +<span class="sourceLineNo">005</span> * regarding copyright ownership. The ASF licenses this file<a name="line.5"></a> +<span class="sourceLineNo">006</span> * to you under the Apache License, Version 2.0 (the<a name="line.6"></a> +<span class="sourceLineNo">007</span> * "License"); you may not use this file except in compliance<a name="line.7"></a> +<span class="sourceLineNo">008</span> * with the License. You may obtain a copy of the License at<a name="line.8"></a> +<span class="sourceLineNo">009</span> *<a name="line.9"></a> +<span class="sourceLineNo">010</span> * http://www.apache.org/licenses/LICENSE-2.0<a name="line.10"></a> +<span class="sourceLineNo">011</span> *<a name="line.11"></a> +<span class="sourceLineNo">012</span> * Unless required by applicable law or agreed to in writing, software<a name="line.12"></a> +<span class="sourceLineNo">013</span> * distributed under the License is distributed on an "AS IS" BASIS,<a name="line.13"></a> +<span class="sourceLineNo">014</span> * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.<a name="line.14"></a> +<span class="sourceLineNo">015</span> * See the License for the specific language governing permissions and<a name="line.15"></a> +<span class="sourceLineNo">016</span> * limitations under the License.<a name="line.16"></a> +<span class="sourceLineNo">017</span> */<a name="line.17"></a> +<span class="sourceLineNo">018</span>package org.apache.hadoop.hbase.mapreduce;<a name="line.18"></a> +<span class="sourceLineNo">019</span><a name="line.19"></a> +<span class="sourceLineNo">020</span>import java.io.IOException;<a name="line.20"></a> +<span class="sourceLineNo">021</span>import org.apache.hadoop.hbase.HBaseClassTestRule;<a name="line.21"></a> +<span class="sourceLineNo">022</span>import org.apache.hadoop.hbase.testclassification.MapReduceTests;<a name="line.22"></a> +<span class="sourceLineNo">023</span>import org.apache.hadoop.hbase.testclassification.MediumTests;<a name="line.23"></a> +<span class="sourceLineNo">024</span>import org.junit.ClassRule;<a name="line.24"></a> +<span class="sourceLineNo">025</span>import org.junit.Test;<a name="line.25"></a> +<span 
class="sourceLineNo">026</span>import org.junit.experimental.categories.Category;<a name="line.26"></a> +<span class="sourceLineNo">027</span><a name="line.27"></a> +<span class="sourceLineNo">028</span>@Category({ MapReduceTests.class, MediumTests.class })<a name="line.28"></a> +<span class="sourceLineNo">029</span>public class TestTableInputFormatScanEmptyToBBB extends TestTableInputFormatScanBase {<a name="line.29"></a> +<span class="sourceLineNo">030</span><a name="line.30"></a> +<span class="sourceLineNo">031</span> @ClassRule<a name="line.31"></a> +<span class="sourceLineNo">032</span> public static final HBaseClassTestRule CLASS_RULE =<a name="line.32"></a> +<span class="sourceLineNo">033</span> HBaseClassTestRule.forClass(TestTableInputFormatScanEmptyToBBB.class);<a name="line.33"></a> +<span class="sourceLineNo">034</span><a name="line.34"></a> +<span class="sourceLineNo">035</span> /**<a name="line.35"></a> +<span class="sourceLineNo">036</span> * Tests a MR scan using specific start and stop rows.<a name="line.36"></a> +<span class="sourceLineNo">037</span> */<a name="line.37"></a> +<span class="sourceLineNo">038</span> @Test<a name="line.38"></a> +<span class="sourceLineNo">039</span> public void testScanEmptyToBBB()<a name="line.39"></a> +<span class="sourceLineNo">040</span> throws IOException, InterruptedException, ClassNotFoundException {<a name="line.40"></a> +<span class="sourceLineNo">041</span> testScan(null, "bbb", "bba");<a name="line.41"></a> +<span class="sourceLineNo">042</span> }<a name="line.42"></a> +<span class="sourceLineNo">043</span>}<a name="line.43"></a> + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +</pre> +</div> +</body> +</html> http://git-wip-us.apache.org/repos/asf/hbase-site/blob/06efc31c/testdevapidocs/src-html/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScanEmptyToEmpty.html ---------------------------------------------------------------------- diff --git a/testdevapidocs/src-html/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScanEmptyToEmpty.html b/testdevapidocs/src-html/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScanEmptyToEmpty.html new file mode 100644 index 0000000..710d615 --- /dev/null +++ b/testdevapidocs/src-html/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScanEmptyToEmpty.html @@ -0,0 +1,115 @@ +<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd"> +<html lang="en"> +<head> +<title>Source code</title> +<link rel="stylesheet" type="text/css" href="../../../../../../stylesheet.css" title="Style"> +</head> +<body> +<div class="sourceContainer"> +<pre><span class="sourceLineNo">001</span>/**<a name="line.1"></a> +<span class="sourceLineNo">002</span> * Licensed to the Apache Software Foundation (ASF) under one<a name="line.2"></a> +<span class="sourceLineNo">003</span> * or more contributor license agreements. See the NOTICE file<a name="line.3"></a> +<span class="sourceLineNo">004</span> * distributed with this work for additional information<a name="line.4"></a> +<span class="sourceLineNo">005</span> * regarding copyright ownership. The ASF licenses this file<a name="line.5"></a> +<span class="sourceLineNo">006</span> * to you under the Apache License, Version 2.0 (the<a name="line.6"></a> +<span class="sourceLineNo">007</span> * "License"); you may not use this file except in compliance<a name="line.7"></a> +<span class="sourceLineNo">008</span> * with the License. 
You may obtain a copy of the License at<a name="line.8"></a> +<span class="sourceLineNo">009</span> *<a name="line.9"></a> +<span class="sourceLineNo">010</span> * http://www.apache.org/licenses/LICENSE-2.0<a name="line.10"></a> +<span class="sourceLineNo">011</span> *<a name="line.11"></a> +<span class="sourceLineNo">012</span> * Unless required by applicable law or agreed to in writing, software<a name="line.12"></a> +<span class="sourceLineNo">013</span> * distributed under the License is distributed on an "AS IS" BASIS,<a name="line.13"></a> +<span class="sourceLineNo">014</span> * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.<a name="line.14"></a> +<span class="sourceLineNo">015</span> * See the License for the specific language governing permissions and<a name="line.15"></a> +<span class="sourceLineNo">016</span> * limitations under the License.<a name="line.16"></a> +<span class="sourceLineNo">017</span> */<a name="line.17"></a> +<span class="sourceLineNo">018</span>package org.apache.hadoop.hbase.mapreduce;<a name="line.18"></a> +<span class="sourceLineNo">019</span><a name="line.19"></a> +<span class="sourceLineNo">020</span>import java.io.IOException;<a name="line.20"></a> +<span class="sourceLineNo">021</span>import org.apache.hadoop.hbase.HBaseClassTestRule;<a name="line.21"></a> +<span class="sourceLineNo">022</span>import org.apache.hadoop.hbase.testclassification.MapReduceTests;<a name="line.22"></a> +<span class="sourceLineNo">023</span>import org.apache.hadoop.hbase.testclassification.MediumTests;<a name="line.23"></a> +<span class="sourceLineNo">024</span>import org.junit.ClassRule;<a name="line.24"></a> +<span class="sourceLineNo">025</span>import org.junit.Test;<a name="line.25"></a> +<span class="sourceLineNo">026</span>import org.junit.experimental.categories.Category;<a name="line.26"></a> +<span class="sourceLineNo">027</span><a name="line.27"></a> +<span class="sourceLineNo">028</span>@Category({ MapReduceTests.class, MediumTests.class })<a name="line.28"></a> +<span class="sourceLineNo">029</span>public class TestTableInputFormatScanEmptyToEmpty extends TestTableInputFormatScanBase {<a name="line.29"></a> +<span class="sourceLineNo">030</span><a name="line.30"></a> +<span class="sourceLineNo">031</span> @ClassRule<a name="line.31"></a> +<span class="sourceLineNo">032</span> public static final HBaseClassTestRule CLASS_RULE =<a name="line.32"></a> +<span class="sourceLineNo">033</span> HBaseClassTestRule.forClass(TestTableInputFormatScanEmptyToEmpty.class);<a name="line.33"></a> +<span class="sourceLineNo">034</span><a name="line.34"></a> +<span class="sourceLineNo">035</span> /**<a name="line.35"></a> +<span class="sourceLineNo">036</span> * Tests a MR scan using specific start and stop rows.<a name="line.36"></a> +<span class="sourceLineNo">037</span> */<a name="line.37"></a> +<span class="sourceLineNo">038</span> @Test<a name="line.38"></a> +<span class="sourceLineNo">039</span> public void testScanEmptyToEmpty()<a name="line.39"></a> +<span class="sourceLineNo">040</span> throws IOException, InterruptedException, ClassNotFoundException {<a name="line.40"></a> +<span class="sourceLineNo">041</span> testScan(null, null, null);<a name="line.41"></a> +<span class="sourceLineNo">042</span> }<a name="line.42"></a> +<span class="sourceLineNo">043</span>}<a name="line.43"></a> + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +</pre> +</div> +</body> +</html> 
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/06efc31c/testdevapidocs/src-html/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScanEmptyToOPP.html ---------------------------------------------------------------------- diff --git a/testdevapidocs/src-html/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScanEmptyToOPP.html b/testdevapidocs/src-html/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScanEmptyToOPP.html new file mode 100644 index 0000000..71ef05c --- /dev/null +++ b/testdevapidocs/src-html/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScanEmptyToOPP.html @@ -0,0 +1,115 @@ +<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd"> +<html lang="en"> +<head> +<title>Source code</title> +<link rel="stylesheet" type="text/css" href="../../../../../../stylesheet.css" title="Style"> +</head> +<body> +<div class="sourceContainer"> +<pre><span class="sourceLineNo">001</span>/**<a name="line.1"></a> +<span class="sourceLineNo">002</span> * Licensed to the Apache Software Foundation (ASF) under one<a name="line.2"></a> +<span class="sourceLineNo">003</span> * or more contributor license agreements. See the NOTICE file<a name="line.3"></a> +<span class="sourceLineNo">004</span> * distributed with this work for additional information<a name="line.4"></a> +<span class="sourceLineNo">005</span> * regarding copyright ownership. The ASF licenses this file<a name="line.5"></a> +<span class="sourceLineNo">006</span> * to you under the Apache License, Version 2.0 (the<a name="line.6"></a> +<span class="sourceLineNo">007</span> * "License"); you may not use this file except in compliance<a name="line.7"></a> +<span class="sourceLineNo">008</span> * with the License. You may obtain a copy of the License at<a name="line.8"></a> +<span class="sourceLineNo">009</span> *<a name="line.9"></a> +<span class="sourceLineNo">010</span> * http://www.apache.org/licenses/LICENSE-2.0<a name="line.10"></a> +<span class="sourceLineNo">011</span> *<a name="line.11"></a> +<span class="sourceLineNo">012</span> * Unless required by applicable law or agreed to in writing, software<a name="line.12"></a> +<span class="sourceLineNo">013</span> * distributed under the License is distributed on an "AS IS" BASIS,<a name="line.13"></a> +<span class="sourceLineNo">014</span> * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.<a name="line.14"></a> +<span class="sourceLineNo">015</span> * See the License for the specific language governing permissions and<a name="line.15"></a> +<span class="sourceLineNo">016</span> * limitations under the License.<a name="line.16"></a> +<span class="sourceLineNo">017</span> */<a name="line.17"></a> +<span class="sourceLineNo">018</span>package org.apache.hadoop.hbase.mapreduce;<a name="line.18"></a> +<span class="sourceLineNo">019</span><a name="line.19"></a> +<span class="sourceLineNo">020</span>import java.io.IOException;<a name="line.20"></a> +<span class="sourceLineNo">021</span>import org.apache.hadoop.hbase.HBaseClassTestRule;<a name="line.21"></a> +<span class="sourceLineNo">022</span>import org.apache.hadoop.hbase.testclassification.MapReduceTests;<a name="line.22"></a> +<span class="sourceLineNo">023</span>import org.apache.hadoop.hbase.testclassification.MediumTests;<a name="line.23"></a> +<span class="sourceLineNo">024</span>import org.junit.ClassRule;<a name="line.24"></a> +<span class="sourceLineNo">025</span>import org.junit.Test;<a name="line.25"></a> +<span 
class="sourceLineNo">026</span>import org.junit.experimental.categories.Category;<a name="line.26"></a> +<span class="sourceLineNo">027</span><a name="line.27"></a> +<span class="sourceLineNo">028</span>@Category({ MapReduceTests.class, MediumTests.class })<a name="line.28"></a> +<span class="sourceLineNo">029</span>public class TestTableInputFormatScanEmptyToOPP extends TestTableInputFormatScanBase {<a name="line.29"></a> +<span class="sourceLineNo">030</span><a name="line.30"></a> +<span class="sourceLineNo">031</span> @ClassRule<a name="line.31"></a> +<span class="sourceLineNo">032</span> public static final HBaseClassTestRule CLASS_RULE =<a name="line.32"></a> +<span class="sourceLineNo">033</span> HBaseClassTestRule.forClass(TestTableInputFormatScanEmptyToOPP.class);<a name="line.33"></a> +<span class="sourceLineNo">034</span><a name="line.34"></a> +<span class="sourceLineNo">035</span> /**<a name="line.35"></a> +<span class="sourceLineNo">036</span> * Tests a MR scan using specific start and stop rows.<a name="line.36"></a> +<span class="sourceLineNo">037</span> */<a name="line.37"></a> +<span class="sourceLineNo">038</span> @Test<a name="line.38"></a> +<span class="sourceLineNo">039</span> public void testScanEmptyToOPP()<a name="line.39"></a> +<span class="sourceLineNo">040</span> throws IOException, InterruptedException, ClassNotFoundException {<a name="line.40"></a> +<span class="sourceLineNo">041</span> testScan(null, "opp", "opo");<a name="line.41"></a> +<span class="sourceLineNo">042</span> }<a name="line.42"></a> +<span class="sourceLineNo">043</span>}<a name="line.43"></a> + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +</pre> +</div> +</body> +</html>
