Author: rvs
Date: Thu Dec 29 23:45:52 2011
New Revision: 1225693
URL: http://svn.apache.org/viewvc?rev=1225693&view=rev
Log:
BIGTOP-321. Make TestLoadAndVerify parameterizable and runnable from command
line (Jonathan Hsieh via rvs)
Removed:
incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/smoke/IncrementalPELoad.java
incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/smoke/TestHBaseCompression.groovy
incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/smoke/TestHBaseSmoke.java
incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/smoke/TestHFileOutputFormat.java
incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/smoke/TestLoadIncrementalHFiles.java
incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/system/TestLoadAndVerify.java
incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/util/HBaseTestUtil.java
Modified:
incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/hbase/pom.xml
incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/system/TestLoadAndVerify.java
Modified:
incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/hbase/pom.xml
URL: http://svn.apache.org/viewvc/incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/hbase/pom.xml?rev=1225693&r1=1225692&r2=1225693&view=diff
==============================================================================
--- incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/hbase/pom.xml (original)
+++ incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/hbase/pom.xml Thu Dec 29 23:45:52 2011
@@ -31,13 +31,15 @@
<name>hbasesmoke</name>
<dependencies>
- <dependency>
+ <dependency>
<groupId>org.apache.hadoop</groupId>
- <artifactId>hadoop-mapred</artifactId>
+ <artifactId>hadoop-common</artifactId>
+ <version>0.23.0-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
- <artifactId>hadoop-hdfs</artifactId>
+ <artifactId>hadoop-mapreduce-client-core</artifactId>
+ <version>0.23.0-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.apache.hbase</groupId>
Modified:
incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/system/TestLoadAndVerify.java
URL: http://svn.apache.org/viewvc/incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/system/TestLoadAndVerify.java?rev=1225693&r1=1225692&r2=1225693&view=diff
==============================================================================
--- incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/system/TestLoadAndVerify.java (original)
+++ incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/hbase/src/main/groovy/org/apache/bigtop/itest/hbase/system/TestLoadAndVerify.java Thu Dec 29 23:45:52 2011
@@ -17,17 +17,20 @@
*/
package org.apache.bigtop.itest.hbase.system;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Arrays;
import java.util.Random;
-
import java.util.regex.Matcher;
import java.util.regex.Pattern;
+import org.apache.bigtop.itest.hbase.util.HBaseTestUtil;
import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
@@ -45,20 +48,18 @@ import org.apache.hadoop.io.BytesWritabl
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Counter;
+import org.apache.hadoop.mapreduce.TaskCounter;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
+import org.apache.hadoop.util.Tool;
+import org.apache.hadoop.util.ToolRunner;
import org.junit.Test;
import com.google.common.collect.Lists;
-import org.apache.bigtop.itest.hbase.util.HBaseTestUtil;
-
-public class TestLoadAndVerify {
+public class TestLoadAndVerify extends Configured implements Tool {
private static final String TEST_NAME = "TestLoadAndVerify";
private static final byte[] TEST_FAMILY = Bytes.toBytes("f1");
private static final byte[] TEST_QUALIFIER = Bytes.toBytes("q1");
@@ -245,12 +246,11 @@ public class TestLoadAndVerify {
}
}
- private void doLoad(HTableDescriptor htd) throws Exception {
+ private void doLoad(Configuration conf, HTableDescriptor htd) throws Exception {
Path outputDir =
new Path(HBaseTestUtil.getMROutputDir(TEST_NAME),
"load-output");
- Configuration conf = HBaseConfiguration.create();
NMapInputFormat.setNumMapTasks(conf, NUM_TASKS);
conf.set(TABLE_NAME_KEY, htd.getNameAsString());
@@ -268,12 +268,11 @@ public class TestLoadAndVerify {
assertTrue(job.waitForCompletion(true));
}
- private void doVerify(HTableDescriptor htd) throws Exception {
+ private void doVerify(Configuration conf, HTableDescriptor htd) throws Exception {
Path outputDir =
new Path(HBaseTestUtil.getMROutputDir(TEST_NAME),
"verify-output");
- Configuration conf = HBaseConfiguration.create();
Job job = new Job(conf);
job.setJarByClass(this.getClass());
job.setJobName(TEST_NAME + " Verification for " + htd.getNameAsString());
@@ -290,8 +289,7 @@ public class TestLoadAndVerify {
FileOutputFormat.setOutputPath(job, outputDir);
assertTrue(job.waitForCompletion(true));
- long numOutputRecords = job.getCounters().findCounter(
- org.apache.hadoop.mapred.Task.Counter.REDUCE_OUTPUT_RECORDS).getValue();
+ long numOutputRecords = job.getCounters().findCounter(TaskCounter.REDUCE_OUTPUT_RECORDS).getValue();
assertEquals(0, numOutputRecords);
}
@@ -303,12 +301,102 @@ public class TestLoadAndVerify {
int numPreCreate = 40;
admin.createTable(htd, Bytes.toBytes(0L), Bytes.toBytes(-1L),
numPreCreate);
- doLoad(htd);
- doVerify(htd);
+ Configuration conf = HBaseConfiguration.create();
+
+ doLoad(conf, htd);
+ doVerify(conf, htd);
// Only disable and drop if we succeeded to verify - otherwise it's useful
// to leave it around for post-mortem
- admin.disableTable(htd.getName());
+ deleteTable(admin, htd);
+ }
+
+ private void deleteTable(HBaseAdmin admin, HTableDescriptor htd)
+ throws IOException, InterruptedException {
+ // Use disableTestAsync because disable can take a long time to complete
+ System.out.print("Disabling table " + htd.getNameAsString() +" ");
+ admin.disableTableAsync(htd.getName());
+
+ long start = System.currentTimeMillis();
+ // NOTE tables can be both admin.isTableEnabled=false and
+ // isTableDisabled=false, when disabling must use isTableDisabled!
+ while (!admin.isTableDisabled(htd.getName())) {
+ System.out.print(".");
+ Thread.sleep(1000);
+ }
+ long delta = System.currentTimeMillis() - start;
+ System.out.println(" " + delta +" ms");
+ System.out.println("Deleting table " + htd.getNameAsString() +" ");
admin.deleteTable(htd.getName());
}
+
+ public void usage() {
+ System.err.println(this.getClass().getSimpleName() + " [-Doptions] <load|verify|loadAndVerify>");
+ System.err.println(" Loads a table with row dependencies and verifies the dependency chains");
+ System.err.println("Options");
+ System.err.println(" -Dloadmapper.table=<name> Table to write/verify (default autogen)");
+ System.err.println(" -Dloadmapper.backrefs=<n> Number of backreferences per row (default 50)");
+ System.err.println(" -Dloadmapper.num_to_write=<n> Number of rows per mapper (default 100,000 per mapper)");
+ System.err.println(" -Dloadmapper.deleteAfter=<bool> Delete after a successful verify (default true)");
+ System.err.println(" -Dloadmapper.numPresplits=<n> Number of presplit regions to start with (default 40)");
+
+ }
+
+ public int run(String argv[]) throws Exception {
+ if (argv.length < 1 || argv.length > 1) {
+ usage();
+ return 1;
+ }
+
+ boolean doLoad = false;
+ boolean doVerify = false;
+ boolean doDelete = getConf().getBoolean("loadmapper.deleteAfter",true);
+ int numPresplits = getConf().getInt("loadmapper.numPresplits", 40);
+
+ if (argv[0].equals("load")) {
+ doLoad = true;
+ } else if (argv[0].equals("verify")) {
+ doVerify= true;
+ } else if (argv[0].equals("loadAndVerify")) {
+ doLoad=true;
+ doVerify= true;
+ } else {
+ System.err.println("Invalid argument " + argv[0]);
+ usage();
+ return 1;
+ }
+
+ // create HTableDescriptor for specified table
+ String table = getConf().get(TABLE_NAME_KEY, "");
+ HTableDescriptor htd ;
+ if ("".equals(table)) {
+ // Just like the unit test.
+ htd = HBaseTestUtil.createTestTableDescriptor(TEST_NAME, TEST_FAMILY);
+ } else {
+ // overridden by the user.
+ htd = new HTableDescriptor(table);
+ htd.addFamily(new HColumnDescriptor(TEST_FAMILY));
+ }
+
+ TestLoadAndVerify verify = new TestLoadAndVerify();
+
+ HBaseAdmin admin = new HBaseAdmin(getConf());
+ if (doLoad) {
+ admin.createTable(htd, Bytes.toBytes(0L), Bytes.toBytes(-1L),
numPresplits);
+ verify.doLoad(getConf(), htd);
+ }
+ if (doVerify) {
+ verify.doVerify(getConf(), htd);
+ if (doDelete) {
+ deleteTable(admin, htd);
+ }
+ }
+ return 0;
+ }
+
+ public static void main(String argv[]) throws Exception {
+ Configuration conf = HBaseConfiguration.create();
+ int ret = ToolRunner.run(conf, new TestLoadAndVerify(), argv);
+ System.exit(ret);
+ }
}