Author: rvs
Date: Thu Feb 23 22:04:09 2012
New Revision: 1292977
URL: http://svn.apache.org/viewvc?rev=1292977&view=rev
Log:
BIGTOP-414. enable hadoop tests in hadoop-0.23 branch to build and to run (Wing Yew Poon via rvs)
Added:
incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadooptests/FSCmdExecutor.java
incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadooptests/TestCLI.java
incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/hadoop/src/main/resources/clitest_data/testConf.xml
- copied, changed from r1292931, incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/hadoop/src/main/resources/testConfCluster.xml
Removed:
incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadooptests/TestTestCLI.groovy
incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/hadoop/src/main/resources/testConfCluster.xml
Modified:
incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/hadoop/pom.xml
incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoopexamples/TestHadoopExamples.groovy
incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoopsmoke/TestHadoopSmoke.groovy
incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/pom.xml
incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-execution/smokes/hadoop/pom.xml
incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-execution/smokes/pom.xml
Modified: incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/hadoop/pom.xml
URL: http://svn.apache.org/viewvc/incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/hadoop/pom.xml?rev=1292977&r1=1292976&r2=1292977&view=diff
==============================================================================
--- incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/hadoop/pom.xml (original)
+++ incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/hadoop/pom.xml Thu Feb 23 22:04:09 2012
@@ -28,20 +28,24 @@
<groupId>org.apache.bigtop.itest</groupId>
<artifactId>hadoop-smoke</artifactId>
<version>0.3.0-hadoop23-incubating-SNAPSHOT</version>
- <name>hadoopsmoke</name>
+ <name>hadoopsmoke</name>
<dependencies>
<dependency>
<groupId>org.apache.hadoop</groupId>
- <artifactId>hadoop-common</artifactId>
+ <artifactId>hadoop-mapreduce-client-core</artifactId>
+ <version>${hadoop.version}</version>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
- <artifactId>hadoop-common-test</artifactId>
+ <artifactId>hadoop-common</artifactId>
+ <version>${hadoop.version}</version>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
- <artifactId>hadoop-hdfs-test</artifactId>
+ <artifactId>hadoop-common</artifactId>
+ <version>${hadoop.version}</version>
+ <type>test-jar</type>
</dependency>
</dependencies>
Modified: incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoopexamples/TestHadoopExamples.groovy
URL: http://svn.apache.org/viewvc/incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoopexamples/TestHadoopExamples.groovy?rev=1292977&r1=1292976&r2=1292977&view=diff
==============================================================================
--- incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoopexamples/TestHadoopExamples.groovy (original)
+++ incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoopexamples/TestHadoopExamples.groovy Thu Feb 23 22:04:09 2012
@@ -40,67 +40,61 @@ class TestHadoopExamples {
private static final String HADOOP_CONF_DIR =
System.getenv('HADOOP_CONF_DIR');
private static String hadoopExamplesJar =
JarContent.getJarName(HADOOP_HOME, 'hadoop.*examples.*.jar');
- private static String hadoopMapredTestJar =
- JarContent.getJarName(HADOOP_HOME, 'hadoop.*mapred-test.*.jar');
static {
assertNotNull("HADOOP_HOME has to be set to run this test",
HADOOP_HOME);
assertNotNull("HADOOP_CONF_DIR has to be set to run this test",
HADOOP_CONF_DIR);
assertNotNull("Can't find hadoop-examples.jar file", hadoopExamplesJar);
- assertNotNull("Can't find hadoop-mapred-test.jar file",
hadoopMapredTestJar);
}
static final String HADOOP_EXAMPLES_JAR =
HADOOP_HOME + "/" + hadoopExamplesJar;
- static final String HADOOP_MR_TEST_JAR =
- HADOOP_HOME + "/" + hadoopMapredTestJar;
- private static final String hadoop = "$HADOOP_HOME/bin/hadoop";
static Shell sh = new Shell("/bin/bash -s");
private static final String EXAMPLES = "examples";
private static final String EXAMPLES_OUT = "examples-output";
private static Configuration conf;
- private static String HADOOP_OPTIONS;
+
+ private static String mr_version = System.getProperty("mr.version", "mr2");
+ static final String RANDOMTEXTWRITER_TOTALBYTES = (mr_version == "mr1") ?
+ "test.randomtextwrite.total_bytes" :
"mapreduce.randomtextwriter.totalbytes";
@BeforeClass
static void setUp() {
conf = new Configuration();
- conf.addResource('mapred-site.xml');
- HADOOP_OPTIONS =
- "-fs ${conf.get('fs.default.name')} -jt
${conf.get('mapred.job.tracker')}";
// Unpack resource
JarContent.unpackJarContainer(TestHadoopExamples.class, '.' , null)
- sh.exec("$hadoop fs $HADOOP_OPTIONS -test -e $EXAMPLES");
+ sh.exec("hadoop fs -test -e $EXAMPLES");
if (sh.getRet() == 0) {
- sh.exec("$hadoop fs $HADOOP_OPTIONS -rmr -skipTrash $EXAMPLES");
+ sh.exec("hadoop fs -rmr -skipTrash $EXAMPLES");
assertTrue("Deletion of previous $EXAMPLES from HDFS failed",
sh.getRet() == 0);
}
- sh.exec("$hadoop fs $HADOOP_OPTIONS -test -e $EXAMPLES_OUT");
+ sh.exec("hadoop fs -test -e $EXAMPLES_OUT");
if (sh.getRet() == 0) {
- sh.exec("$hadoop fs $HADOOP_OPTIONS -rmr -skipTrash $EXAMPLES_OUT");
+ sh.exec("hadoop fs -rmr -skipTrash $EXAMPLES_OUT");
assertTrue("Deletion of previous examples output from HDFS failed",
sh.getRet() == 0);
}
-// copy test files to HDFS
- sh.exec("hadoop fs $HADOOP_OPTIONS -put $EXAMPLES $EXAMPLES",
- "hadoop fs $HADOOP_OPTIONS -mkdir $EXAMPLES_OUT");
+ // copy test files to HDFS
+ sh.exec("hadoop fs -put $EXAMPLES $EXAMPLES",
+ "hadoop fs -mkdir $EXAMPLES_OUT");
assertTrue("Could not create output directory", sh.getRet() == 0);
}
static Map examples =
[
- pi :'20 10',
+ pi :'2 1000',
wordcount :"$EXAMPLES/text $EXAMPLES_OUT/wordcount",
multifilewc :"$EXAMPLES/text $EXAMPLES_OUT/multifilewc",
-// aggregatewordcount:"$EXAMPLES/text $EXAMPLES_OUT/aggregatewordcount 5 textinputformat",
-// aggregatewordhist :"$EXAMPLES/text $EXAMPLES_OUT/aggregatewordhist 5 textinputformat",
+ aggregatewordcount:"$EXAMPLES/text $EXAMPLES_OUT/aggregatewordcount 2 textinputformat",
+ aggregatewordhist :"$EXAMPLES/text $EXAMPLES_OUT/aggregatewordhist 2 textinputformat",
grep :"$EXAMPLES/text $EXAMPLES_OUT/grep '[Cc]uriouser'",
- sleep :"-m 10 -r 10",
+// sleep :"-m 10 -r 10",
secondarysort :"$EXAMPLES/ints $EXAMPLES_OUT/secondarysort",
- randomtextwriter :"-Dtest.randomtextwrite.total_bytes=1073741824 $EXAMPLES_OUT/randomtextwriter"
+ randomtextwriter :"-D $RANDOMTEXTWRITER_TOTALBYTES=1073741824 $EXAMPLES_OUT/randomtextwriter"
];
private String testName;
@@ -117,13 +111,12 @@ class TestHadoopExamples {
public TestHadoopExamples(String name, String args) {
testName = name;
testArgs = args;
- testJar = (name == "sleep") ? HADOOP_MR_TEST_JAR :
- HADOOP_EXAMPLES_JAR;
+ testJar = HADOOP_EXAMPLES_JAR;
}
@Test
void testMRExample() {
- sh.exec("$hadoop jar $testJar $testName $HADOOP_OPTIONS $testArgs");
+ sh.exec("hadoop jar $testJar $testName $testArgs");
assertTrue("Example $testName failed",
sh.getRet() == 0);
Modified: incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoopsmoke/TestHadoopSmoke.groovy
URL: http://svn.apache.org/viewvc/incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoopsmoke/TestHadoopSmoke.groovy?rev=1292977&r1=1292976&r2=1292977&view=diff
==============================================================================
--- incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoopsmoke/TestHadoopSmoke.groovy (original)
+++ incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoopsmoke/TestHadoopSmoke.groovy Thu Feb 23 22:04:09 2012
@@ -24,26 +24,31 @@ import org.junit.AfterClass
import org.junit.BeforeClass
import org.junit.Test
import org.apache.hadoop.conf.Configuration
+import org.apache.hadoop.hdfs.DFSConfigKeys
import static org.junit.Assert.assertEquals
-
-// TODO: we have to stub it for 0.20.2 release, once we move to 0.21+ this can go
-// import org.apache.hadoop.hdfs.DFSConfigKeys
-class DFSConfigKeys {
- static public final FS_DEFAULT_NAME_KEY = "fs.default.name";
-}
+import static org.junit.Assert.assertNotNull
class TestHadoopSmoke {
static Shell sh = new Shell("/bin/bash -s")
- static String hadoopHome = System.getProperty('HADOOP_HOME', '/thisfileneverwillexist')
- static String testDir = "test.hadoopsmoke." + (new Date().getTime())
- static String nn = (new Configuration()).get(DFSConfigKeys.FS_DEFAULT_NAME_KEY)
-
- String cmd = "hadoop jar
${hadoopHome}/contrib/streaming/hadoop*streaming*.jar" +
- " -D mapred.map.tasks=1 -D mapred.reduce.tasks=1 -D
mapred.job.name=Experiment "
- String cmd2 =" -input ${testDir}/cachefile/input.txt -mapper map.sh -file
map.sh -reducer cat" +
- " -output ${testDir}/cachefile/out -verbose "
- String arg =
"${nn}/user/${System.properties['user.name']}/${testDir}/cachefile/cachedir.jar#testlink
"
+ static String hadoopHome = System.getProperty('HADOOP_HOME', '/usr/lib/hadoop')
+ static String streamingHome = System.getenv('STREAMING_HOME')
+ static final String STREAMING_HOME =
+ (streamingHome == null) ? hadoopHome + "/contrib/streaming" : streamingHome;
+ static String streaming_jar =
+ JarContent.getJarName(STREAMING_HOME, 'hadoop.*streaming.*.jar');
+ static {
+ assertNotNull("Can't find hadoop-streaming.jar", streaming_jar);
+ }
+ static final String STREAMING_JAR = STREAMING_HOME + "/" + streaming_jar;
+ static String testDir = "test.hadoopsmoke." + (new Date().getTime())
+ static String nn = (new Configuration()).get(DFSConfigKeys.FS_DEFAULT_NAME_KEY)
+
+ String cmd = "hadoop jar ${STREAMING_JAR}" +
+ " -D mapred.map.tasks=1 -D mapred.reduce.tasks=1 -D
mapred.job.name=Experiment"
+ String cmd2 = " -input ${testDir}/cachefile/input.txt -mapper map.sh -file
map.sh -reducer cat" +
+ " -output ${testDir}/cachefile/out -verbose"
+ String arg =
"${nn}/user/${System.properties['user.name']}/${testDir}/cachefile/cachedir.jar#testlink"
@BeforeClass
static void setUp() throws IOException {
Added: incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadooptests/FSCmdExecutor.java
URL: http://svn.apache.org/viewvc/incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadooptests/FSCmdExecutor.java?rev=1292977&view=auto
==============================================================================
--- incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadooptests/FSCmdExecutor.java (added)
+++ incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadooptests/FSCmdExecutor.java Thu Feb 23 22:04:09 2012
@@ -0,0 +1,60 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.bigtop.itest.hadooptests;
+
+import java.io.File;
+import java.util.StringTokenizer;
+
+import org.apache.hadoop.fs.FsShell;
+import org.apache.hadoop.cli.CLITestHelper;
+import org.apache.hadoop.cli.util.CommandExecutor;
+import org.apache.hadoop.util.ToolRunner;
+
+public class FSCmdExecutor extends CommandExecutor {
+ protected String namenode = null;
+ protected FsShell shell = null;
+
+ public FSCmdExecutor(String namenode, FsShell shell) {
+ this.namenode = namenode;
+ this.shell = shell;
+ }
+
+ protected void execute(final String cmd) throws Exception{
+ String[] args = getCommandAsArgs(cmd, "NAMENODE", this.namenode);
+ ToolRunner.run(shell, args);
+ }
+
+ @Override
+ protected String[] getCommandAsArgs(final String cmd, final String masterKey,
+ final String master) {
+ StringTokenizer tokenizer = new StringTokenizer(cmd, " ");
+ String[] args = new String[tokenizer.countTokens()];
+ int i = 0;
+ while (tokenizer.hasMoreTokens()) {
+ args[i] = tokenizer.nextToken();
+ args[i] = args[i].replaceAll(masterKey, master);
+ args[i] = args[i].replaceAll("CLITEST_DATA",
+ new File(CLITestHelper.TEST_CACHE_DATA_DIR).toURI().toString().replace(' ', '+'));
+ args[i] = args[i].replaceAll("TEST_DIR_ABSOLUTE",
TestCLI.TEST_DIR_ABSOLUTE);
+ args[i] = args[i].replaceAll("USERNAME",
System.getProperty("user.name"));
+
+ i++;
+ }
+ return args;
+ }
+}
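
[Editor's note, not part of the commit: a minimal usage sketch of the new FSCmdExecutor. The namenode URI and the wrapper class name below are hypothetical; TestCLI, added next, wires the executor up the same way through CLITestHelper.]

    import org.apache.bigtop.itest.hadooptests.FSCmdExecutor;
    import org.apache.hadoop.cli.util.CommandExecutor;
    import org.apache.hadoop.fs.FsShell;
    import org.apache.hadoop.hdfs.HdfsConfiguration;

    public class FSCmdExecutorSketch {
      public static void main(String[] args) throws Exception {
        // Hypothetical namenode URI; in the tests it comes from fs.defaultFS.
        String nn = "hdfs://localhost:8020";
        FsShell shell = new FsShell(new HdfsConfiguration());
        CommandExecutor executor = new FSCmdExecutor(nn, shell);
        // "NAMENODE" in the command string is substituted with nn before FsShell runs it.
        CommandExecutor.Result result = executor.executeCommand("-fs NAMENODE -ls /");
        System.out.println("exit code: " + result.getExitCode());
      }
    }
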
Added: incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadooptests/TestCLI.java
URL: http://svn.apache.org/viewvc/incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadooptests/TestCLI.java?rev=1292977&view=auto
==============================================================================
--- incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadooptests/TestCLI.java (added)
+++ incubator/bigtop/branches/hadoop-0.23/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadooptests/TestCLI.java Thu Feb 23 22:04:09 2012
@@ -0,0 +1,93 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.bigtop.itest.hadooptests;
+
+import java.io.File;
+
+import org.apache.hadoop.fs.CommonConfigurationKeys;
+import org.apache.hadoop.fs.FsShell;
+import org.apache.hadoop.cli.CLITestHelper;
+import org.apache.hadoop.cli.util.CLICommand;
+import org.apache.hadoop.cli.util.CLICommandFS;
+import org.apache.hadoop.cli.util.CommandExecutor;
+import org.apache.hadoop.hdfs.DFSConfigKeys;
+import org.apache.hadoop.hdfs.HdfsConfiguration;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+/**
+ * Tests for the Command Line Interface (CLI)
+ */
+public class TestCLI extends CLITestHelper {
+ public static final String TEST_DIR_ABSOLUTE = "/tmp/testcli";
+ private String nn;
+ private String sug;
+
+ @Before
+ @Override
+ public void setUp() throws Exception {
+ readTestConfigFile();
+ conf = new HdfsConfiguration();
+ conf.setBoolean(CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION,
+ true);
+ clitestDataDir =
+ new File(TEST_CACHE_DATA_DIR).toURI().toString().replace(' ', '+');
+ nn = conf.get(CommonConfigurationKeys.FS_DEFAULT_NAME_KEY);
+ sug = conf.get(DFSConfigKeys.DFS_PERMISSIONS_SUPERUSERGROUP_KEY);
+ // Many of the tests expect a replication value of 1 in the output
+ conf.setInt("dfs.replication", 1);
+ }
+
+ @After
+ @Override
+ public void tearDown() throws Exception {
+ super.tearDown();
+ }
+
+ @Override
+ protected String getTestFile() {
+ return "testConf.xml";
+ }
+
+ @Test
+ @Override
+ public void testAll() {
+ super.testAll();
+ }
+
+ @Override
+ protected String expandCommand(final String cmd) {
+ String expCmd = super.expandCommand(cmd);
+ String testcliDir = TEST_DIR_ABSOLUTE;
+ expCmd = expCmd.replaceAll("TEST_DIR_ABSOLUTE", testcliDir);
+ expCmd = expCmd.replaceAll("SUPERGROUP", sug);
+ return expCmd;
+ }
+
+ @Override
+ protected CommandExecutor.Result execute(CLICommand cmd) throws Exception {
+ if (cmd.getType() instanceof CLICommandFS) {
+ CommandExecutor cmdExecutor = new FSCmdExecutor(nn, new FsShell(conf));
+ return cmdExecutor.executeCommand(cmd.getCmd());
+ } else {
+ throw new IllegalArgumentException("Unknown type of test command: " + cmd.getType());
+ }
+ }
+}
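
[Editor's note, not part of the commit: a rough sketch of the placeholder expansion that TestCLI.expandCommand() and FSCmdExecutor.getCommandAsArgs() apply to a command read from testConf.xml. The example command string and namenode URI are hypothetical; only TEST_DIR_ABSOLUTE ("/tmp/testcli") comes from the code above.]

    import org.apache.bigtop.itest.hadooptests.TestCLI;

    public class PlaceholderExpansionSketch {
      public static void main(String[] args) {
        // Hypothetical command line as it might appear in testConf.xml.
        String cmd = "-fs NAMENODE -ls TEST_DIR_ABSOLUTE";
        String nn = "hdfs://localhost:8020";  // hypothetical fs.defaultFS value
        String expanded = cmd
            .replaceAll("NAMENODE", nn)
            .replaceAll("TEST_DIR_ABSOLUTE", TestCLI.TEST_DIR_ABSOLUTE)  // "/tmp/testcli"
            .replaceAll("USERNAME", System.getProperty("user.name"));
        // Prints: -fs hdfs://localhost:8020 -ls /tmp/testcli
        System.out.println(expanded);
      }
    }
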