This is an automated email from the ASF dual-hosted git repository.
evansye pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/bigtop.git
The following commit(s) were added to refs/heads/master by this push:
new d5e57f7 BIGTOP-3129. Failed to run QFS smoke test (#439)
d5e57f7 is described below
commit d5e57f786ecf7876000a842e0a186aee1ed76dd6
Author: Evans Ye <[email protected]>
AuthorDate: Thu Jan 10 14:29:16 2019 +0800
BIGTOP-3129. Failed to run QFS smoke test (#439)
---
.../puppet/modules/hadoop/templates/core-site.xml | 11 +++++++++++
bigtop-deploy/puppet/modules/qfs/manifests/init.pp | 8 ++++++++
bigtop-tests/smoke-tests/qfs/build.gradle | 9 +--------
.../itest/hadoop/mapreduce/TestHadoopExamples.groovy | 20 ++++++++++++--------
provisioner/utils/smoke-tests.sh | 4 +++-
5 files changed, 35 insertions(+), 17 deletions(-)
diff --git a/bigtop-deploy/puppet/modules/hadoop/templates/core-site.xml b/bigtop-deploy/puppet/modules/hadoop/templates/core-site.xml
index 1ca75f4..f4aab69 100644
--- a/bigtop-deploy/puppet/modules/hadoop/templates/core-site.xml
+++ b/bigtop-deploy/puppet/modules/hadoop/templates/core-site.xml
@@ -156,6 +156,17 @@
<name>hadoop.http.authentication.cookie.domain</name>
<value><%= @hadoop_http_authentication_cookie_domain %></value>
</property>
+<% end -%>
+<% if (scope['::hadoop_cluster_node::cluster_components']).include? 'qfs' -%>
+ <!-- run mapreduce over QFS -->
+ <property>
+ <name>fs.AbstractFileSystem.qfs.impl</name>
+ <value>com.quantcast.qfs.hadoop.Qfs</value>
+ </property>
+ <property>
+ <name>fs.qfs.impl</name>
+ <value>com.quantcast.qfs.hadoop.QuantcastFileSystem</value>
+ </property>
<% end -%>
</configuration>
diff --git a/bigtop-deploy/puppet/modules/qfs/manifests/init.pp b/bigtop-deploy/puppet/modules/qfs/manifests/init.pp
index e0fdf54..080701a 100644
--- a/bigtop-deploy/puppet/modules/qfs/manifests/init.pp
+++ b/bigtop-deploy/puppet/modules/qfs/manifests/init.pp
@@ -159,5 +159,13 @@ class qfs {
content => template("qfs/hadoop-qfs"),
mode => '0755',
}
+
+ # Add QFS native lib into Hadoop native lib dir
+ exec { "add_qfs_native_lib":
+ path => ['/bin','/sbin','/usr/bin','/usr/sbin'],
+    command => 'find /usr/lib/qfs/ -name "lib*" -exec ln -s {} /usr/lib/hadoop/lib/native \;',
+ require => Package["qfs-client"],
+    notify  => [ Service["hadoop-yarn-nodemanager"], Service["hadoop-yarn-resourcemanager"] ],
+ }
}
}
diff --git a/bigtop-tests/smoke-tests/qfs/build.gradle b/bigtop-tests/smoke-tests/qfs/build.gradle
index ca25635..f03e6e4 100644
--- a/bigtop-tests/smoke-tests/qfs/build.gradle
+++ b/bigtop-tests/smoke-tests/qfs/build.gradle
@@ -16,14 +16,7 @@
* limitations under the License.
*/
def tests_to_include() {
- def smoke_tests = System.getenv('SMOKE_TESTS')
- if (smoke_tests == null || !smoke_tests.contains("qfs")) {
- return [];
- }
-
- return [
- "TestHadoopExamples.groovy"
- ];
+ return ["TestHadoopExamples.groovy"];
}
sourceSets {
diff --git a/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/mapreduce/TestHadoopExamples.groovy b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/mapreduce/TestHadoopExamples.groovy
index 77af19c..1033180 100644
--- a/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/mapreduce/TestHadoopExamples.groovy
+++ b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/mapreduce/TestHadoopExamples.groovy
@@ -45,7 +45,11 @@ class TestHadoopExamples {
// The hadoop command is dynamic in order to support both hadoop over hdfs
// and hadoop over qfs easily.
-  private static final String HADOOP_COMMAND = System.getProperty('HADOOP_COMMAND', 'hadoop');
+  private static final String HADOOP_COMMAND = System.getenv('HADOOP_COMMAND') ?: "hadoop";
+
+  // BIGTOP-3129: Only yarn can successfully write to staging dir, hence workaround by running as yarn.
+  private static final String BIGTOP_TEST_USER = "${HADOOP_COMMAND}" == "hadoop-qfs" ?
+      "yarn" : (System.getenv('BIGTOP_TEST_USER') ?: System.getenv('USER'))
private static String hadoopExamplesJar =
JarContent.getJarName(HADOOP_MAPRED_HOME, 'hadoop.*examples.*.jar');
@@ -78,8 +82,8 @@ class TestHadoopExamples {
@AfterClass
public static void tearDown() {
- sh.exec("${HADOOP_COMMAND} fs -rmr -skipTrash ${EXAMPLES}",
- "${HADOOP_COMMAND} fs -rmr -skipTrash ${EXAMPLES_OUT}");
+    sh.exec("su -s /bin/bash $BIGTOP_TEST_USER -c '${HADOOP_COMMAND} fs -rmr -skipTrash ${EXAMPLES}'",
+        "su -s /bin/bash $BIGTOP_TEST_USER -c '${HADOOP_COMMAND} fs -rmr -skipTrash ${EXAMPLES_OUT}'");
}
@@ -101,12 +105,12 @@ class TestHadoopExamples {
     LOG.info("MAKING DIRECTORIES ..................... ${EXAMPLES} ${EXAMPLES_OUT}");
//add the files in resources/
- sh.exec("${HADOOP_COMMAND} fs -put ${source}/*.* .");
+      sh.exec("su -s /bin/bash $BIGTOP_TEST_USER -c '${HADOOP_COMMAND} fs -put ${source}/*.* .'");
//add the directories under resources (like examples/)
- sh.exec("${HADOOP_COMMAND} fs -put ${source}/${EXAMPLES} ${EXAMPLES}");
- sh.exec("${HADOOP_COMMAND} fs -mkdir -p ${EXAMPLES_OUT}");
+      sh.exec("su -s /bin/bash $BIGTOP_TEST_USER -c '${HADOOP_COMMAND} fs -put ${source}/${EXAMPLES} ${EXAMPLES}'");
+      sh.exec("su -s /bin/bash $BIGTOP_TEST_USER -c '${HADOOP_COMMAND} fs -mkdir -p ${EXAMPLES_OUT}'");
}
- sh.exec("${HADOOP_COMMAND} fs -ls ${EXAMPLES}");
+    sh.exec("su -s /bin/bash $BIGTOP_TEST_USER -c '${HADOOP_COMMAND} fs -ls ${EXAMPLES}'");
assertTrue("Failed asserting that 'examples' were created in the DFS",
sh.getRet() == 0);
}
@@ -158,7 +162,7 @@ class TestHadoopExamples {
|| FailureVars.instance.getNetworkShutdown()) {
runFailureThread();
}
- sh.exec("${HADOOP_COMMAND} jar $testJar $testName $testArgs");
+    sh.exec("su -s /bin/bash $BIGTOP_TEST_USER -c '${HADOOP_COMMAND} jar $testJar $testName $testArgs'");
assertTrue("Example $testName $testJar $testName $testArgs failed",
sh.getRet() == 0);
}
diff --git a/provisioner/utils/smoke-tests.sh b/provisioner/utils/smoke-tests.sh
index 1a60bf7..728b9bd 100755
--- a/provisioner/utils/smoke-tests.sh
+++ b/provisioner/utils/smoke-tests.sh
@@ -51,13 +51,15 @@ echo -e "\n===== START TO RUN SMOKE TESTS: $SMOKE_TESTS =====\n"
prep() {
HADOOP_COMMAND=$1
-    su -s /bin/bash $HCFS_USER -c "JAVA_LIBRARY_PATH=/usr/lib/qfs $HADOOP_COMMAND fs -mkdir /user/vagrant /user/root"
+    su -s /bin/bash $HCFS_USER -c "JAVA_LIBRARY_PATH=/usr/lib/qfs $HADOOP_COMMAND fs -mkdir -p /user/vagrant /user/root /user/yarn"
     su -s /bin/bash $HCFS_USER -c "JAVA_LIBRARY_PATH=/usr/lib/qfs $HADOOP_COMMAND fs -chmod 777 /user/vagrant"
     su -s /bin/bash $HCFS_USER -c "JAVA_LIBRARY_PATH=/usr/lib/qfs $HADOOP_COMMAND fs -chmod 777 /user/root"
+    su -s /bin/bash $HCFS_USER -c "JAVA_LIBRARY_PATH=/usr/lib/qfs $HADOOP_COMMAND fs -chown yarn:yarn /user/yarn"
}
prep hadoop
if [[ $SMOKE_TESTS == *"qfs"* ]]; then
+ HCFS_USER=root
prep hadoop-qfs
fi