This is an automated email from the ASF dual-hosted git repository.

evansye pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/bigtop.git


The following commit(s) were added to refs/heads/master by this push:
     new 383a190  BIGTOP-3216: Flink smoke test failed on Debian-9 (#533)
383a190 is described below

commit 383a190eb53785cea9d4232df061def7f60c0455
Author: Jun <[email protected]>
AuthorDate: Fri May 24 23:09:37 2019 +0800

    BIGTOP-3216: Flink smoke test failed on Debian-9 (#533)
    
    Flink cannot get HDFS's defaultFS info on Debian. Fix this by explicitly
    using the defaultFS URI retrieved from the `hdfs getconf` command.
    
    Change-Id: I648e10da3fd39d6daa9ae0079a47b075260c67ea
    Signed-off-by: Jun He <[email protected]>
---
 bigtop-tests/smoke-tests/flink/TestFlink.groovy | 8 +++++---
 1 file changed, 5 insertions(+), 3 deletions(-)

diff --git a/bigtop-tests/smoke-tests/flink/TestFlink.groovy 
b/bigtop-tests/smoke-tests/flink/TestFlink.groovy
index d569f53..b35d3cb 100644
--- a/bigtop-tests/smoke-tests/flink/TestFlink.groovy
+++ b/bigtop-tests/smoke-tests/flink/TestFlink.groovy
@@ -58,18 +58,20 @@ class TestFlink {
   void testCheckRestfulAPI() {
     // read JM address and port from conf
     execCommand("awk '{if(/jobmanager.rpc.address:/) print \$2}' < "+ 
config_file);
-    final String jmhost = sh.out.join('\n');
+    final String jmHost = sh.out.join('\n');
     execCommand("awk '{if(/jobmanager.web.port:/) print \$2}' < "+config_file);
     final String webPort = sh.out.join('\n');
     // check web API
-    execCommand("curl http://"+jmhost+":"+webPort+"/config");
+    execCommand("curl http://"+jmHost+":"+webPort+"/config");
     final String result = sh.out.join('\n');
     assert(result.contains("flink-version"));
   }
 
   @Test
   void testWordCountBatch() {
-    execCommand("flink run \$FLINK_HOME/examples/batch/WordCount.jar --input 
hdfs:///flink/test.data --output hdfs:///tmp/result.txt")
+    execCommand("hdfs getconf -confKey fs.defaultFS");
+    final String hdfsUri = sh.out.join('\n');
+    execCommand("flink run \$FLINK_HOME/examples/batch/WordCount.jar --input 
"+hdfsUri+"/flink/test.data --output "+hdfsUri+"/tmp/result.txt")
 
     execCommand("hadoop fs -cat /tmp/result.txt")
 

Reply via email to