This is an automated email from the ASF dual-hosted git repository.

shaofengshi pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/kylin.git


The following commit(s) were added to refs/heads/master by this push:
     new a0c662c  KYLIN-3604 Can't build cube with spark in HBase standalone mode
a0c662c is described below

commit a0c662c7656751e21c006611268ff8eb252f0811
Author: Colin Ma <co...@apache.org>
AuthorDate: Tue Oct 23 10:38:29 2018 +0800

    KYLIN-3604 Can't build cube with spark in HBase standalone mode
---
 .../java/org/apache/kylin/storage/hbase/steps/SparkCubeHFile.java | 8 ++++++--
 1 file changed, 6 insertions(+), 2 deletions(-)

diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/SparkCubeHFile.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/SparkCubeHFile.java
index fd32db5..ed05cba 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/SparkCubeHFile.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/SparkCubeHFile.java
@@ -180,8 +180,10 @@ public class SparkCubeHFile extends AbstractApplication implements Serializable
 
             //HBase conf
             logger.info("Loading HBase configuration from:{}", hbaseConfFile);
+            final Path hbaseConfFilePath = new Path(hbaseConfFile);
+            final FileSystem hbaseClusterFs = hbaseConfFilePath.getFileSystem(sc.hadoopConfiguration());
 
-            try (FSDataInputStream confInput = fs.open(new Path(hbaseConfFile))) {
+            try (FSDataInputStream confInput = hbaseClusterFs.open(new Path(hbaseConfFile))) {
                 Configuration hbaseJobConf = new Configuration();
                 hbaseJobConf.addResource(confInput);
                 hbaseJobConf.set("spark.hadoop.dfs.replication", "3"); // HFile, replication=3
@@ -189,7 +191,9 @@ public class SparkCubeHFile extends AbstractApplication implements Serializable
 
                 FileOutputFormat.setOutputPath(job, new Path(outputPath));
 
-                JavaPairRDD<Text, Text> inputRDDs = SparkUtil.parseInputPath(inputPath, fs, sc, Text.class, Text.class);
+                // inputPath has the same FileSystem as hbaseClusterFs when in HBase standalone mode
+                JavaPairRDD<Text, Text> inputRDDs = SparkUtil.parseInputPath(inputPath, hbaseClusterFs, sc, Text.class,
+                        Text.class);
                 final JavaPairRDD<RowKeyWritable, KeyValue> hfilerdd;
                 if (quickPath) {
                     hfilerdd = inputRDDs.mapToPair(new PairFunction<Tuple2<Text, Text>, RowKeyWritable, KeyValue>() {

Reply via email to