This is an automated email from the ASF dual-hosted git repository.

xxyu pushed a commit to branch kylin-on-parquet-v2
in repository https://gitbox.apache.org/repos/asf/kylin.git


The following commit(s) were added to refs/heads/kylin-on-parquet-v2 by this 
push:
     new 3683c24  KYLIN-4790 Automatically copy required jars to SPARK_HOME/jars 
for HDI3.6
3683c24 is described below

commit 3683c243534d2c9bf9dfda2a5f10016b18a9ff6c
Author: yaqian.zhang <598593...@qq.com>
AuthorDate: Fri Oct 16 17:26:56 2020 +0800

    KYLIN-4790 Automatically copy required jars to SPARK_HOME/jars for HDI3.6
---
 build/bin/kylin.sh | 35 +++++++++++++++++++++++++++++++++++
 1 file changed, 35 insertions(+)

diff --git a/build/bin/kylin.sh b/build/bin/kylin.sh
index bb33eae..817227f 100755
--- a/build/bin/kylin.sh
+++ b/build/bin/kylin.sh
@@ -68,6 +68,41 @@ function retrieveDependency() {
         verbose "hdp_version is ${hdp_version}"
     fi
 
+    # Replace jars for HDI
+    KYLIN_SPARK_JARS_HOME="${KYLIN_HOME}/spark/jars"
+    if [[ -d "/usr/hdp/current/hdinsight-zookeeper" && $hdp_version == "2"* ]]
+    then
+       echo "The current Hadoop environment is HDI3, will replace some jar 
packages in ${KYLIN_HOME}/spark/jars"
+       if [[ -d "${KYLIN_SPARK_JARS_HOME}" ]]
+       then
+          if [[ -f ${KYLIN_HOME}/hdi3_spark_jars_flag ]]
+          then
+          echo "Required jars have been added to ${KYLIN_HOME}/spark/jars, 
skip this step."
+          else
+             rm -rf ${KYLIN_HOME}/spark/jars/hadoop-*
+             cp /usr/hdp/current/spark2-client/jars/hadoop-* 
$KYLIN_SPARK_JARS_HOME
+             cp /usr/hdp/current/spark2-client/jars/azure-* 
$KYLIN_SPARK_JARS_HOME
+             cp 
/usr/hdp/current/hadoop-client/lib/microsoft-log4j-etwappender-1.0.jar 
$KYLIN_SPARK_JARS_HOME
+             cp 
/usr/hdp/current/hadoop-client/lib/hadoop-lzo-0.6.0.${hdp_version}.jar 
$KYLIN_SPARK_JARS_HOME
+
+             rm -rf $KYLIN_HOME/spark/jars/guava-14.0.1.jar
+             cp /usr/hdp/current/spark2-client/jars/guava-24.1.1-jre.jar 
$KYLIN_SPARK_JARS_HOME
+
+             echo "Upload spark jars to HDFS"
+             hdfs dfs -test -d /spark2_jars
+             if [ $? -eq 1 ]
+             then
+                hdfs dfs -mkdir /spark2_jars
+             fi
+             hdfs dfs -put $KYLIN_SPARK_JARS_HOME/* /spark2_jars
+
+             touch ${KYLIN_HOME}/hdi3_spark_jars_flag
+          fi
+       else
+          echo "${KYLIN_HOME}/spark/jars does not exist. You can run 
${KYLIN_HOME}/download-spark.sh to download spark."
+       fi
+    fi
+
     tomcat_root=${dir}/../tomcat
     export tomcat_root
 

Reply via email to