This is an automated email from the ASF dual-hosted git repository. casion pushed a commit to branch master in repository https://gitbox.apache.org/repos/asf/linkis.git
commit 5c5259599676c8c34c5009b66d21ae5771c21e98 Author: peter.peng <[email protected]> AuthorDate: Tue Sep 26 14:01:31 2023 +0800 add service check for HDFS/Hive/Spark --- linkis-dist/bin/checkEnv.sh | 101 ++++++++++++++++++++++++++++++++++++-------- 1 file changed, 84 insertions(+), 17 deletions(-) diff --git a/linkis-dist/bin/checkEnv.sh b/linkis-dist/bin/checkEnv.sh index 9efae6865..68682a233 100644 --- a/linkis-dist/bin/checkEnv.sh +++ b/linkis-dist/bin/checkEnv.sh @@ -30,20 +30,50 @@ err() { } function checkPythonAndJava(){ + python --version > /dev/null 2>&1 isSuccess "execute cmd: python --version" + java -version > /dev/null 2>&1 isSuccess "execute cmd: java --version" } function checkHdfs(){ - hadoopVersion="`hdfs version`" + +# --- 1. check command + hdfs version > /dev/null 2>&1 + isSuccess "execute cmd: hdfs version" + +# --- 2. check version + hadoopVersion=`hdfs version` defaultHadoopVersion="3.3" + checkversion "$hadoopVersion" $defaultHadoopVersion hadoop + +# ---3. check service status + hdfs dfsadmin -report > /dev/null 2>&1 + isSuccess "execute cmd: hdfs dfsadmin -report" + } function checkHive(){ + +# --- 1. check command + hive --version > /dev/null 2>&1 + isSuccess "execute cmd: hive --version" + +# --- 2. check version & Parameters checkversion "$(whereis hive)" "3.1" hive + + if [ -z "${HIVE_META_URL}" ] || [ -z "${HIVE_META_USER}" ] || [ -z "${MYSQL_PASSWORD}" ] ;then + echo "Parameter [HIVE_META_URL/HIVE_META_USER/MYSQL_PASSWORD] are Invalid,Pls check" + exit 2 + fi + +# --- 3. check server status + beeline -u${HIVE_META_URL} -n${HIVE_META_USER} -p${MYSQL_PASSWORD} > /dev/null 2>&1 + isSuccess "execute cmd: beeline -u${HIVE_META_URL} " + } function checkversion(){ @@ -70,8 +100,32 @@ fi } function checkSpark(){ + +# --- 1. check command spark-submit --version > /dev/null 2>&1 isSuccess "execute cmd: spark-submit --version " + +# --- 2. 
check Parameters + if [ -z "${SPARK_HOME}" ];then + echo "Parameter SPARK_HOME is not valid, Please check" + exit 2 + fi + +# --- 3. check server status + spark-submit --class org.apache.spark.examples.SparkPi --master local ${SPARK_HOME}/examples/jars/spark-examples_2.12-3.2.1.jar 10 > /dev/null 2>&1 + isSuccess "execute cmd: spark-submit --class org.apache.spark.examples.SparkPi " + +} + +function checkMysql(){ + + if [ -z "${MYSQL_HOST}" ] || [ -z "${MYSQL_PORT}" ] || [ -z "${MYSQL_DB}" ] || [ -z "${MYSQL_USER}" ] || [ -z "${MYSQL_PASSWORD}" ];then + echo "Parameter [MYSQL_HOST/MYSQL_PORT/MYSQL_DB/MYSQL_USER/MYSQL_PASSWORD] are Invalid,Pls check parameter define" + exit 2 + fi + + mysql -h${MYSQL_HOST} -P${MYSQL_PORT} -u${MYSQL_USER} -p${MYSQL_PASSWORD} -e "select version();">/dev/null 2>&1 + isSuccess "execute cmd: mysql -h${MYSQL_HOST} -P${MYSQL_PORT}" } portIsOccupy=false @@ -94,7 +148,8 @@ need_cmd() { } -echo "<-----start to check used cmd---->" +echo -e "1. <-----start to check used cmd---->\n" + echo "check yum" need_cmd yum echo "check java" @@ -112,19 +167,41 @@ need_cmd lsof echo "check hdfs" need_cmd hdfs - echo "check shell" need_cmd $SHELL - +echo "check spark-submit" +need_cmd spark-submit +echo "check spark-shell" +need_cmd spark-shell echo "check spark-sql" need_cmd spark-sql - echo "check hadoop" need_cmd hadoop -echo "<-----end to check used cmd---->" +echo -e "\n<-----end to check used cmd---->" + +# --- Begin to check Spark/HDFS/Hive Service Status +echo -e "\n2. <-----start to check service status---->\n" checkPythonAndJava +checkMysql + +if [ "$ENABLE_SPARK" == "true" ]; then + checkSpark +fi + +if [ "$ENABLE_HDFS" == "true" ]; then + checkHdfs +fi + +if [ "$ENABLE_HIVE" == "true" ]; then + checkHive +fi + +echo -e "\n<-----End to check service status---->" + +# --- check Service Port +echo -e "\n3. 
<-----Start to check service Port---->" SERVER_PORT=$EUREKA_PORT check_service_port @@ -150,14 +227,4 @@ if [ "$portIsOccupy" = true ];then exit 1 fi -if [ "$ENABLE_SPARK" == "true" ]; then - checkSpark -fi - -if [ "$ENABLE_HDFS" == "true" ]; then - checkHdfs -fi - -if [ "$ENABLE_HIVE" == "true" ]; then - checkHive -fi +echo -e "\n<-----End to check service Port---->" \ No newline at end of file --------------------------------------------------------------------- To unsubscribe, e-mail: [email protected] For additional commands, e-mail: [email protected]
