[ 
https://issues.apache.org/jira/browse/KYLIN-4663?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=17164149#comment-17164149
 ] 

xieqifeng commented on KYLIN-4663:
----------------------------------

Use the Livy UI to see the actual Spark log.

> In Kylin 3.1.0, submitting Spark jobs via Livy fails, and no clear error 
> information can be obtained from the log
> ------------------------------------------------------------------------------------
>
>                 Key: KYLIN-4663
>                 URL: https://issues.apache.org/jira/browse/KYLIN-4663
>             Project: Kylin
>          Issue Type: Improvement
>          Components: Spark Engine
>    Affects Versions: v3.1.0
>            Reporter: xieqifeng
>            Priority: Minor
>
> {code:java}
> 2020-07-24 09:47:49,283 INFO  [Scheduler 642803291 Job 
> 2cbadcc4-43ea-a36a-4f65-9a088b7168c8-97] 
> hive.CreateFlatHiveTableByLivyStep:41 : Create and distribute table. 
> 2020-07-24 09:47:49,295 INFO  [Scheduler 642803291 Job 
> 2cbadcc4-43ea-a36a-4f65-9a088b7168c8-97] hive.CreateFlatHiveTa
> bleByLivyStep:41 : Livy submit Json: 
> 2020-07-24 09:47:49,296 INFO  [Scheduler 642803291 Job 
> 2cbadcc4-43ea-a36a-4f65-9a088b7168c8-97] hive.CreateFlatHiveTa
> bleByLivyStep:41 : 
> {"className":"org.apache.kylin.engine.spark.SparkSqlOnLivyBatch","args":["set 
> hive.merge.mapredfil
> es=false; \nset hive.auto.convert.join=true; \nset dfs.replication=2; \nset 
> hive.exec.compress.output=true; \nset hiv
> e.auto.convert.join.noconditionaltask=true; \nset 
> mapreduce.job.split.metainfo.maxsize=-1; \nset hive.merge.mapfiles=
> false; \nset hive.auto.convert.join.noconditionaltask.size=100000000; \nset 
> hive.stats.autogather=true; \nUSE default
> ;\nDROP TABLE IF EXISTS 
> kylin_intermediate_kylin_sales_cube_6955e88b_7869_6123_a594_90d2410240c7;\nCREATE
>  EXTERNAL TA
> BLE IF NOT EXISTS 
> kylin_intermediate_kylin_sales_cube_6955e88b_7869_6123_a594_90d2410240c7\n(\n`KYLIN_SALES_TRANS_ID`
>  bigint\n,`KYLIN_SALES_PART_DT` date\n,`KYLIN_SALES_LEAF_CATEG_ID` 
> bigint\n,`KYLIN_SALES_LSTG_SITE_ID` int\n,`KYLIN_C
> ATEGORY_GROUPINGS_META_CATEG_NAME` 
> string\n,`KYLIN_CATEGORY_GROUPINGS_CATEG_LVL2_NAME` 
> string\n,`KYLIN_CATEGORY_GROUP
> INGS_CATEG_LVL3_NAME` string\n,`KYLIN_SALES_LSTG_FORMAT_NAME` 
> string\n,`KYLIN_SALES_SELLER_ID` bigint\n,`KYLIN_SALES_
> BUYER_ID` bigint\n,`BUYER_ACCOUNT_ACCOUNT_BUYER_LEVEL` 
> int\n,`SELLER_ACCOUNT_ACCOUNT_SELLER_LEVEL` int\n,`BUYER_ACCOU
> NT_ACCOUNT_COUNTRY` string\n,`SELLER_ACCOUNT_ACCOUNT_COUNTRY` 
> string\n,`BUYER_COUNTRY_NAME` string\n,`SELLER_COUNTRY_
> NAME` string\n,`KYLIN_SALES_OPS_USER_ID` string\n,`KYLIN_SALES_OPS_REGION` 
> string\n,`KYLIN_SALES_PRICE` decimal(19,4)
> \n)\nSTORED AS SEQUENCEFILE\nLOCATION 
> 'hdfs:\/\/bigdata\/kylin\/kylin_metadata\/kylin-2cbadcc4-43ea-a36a-4f65-9a088b7
> 168c8\/kylin_intermediate_kylin_sales_cube_6955e88b_7869_6123_a594_90d2410240c7';\nALTER
>  TABLE kylin_intermediate_kyl
> in_sales_cube_6955e88b_7869_6123_a594_90d2410240c7 SET 
> TBLPROPERTIES('auto.purge'='true');\nINSERT OVERWRITE TABLE `k
> ylin_intermediate_kylin_sales_cube_6955e88b_7869_6123_a594_90d2410240c7` 
> SELECT\n`KYLIN_SALES`.`TRANS_ID` as `KYLIN_S
> ALES_TRANS_ID`\n,`KYLIN_SALES`.`PART_DT` as 
> `KYLIN_SALES_PART_DT`\n,`KYLIN_SALES`.`LEAF_CATEG_ID` as `KYLIN_SALES_LEA
> F_CATEG_ID`\n,`KYLIN_SALES`.`LSTG_SITE_ID` as 
> `KYLIN_SALES_LSTG_SITE_ID`\n,`KYLIN_CATEGORY_GROUPINGS`.`META_CATEG_NAM
> E` as 
> `KYLIN_CATEGORY_GROUPINGS_META_CATEG_NAME`\n,`KYLIN_CATEGORY_GROUPINGS`.`CATEG_LVL2_NAME`
>  as `KYLIN_CATEGORY_GR
> OUPINGS_CATEG_LVL2_NAME`\n,`KYLIN_CATEGORY_GROUPINGS`.`CATEG_LVL3_NAME` as 
> `KYLIN_CATEGORY_GROUPINGS_CATEG_LVL3_NAME`
> \n,`KYLIN_SALES`.`LSTG_FORMAT_NAME` as 
> `KYLIN_SALES_LSTG_FORMAT_NAME`\n,`KYLIN_SALES`.`SELLER_ID` as `KYLIN_SALES_SEL
> LER_ID`\n,`KYLIN_SALES`.`BUYER_ID` as 
> `KYLIN_SALES_BUYER_ID`\n,`BUYER_ACCOUNT`.`ACCOUNT_BUYER_LEVEL` as 
> `BUYER_ACCOUN
> T_ACCOUNT_BUYER_LEVEL`\n,`SELLER_ACCOUNT`.`ACCOUNT_SELLER_LEVEL` as 
> `SELLER_ACCOUNT_ACCOUNT_SELLER_LEVEL`\n,`BUYER_AC
> COUNT`.`ACCOUNT_COUNTRY` as 
> `BUYER_ACCOUNT_ACCOUNT_COUNTRY`\n,`SELLER_ACCOUNT`.`ACCOUNT_COUNTRY` as 
> `SELLER_ACCOUNT_A
> CCOUNT_COUNTRY`\n,`BUYER_COUNTRY`.`NAME` as 
> `BUYER_COUNTRY_NAME`\n,`SELLER_COUNTRY`.`NAME` as `SELLER_COUNTRY_NAME`\n
> ,`KYLIN_SALES`.`OPS_USER_ID` as 
> `KYLIN_SALES_OPS_USER_ID`\n,`KYLIN_SALES`.`OPS_REGION` as 
> `KYLIN_SALES_OPS_REGION`\n,
> `KYLIN_SALES`.`PRICE` as `KYLIN_SALES_PRICE`\n FROM `DEFAULT`.`KYLIN_SALES` 
> as `KYLIN_SALES`\nINNER JOIN `DEFAULT`.`K
> YLIN_CAL_DT` as `KYLIN_CAL_DT`\nON `KYLIN_SALES`.`PART_DT` = 
> `KYLIN_CAL_DT`.`CAL_DT`\nINNER JOIN `DEFAULT`.`KYLIN_CAT
> EGORY_GROUPINGS` as `KYLIN_CATEGORY_GROUPINGS`\nON 
> `KYLIN_SALES`.`LEAF_CATEG_ID` = `KYLIN_CATEGORY_GROUPINGS`.`LEAF_C
> ATEG_ID` AND `KYLIN_SALES`.`LSTG_SITE_ID` = 
> `KYLIN_CATEGORY_GROUPINGS`.`SITE_ID`\nINNER JOIN `DEFAULT`.`KYLIN_ACCOUNT
> ` as `BUYER_ACCOUNT`\nON `KYLIN_SALES`.`BUYER_ID` = 
> `BUYER_ACCOUNT`.`ACCOUNT_ID`\nINNER JOIN `DEFAULT`.`KYLIN_ACCOUNT
> ` as `SELLER_ACCOUNT`\nON `KYLIN_SALES`.`SELLER_ID` = 
> `SELLER_ACCOUNT`.`ACCOUNT_ID`\nINNER JOIN `DEFAULT`.`KYLIN_COUN
> TRY` as `BUYER_COUNTRY`\nON `BUYER_ACCOUNT`.`ACCOUNT_COUNTRY` = 
> `BUYER_COUNTRY`.`COUNTRY`\nINNER JOIN `DEFAULT`.`KYLI
> N_COUNTRY` as `SELLER_COUNTRY`\nON `SELLER_ACCOUNT`.`ACCOUNT_COUNTRY` = 
> `SELLER_COUNTRY`.`COUNTRY`\nWHERE 1=1 AND (`K
> YLIN_SALES`.`PART_DT` >= '2012-01-01' AND `KYLIN_SALES`.`PART_DT` < 
> '2012-01-01')\n;\n"],"file":"hdfs:\/\/\/kylin\/ky
> lin-job-3.0.0-SNAPSHOT.jar","jars":["hdfs:\/\/\/kylin\/hbase-client-2.0.0-cdh6.0.1.jar","hdfs:\/\/\/kylin\/hbase-comm
> on-2.0.0-cdh6.0.1.jar","hdfs:\/\/\/kylin\/hbase-hadoop-compat-2.0.0-cdh6.0.1.jar","hdfs:\/\/\/kylin\/hbase-hadoop2-co
> mpat-2.0.0-cdh6.0.1.jar","hdfs:\/\/\/kylin\/hbase-server-2.0.0-cdh6.0.1.jar","hdfs:\/\/\/kylin\/htrace-core-3.2.0-inc
> ubating.jar","hdfs:\/\/\/kylin\/metrics-core-3.2.0.jar"],"conf":{}}
> {code}
> {code:java}
> 2020-07-24 09:47:59,841 INFO  [Scheduler 642803291 Job 
> 2cbadcc4-43ea-a36a-4f65-9a088b7168c8-97] hive.CreateFlatHiveTa
> bleByLivyStep:41 : spark-submit start failed
> 2020-07-24 09:47:59,841 INFO  [Scheduler 642803291 Job 
> 2cbadcc4-43ea-a36a-4f65-9a088b7168c8-97] hive.CreateFlatHiveTa
> bleByLivyStep:41 : 
> {"id":0,"name":null,"owner":null,"proxyUser":null,"state":"dead","appId":null,"appInfo":{"driverLogUrl":null,"sparkUiUrl":null}}
> 2020-07-24 09:48:09,853 INFO  [Scheduler 642803291 Job 
> 2cbadcc4-43ea-a36a-4f65-9a088b7168c8-97] 
> hive.CreateFlatHiveTableByLivyStep:41 : livy start execute failed. state is 
> dead. log is        at scala.Option.map(Option.scala:146)
>         at 
> org.apache.spark.deploy.SparkSubmit.prepareSubmitEnvironment(SparkSubmit.scala:366)
>         at org.apache.spark.deploy.SparkSubmit.submit(SparkSubmit.scala:143)
>         at org.apache.spark.deploy.SparkSubmit.doSubmit(SparkSubmit.scala:86)
>         at 
> org.apache.spark.deploy.SparkSubmit$$anon$2.doSubmit(SparkSubmit.scala:924)
>         at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:933)
>         at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
> stderr:
> YARN Diagnostics: 
>  spark-submit start failed
>  at 
> org.apache.kylin.job.execution.AbstractExecutable.execute(AbstractExecutable.java:193)
>  at 
> org.apache.kylin.job.impl.threadpool.DefaultScheduler$JobRunner.run(DefaultScheduler.java:114)
>  at 
> java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
>  at 
> java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
>  at java.lang.Thread.run(Thread.java:748)
>  Caused by: org.apache.kylin.job.exception.ExecuteException: 
> java.lang.RuntimeException: livy execute failed. 
>  livy get status failed. state is dead. log is at 
> scala.Option.map(Option.scala:146)
>  at 
> org.apache.spark.deploy.SparkSubmit.prepareSubmitEnvironment(SparkSubmit.scala:366)
>  at org.apache.spark.deploy.SparkSubmit.submit(SparkSubmit.scala:143)
>  at org.apache.spark.deploy.SparkSubmit.doSubmit(SparkSubmit.scala:86)
>  at 
> org.apache.spark.deploy.SparkSubmit$$anon$2.doSubmit(SparkSubmit.scala:924)
>  at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:933)
>  at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
> stderr:
> YARN Diagnostics: 
>  spark-submit start failed
>  at 
> org.apache.kylin.job.execution.AbstractExecutable.execute(AbstractExecutable.java:193)
>  at 
> org.apache.kylin.job.execution.DefaultChainedExecutable.doWork(DefaultChainedExecutable.java:71)
>  at 
> org.apache.kylin.job.execution.AbstractExecutable.execute(AbstractExecutable.java:178)
>  ... 4 more
>  Caused by: java.lang.RuntimeException: livy execute failed. 
>  livy get status failed. state is dead. log is at 
> scala.Option.map(Option.scala:146)
>  at 
> org.apache.spark.deploy.SparkSubmit.prepareSubmitEnvironment(SparkSubmit.scala:366)
>  at org.apache.spark.deploy.SparkSubmit.submit(SparkSubmit.scala:143)
>  at org.apache.spark.deploy.SparkSubmit.doSubmit(SparkSubmit.scala:86)
>  at 
> org.apache.spark.deploy.SparkSubmit$$anon$2.doSubmit(SparkSubmit.scala:924)
>  at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:933)
>  at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
> stderr:
> YARN Diagnostics: 
>  spark-submit start failed
>  at 
> org.apache.kylin.common.livy.LivyRestExecutor.execute(LivyRestExecutor.java:85)
>  at 
> org.apache.kylin.source.hive.MRHiveDictUtil.runLivySqlJob(MRHiveDictUtil.java:225)
>  at 
> org.apache.kylin.source.hive.CreateFlatHiveTableByLivyStep.createFlatHiveTable(CreateFlatHiveTableByLivyStep.java:45)
>  at 
> org.apache.kylin.source.hive.CreateFlatHiveTableByLivyStep.doWork(CreateFlatHiveTableByLivyStep.java:60)
>  at 
> org.apache.kylin.job.execution.AbstractExecutable.execute(AbstractExecutable.java:178)
>  ... 6 more
> {code}
> {code:java}
> kylin.engine.livy-conf.livy-key.file=hdfs:///kylin/kylin-job-3.0.0-SNAPSHOT.jar
> kylin.engine.livy-conf.livy-arr.jars=hdfs:///kylin/hbase-client-2.0.0-cdh6.0.1.jar,hdfs:///kylin/hbase-common-2.0.0-cdh6.0.1.jar,hdfs:///kylin/hbase-hadoop-compat-2.0.0-cdh6.0.1.jar,hdfs:///kylin/hbase-hadoop2-compat-2.0.0-cdh6.0.1.jar,hdfs:///kylin/hbase-server-2.0.0-cdh6.0.1.jar,hdfs:///kylin/htrace-core-3.2.0-incubating.jar,hdfs:///kylin/metrics-core-3.2.0.jar
> {code}



--
This message was sent by Atlassian Jira
(v8.3.4#803005)

Reply via email to