[
https://issues.apache.org/jira/browse/KYLIN-4194?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=16952600#comment-16952600
]
wangrupeng edited comment on KYLIN-4194 at 10/16/19 8:23 AM:
-------------------------------------------------------------
Spark job error log:
Driver stacktrace: at
org.apache.spark.scheduler.DAGScheduler.org$apache$spark$scheduler$DAGScheduler$$failJobAndIndependentStages(DAGScheduler.scala:1651)
at
org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1639)
at
org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1638)
at
scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:48) at
org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:1638) at
org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:831)
at
org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:831)
at scala.Option.foreach(Option.scala:257) at
org.apache.spark.scheduler.DAGScheduler.handleTaskSetFailed(DAGScheduler.scala:831)
at
org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.doOnReceive(DAGScheduler.scala:1872)
at
org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1821)
at
org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1810)
at org.apache.spark.util.EventLoop$$anon$1.run(EventLoop.scala:48) at
org.apache.spark.scheduler.DAGScheduler.runJob(DAGScheduler.scala:642) at
org.apache.spark.SparkContext.runJob(SparkContext.scala:2034) at
org.apache.spark.SparkContext.runJob(SparkContext.scala:2055) at
org.apache.spark.SparkContext.runJob(SparkContext.scala:2087) at
org.apache.kylin.engine.spark.MultipleOutputsRDD.saveAsNewAPIHadoopDatasetWithMultipleOutputs(MultipleOutputsRDD.scala:97)
at
org.apache.kylin.engine.spark.SparkFactDistinct.execute(SparkFactDistinct.java:233)
at
org.apache.kylin.common.util.AbstractApplication.execute(AbstractApplication.java:37)
... 11 more
{color:#FF0000}Caused by:
org.apache.kylin.common.KylinConfigCannotInitException: Didn't find KYLIN_CONF
or KYLIN_HOME, please set one of them at{color}
org.apache.kylin.common.KylinConfig.getSitePropertiesFile(KylinConfig.java:336)
at
org.apache.kylin.common.KylinConfig.buildSiteOrderedProps(KylinConfig.java:378)
at
org.apache.kylin.common.KylinConfig.buildSiteProperties(KylinConfig.java:358)
at
org.apache.kylin.common.{color:#FF0000}KylinConfig.getInstanceFromEnv{color}(KylinConfig.java:137)
at
org.apache.kylin.dict.{color:#FF0000}CacheDictionary.enableCache{color}(CacheDictionary.java:105)
at org.apache.kylin.dict.TrieDictionary.init(TrieDictionary.java:119) at
org.apache.kylin.dict.TrieDictionary.<init>(TrieDictionary.java:89) at
org.apache.kylin.dict.TrieDictionaryBuilder.build(TrieDictionaryBuilder.java:419)
at
org.apache.kylin.dict.TrieDictionaryForestBuilder.build(TrieDictionaryForestBuilder.java:110)
at
org.apache.kylin.dict.DictionaryGenerator$NumberTrieDictForestBuilder.build(DictionaryGenerator.java:312)
at
org.apache.kylin.engine.spark.SparkFactDistinct$MultiOutputFunction.call({color:#FF0000}SparkFactDistinct.java:774{color})
at
org.apache.kylin.engine.spark.SparkFactDistinct$MultiOutputFunction.call(SparkFactDistinct.java:650)
at
org.apache.spark.api.java.JavaRDDLike$$anonfun$fn$7$1.apply(JavaRDDLike.scala:186)
at
org.apache.spark.api.java.JavaRDDLike$$anonfun$fn$7$1.apply(JavaRDDLike.scala:186)
at
org.apache.spark.rdd.RDD$$anonfun$mapPartitions$1$$anonfun$apply$23.apply(RDD.scala:801)
at
org.apache.spark.rdd.RDD$$anonfun$mapPartitions$1$$anonfun$apply$23.apply(RDD.scala:801)
at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:49) at
org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:324) at
org.apache.spark.rdd.RDD.iterator(RDD.scala:288) at
org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:87) at
org.apache.spark.scheduler.Task.run(Task.scala:109) at
org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:345) at
java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
at
java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
at java.lang.Thread.run(Thread.java:748)
was (Author: wangrupeng):
Spark job error log:
Driver stacktrace: at
org.apache.spark.scheduler.DAGScheduler.org$apache$spark$scheduler$DAGScheduler$$failJobAndIndependentStages(DAGScheduler.scala:1651)
at
org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1639)
at
org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1638)
at
scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:48) at
org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:1638) at
org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:831)
at
org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:831)
at scala.Option.foreach(Option.scala:257) at
org.apache.spark.scheduler.DAGScheduler.handleTaskSetFailed(DAGScheduler.scala:831)
at
org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.doOnReceive(DAGScheduler.scala:1872)
at
org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1821)
at
org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1810)
at org.apache.spark.util.EventLoop$$anon$1.run(EventLoop.scala:48) at
org.apache.spark.scheduler.DAGScheduler.runJob(DAGScheduler.scala:642) at
org.apache.spark.SparkContext.runJob(SparkContext.scala:2034) at
org.apache.spark.SparkContext.runJob(SparkContext.scala:2055) at
org.apache.spark.SparkContext.runJob(SparkContext.scala:2087) at
org.apache.kylin.engine.spark.MultipleOutputsRDD.saveAsNewAPIHadoopDatasetWithMultipleOutputs(MultipleOutputsRDD.scala:97)
at
org.apache.kylin.engine.spark.SparkFactDistinct.execute(SparkFactDistinct.java:233)
at
org.apache.kylin.common.util.AbstractApplication.execute(AbstractApplication.java:37)
... 11 more
Caused by: org.apache.kylin.common.KylinConfigCannotInitException: Didn't find
KYLIN_CONF or KYLIN_HOME, please set one of them at
org.apache.kylin.common.KylinConfig.getSitePropertiesFile(KylinConfig.java:336)
at
org.apache.kylin.common.KylinConfig.buildSiteOrderedProps(KylinConfig.java:378)
at
org.apache.kylin.common.KylinConfig.buildSiteProperties(KylinConfig.java:358)
at org.apache.kylin.common.KylinConfig.getInstanceFromEnv(KylinConfig.java:137)
at org.apache.kylin.dict.CacheDictionary.enableCache(CacheDictionary.java:105)
at org.apache.kylin.dict.TrieDictionary.init(TrieDictionary.java:119) at
org.apache.kylin.dict.TrieDictionary.<init>(TrieDictionary.java:89) at
org.apache.kylin.dict.TrieDictionaryBuilder.build(TrieDictionaryBuilder.java:419)
at
org.apache.kylin.dict.TrieDictionaryForestBuilder.build(TrieDictionaryForestBuilder.java:110)
at
org.apache.kylin.dict.DictionaryGenerator$NumberTrieDictForestBuilder.build(DictionaryGenerator.java:312)
at
org.apache.kylin.engine.spark.SparkFactDistinct$MultiOutputFunction.call(SparkFactDistinct.java:774)
at
org.apache.kylin.engine.spark.SparkFactDistinct$MultiOutputFunction.call(SparkFactDistinct.java:650)
at
org.apache.spark.api.java.JavaRDDLike$$anonfun$fn$7$1.apply(JavaRDDLike.scala:186)
at
org.apache.spark.api.java.JavaRDDLike$$anonfun$fn$7$1.apply(JavaRDDLike.scala:186)
at
org.apache.spark.rdd.RDD$$anonfun$mapPartitions$1$$anonfun$apply$23.apply(RDD.scala:801)
at
org.apache.spark.rdd.RDD$$anonfun$mapPartitions$1$$anonfun$apply$23.apply(RDD.scala:801)
at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:49) at
org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:324) at
org.apache.spark.rdd.RDD.iterator(RDD.scala:288) at
org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:87) at
org.apache.spark.scheduler.Task.run(Task.scala:109) at
org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:345) at
java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
at
java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
at java.lang.Thread.run(Thread.java:748)
> Throw KylinConfigCannotInitException at STEP "Extract Fact Table Distinct
> Columns" with spark
> ---------------------------------------------------------------------------------------------
>
> Key: KYLIN-4194
> URL: https://issues.apache.org/jira/browse/KYLIN-4194
> Project: Kylin
> Issue Type: Bug
> Affects Versions: v3.0.0-beta
> Reporter: wangrupeng
> Assignee: wangrupeng
> Priority: Major
> Attachments: image-2019-10-16-16-13-00-190.png, kylin_spark.log
>
>
> When I use spark to extract fact table distinct columns values, kylin throw
> an exception,
> org.apache.kylin.common.KylinConfigCannotInitException: Didn't find
> KYLIN_CONF or KYLIN_HOME, please set one of them,
> see more details in [^kylin_spark.log]. I tried CDH 5.7 and HDP 2.4; both
> platforms exhibit this problem.
> It seems like an environment variable issue, but even when I set KYLIN_CONF or
> KYLIN_HOME on each YARN cluster node, it still does not work.
> As we can see from the log, this exception occurs when TrieDictionaryBuilder
> try to build Dictionary<String> objects, but cannot find KylinConfig using
> KylinConfig.getInstanceFromEnv() method.
> My solution is to add code in the Spark job that resets
> KylinConfig.THREAD_ENV_INSTANCE
> from the HDFS metadata, since the Spark job's metadata is stored in HDFS. And
> finally it works fine.
--
This message was sent by Atlassian Jira
(v8.3.4#803005)