See <https://builds.apache.org/job/carbondata-master-spark-2.1/org.apache.carbondata$carbondata-mv-core/3737/display/redirect>

------------------------------------------
[...truncated 896.82 KB...]
        at scala.Option.foreach(Option.scala:257)
        at org.apache.spark.scheduler.DAGScheduler.handleTaskSetFailed(DAGScheduler.scala:802)
        at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.doOnReceive(DAGScheduler.scala:1650)
        at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1605)
        at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1594)
        at org.apache.spark.util.EventLoop$$anon$1.run(EventLoop.scala:48)
        at org.apache.spark.scheduler.DAGScheduler.runJob(DAGScheduler.scala:628)
        at org.apache.spark.SparkContext.runJob(SparkContext.scala:1918)
        at org.apache.spark.SparkContext.runJob(SparkContext.scala:1931)
        at org.apache.spark.SparkContext.runJob(SparkContext.scala:1944)
        at org.apache.spark.SparkContext.runJob(SparkContext.scala:1958)
        at org.apache.spark.rdd.RDD$$anonfun$collect$1.apply(RDD.scala:935)
        at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
        at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
        at org.apache.spark.rdd.RDD.withScope(RDD.scala:362)
        at org.apache.spark.rdd.RDD.collect(RDD.scala:934)
        at org.apache.carbondata.spark.rdd.CarbonDataRDDFactory$.loadDataFrame(CarbonDataRDDFactory.scala:1140)
        at org.apache.carbondata.spark.rdd.CarbonDataRDDFactory$.loadCarbonData(CarbonDataRDDFactory.scala:382)
        at org.apache.spark.sql.execution.command.management.CarbonLoadDataCommand.loadData(CarbonLoadDataCommand.scala:627)
        at org.apache.spark.sql.execution.command.management.CarbonLoadDataCommand.processData(CarbonLoadDataCommand.scala:356)
        at org.apache.spark.sql.execution.command.AtomicRunnableCommand$$anonfun$run$3.apply(package.scala:148)
        at org.apache.spark.sql.execution.command.AtomicRunnableCommand$$anonfun$run$3.apply(package.scala:145)
        at org.apache.spark.sql.execution.command.Auditable$class.runWithAudit(package.scala:104)
        at org.apache.spark.sql.execution.command.AtomicRunnableCommand.runWithAudit(package.scala:141)
        at org.apache.spark.sql.execution.command.AtomicRunnableCommand.run(package.scala:145)
        at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult$lzycompute(commands.scala:58)
        at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult(commands.scala:56)
        at org.apache.spark.sql.execution.command.ExecutedCommandExec.doExecute(commands.scala:74)
        at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:114)
        at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:114)
        at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:135)
        at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
        at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:132)
        at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:113)
        at org.apache.spark.sql.execution.QueryExecution.toRdd$lzycompute(QueryExecution.scala:87)
        at org.apache.spark.sql.execution.QueryExecution.toRdd(QueryExecution.scala:87)
        at org.apache.spark.sql.Dataset.<init>(Dataset.scala:185)
        at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:64)
        at org.apache.spark.sql.util.SparkSQLUtil$.execute(SparkSQLUtil.scala:38)
        at org.apache.carbondata.mv.datamap.MVDataMapProvider.rebuildInternal(MVDataMapProvider.scala:160)
        at org.apache.carbondata.core.datamap.DataMapProvider.rebuild(DataMapProvider.java:240)
        at org.apache.spark.sql.execution.command.datamap.CarbonDataMapRebuildCommand.processData(CarbonDataMapRebuildCommand.scala:74)
        at org.apache.spark.sql.execution.command.DataCommand$$anonfun$run$2.apply(package.scala:132)
        at org.apache.spark.sql.execution.command.DataCommand$$anonfun$run$2.apply(package.scala:132)
        at org.apache.spark.sql.execution.command.Auditable$class.runWithAudit(package.scala:104)
        at org.apache.spark.sql.execution.command.DataCommand.runWithAudit(package.scala:130)
        at org.apache.spark.sql.execution.command.DataCommand.run(package.scala:132)
        at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult$lzycompute(commands.scala:58)
        at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult(commands.scala:56)
        at org.apache.spark.sql.execution.command.ExecutedCommandExec.doExecute(commands.scala:74)
        at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:114)
        at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:114)
        at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:135)
        at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
        at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:132)
        at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:113)
        at org.apache.spark.sql.execution.QueryExecution.toRdd$lzycompute(QueryExecution.scala:87)
        at org.apache.spark.sql.execution.QueryExecution.toRdd(QueryExecution.scala:87)
        at org.apache.spark.sql.Dataset.<init>(Dataset.scala:185)
        at org.apache.spark.sql.CarbonSession$$anonfun$sql$1.apply(CarbonSession.scala:91)
        at org.apache.spark.sql.CarbonSession$$anonfun$sql$1.apply(CarbonSession.scala:90)
        at org.apache.spark.sql.CarbonSession.withProfiler(CarbonSession.scala:136)
        at org.apache.spark.sql.CarbonSession.sql(CarbonSession.scala:88)
        at org.apache.spark.sql.test.Spark2TestQueryExecutor.sql(Spark2TestQueryExecutor.scala:35)
        at org.apache.spark.sql.test.util.QueryTest.sql(QueryTest.scala:113)
        at org.apache.carbondata.mv.rewrite.TestAllOperationsOnMV.beforeEach(TestAllOperationsOnMV.scala:49)
        at org.scalatest.BeforeAndAfterEach$class.beforeEach(BeforeAndAfterEach.scala:154)
        at org.apache.carbondata.mv.rewrite.TestAllOperationsOnMV.beforeEach(TestAllOperationsOnMV.scala:37)
        at org.scalatest.BeforeAndAfterEach$class.beforeEach(BeforeAndAfterEach.scala:173)
        at org.apache.carbondata.mv.rewrite.TestAllOperationsOnMV.beforeEach(TestAllOperationsOnMV.scala:37)
        at org.scalatest.BeforeAndAfterEach$class.runTest(BeforeAndAfterEach.scala:253)
        at org.apache.carbondata.mv.rewrite.TestAllOperationsOnMV.runTest(TestAllOperationsOnMV.scala:37)
        at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
        at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
        at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:413)
        at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:401)
        at scala.collection.immutable.List.foreach(List.scala:381)
        at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:401)
        at org.scalatest.SuperEngine.org$scalatest$SuperEngine$$runTestsInBranch(Engine.scala:396)
        at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:483)
        at org.scalatest.FunSuiteLike$class.runTests(FunSuiteLike.scala:208)
        at org.scalatest.FunSuite.runTests(FunSuite.scala:1555)
        at org.scalatest.Suite$class.run(Suite.scala:1424)
        at org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1555)
        at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
        at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
        at org.scalatest.SuperEngine.runImpl(Engine.scala:545)
        at org.scalatest.FunSuiteLike$class.run(FunSuiteLike.scala:212)
        at org.scalatest.FunSuite.run(FunSuite.scala:1555)
        at org.scalatest.Suite$class.callExecuteOnSuite$1(Suite.scala:1492)
        at org.scalatest.Suite$$anonfun$runNestedSuites$1.apply(Suite.scala:1528)
        at org.scalatest.Suite$$anonfun$runNestedSuites$1.apply(Suite.scala:1526)
        at scala.collection.IndexedSeqOptimized$class.foreach(IndexedSeqOptimized.scala:33)
        at scala.collection.mutable.ArrayOps$ofRef.foreach(ArrayOps.scala:186)
        at org.scalatest.Suite$class.runNestedSuites(Suite.scala:1526)
        at org.scalatest.tools.DiscoverySuite.runNestedSuites(DiscoverySuite.scala:29)
        at org.scalatest.Suite$class.run(Suite.scala:1421)
        at org.scalatest.tools.DiscoverySuite.run(DiscoverySuite.scala:29)
        at org.scalatest.tools.SuiteRunner.run(SuiteRunner.scala:55)
        at org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$3.apply(Runner.scala:2563)
        at org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$3.apply(Runner.scala:2557)
        at scala.collection.immutable.List.foreach(List.scala:381)
        at org.scalatest.tools.Runner$.doRunRunRunDaDoRunRun(Runner.scala:2557)
        at org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1044)
        at org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1043)
        at org.scalatest.tools.Runner$.withClassLoaderAndDispatchReporter(Runner.scala:2722)
        at org.scalatest.tools.Runner$.runOptionallyWithPassFailReporter(Runner.scala:1043)
        at org.scalatest.tools.Runner$.main(Runner.scala:860)
        at org.scalatest.tools.Runner.main(Runner.scala)
Caused by: java.io.FileNotFoundException: /tmp/blockmgr-394ca08a-0e95-434d-804b-a6821f1d2532/2d/shuffle_263_0_0.data.c8b374a6-a1b2-4078-a97a-cc3c24ac5573 (No such file or directory)
        at java.io.FileOutputStream.open0(Native Method)
        at java.io.FileOutputStream.open(FileOutputStream.java:270)
        at java.io.FileOutputStream.<init>(FileOutputStream.java:213)
        at org.apache.spark.shuffle.sort.BypassMergeSortShuffleWriter.writePartitionedFile(BypassMergeSortShuffleWriter.java:192)
        at org.apache.spark.shuffle.sort.BypassMergeSortShuffleWriter.write(BypassMergeSortShuffleWriter.java:164)
        at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:96)
        at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:53)
        at org.apache.spark.scheduler.Task.run(Task.scala:99)
        at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:282)
        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
        at java.lang.Thread.run(Thread.java:748)
2019-09-15 09:54:24 ERROR CarbonDataRDDFactory$:438 - org.apache.spark.SparkException: Job aborted due to stage failure: Task 0 in stage 1693.0 failed 1 times, most recent failure: Lost task 0.0 in stage 1693.0 (TID 30011, localhost, executor driver): java.io.FileNotFoundException: /tmp/blockmgr-394ca08a-0e95-434d-804b-a6821f1d2532/2d/shuffle_263_0_0.data.c8b374a6-a1b2-4078-a97a-cc3c24ac5573 (No such file or directory)
        at java.io.FileOutputStream.open0(Native Method)
        at java.io.FileOutputStream.open(FileOutputStream.java:270)
        at java.io.FileOutputStream.<init>(FileOutputStream.java:213)
        at org.apache.spark.shuffle.sort.BypassMergeSortShuffleWriter.writePartitionedFile(BypassMergeSortShuffleWriter.java:192)
        at org.apache.spark.shuffle.sort.BypassMergeSortShuffleWriter.write(BypassMergeSortShuffleWriter.java:164)
        at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:96)
        at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:53)
        at org.apache.spark.scheduler.Task.run(Task.scala:99)
        at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:282)
        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
        at java.lang.Thread.run(Thread.java:748)

Driver stacktrace:
2019-09-15 09:54:24 ERROR CarbonLoadDataCommand:390 - java.lang.Exception: DataLoad failure: /tmp/blockmgr-394ca08a-0e95-434d-804b-a6821f1d2532/2d/shuffle_263_0_0.data.c8b374a6-a1b2-4078-a97a-cc3c24ac5573 (No such file or directory)
2019-09-15 09:54:24 ERROR CarbonLoadDataCommand:166 - Got exception java.lang.Exception: DataLoad failure: /tmp/blockmgr-394ca08a-0e95-434d-804b-a6821f1d2532/2d/shuffle_263_0_0.data.c8b374a6-a1b2-4078-a97a-cc3c24ac5573 (No such file or directory) when processing data. But this command does not support undo yet, skipping the undo part.
2019-09-15 09:54:24 AUDIT audit:93 - {"time":"September 15, 2019 2:54:24 AM PDT","username":"jenkins","opName":"LOAD DATA","opId":"25177498565352557","opStatus":"FAILED","opTime":"267 ms","table":"default.dm1_table","extraInfo":{"Exception":"java.lang.Exception","Message":"DataLoad failure: /tmp/blockmgr-394ca08a-0e95-434d-804b-a6821f1d2532/2d/shuffle_263_0_0.data.c8b374a6-a1b2-4078-a97a-cc3c24ac5573 (No such file or directory)"}}
2019-09-15 09:54:24 ERROR MVDataMapProvider:166 - Data Load failed for DataMap: java.lang.Exception: DataLoad failure: /tmp/blockmgr-394ca08a-0e95-434d-804b-a6821f1d2532/2d/shuffle_263_0_0.data.c8b374a6-a1b2-4078-a97a-cc3c24ac5573 (No such file or directory)
        at org.apache.carbondata.spark.rdd.CarbonDataRDDFactory$.loadCarbonData(CarbonDataRDDFactory.scala:501)
        at org.apache.spark.sql.execution.command.management.CarbonLoadDataCommand.loadData(CarbonLoadDataCommand.scala:627)
        at org.apache.spark.sql.execution.command.management.CarbonLoadDataCommand.processData(CarbonLoadDataCommand.scala:356)
        at org.apache.spark.sql.execution.command.AtomicRunnableCommand$$anonfun$run$3.apply(package.scala:148)
        at org.apache.spark.sql.execution.command.AtomicRunnableCommand$$anonfun$run$3.apply(package.scala:145)
        at org.apache.spark.sql.execution.command.Auditable$class.runWithAudit(package.scala:104)
        at org.apache.spark.sql.execution.command.AtomicRunnableCommand.runWithAudit(package.scala:141)
        at org.apache.spark.sql.execution.command.AtomicRunnableCommand.run(package.scala:145)
        at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult$lzycompute(commands.scala:58)
        at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult(commands.scala:56)
        at org.apache.spark.sql.execution.command.ExecutedCommandExec.doExecute(commands.scala:74)
        at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:114)
        at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:114)
        at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:135)
        at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
        at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:132)
        at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:113)
        at org.apache.spark.sql.execution.QueryExecution.toRdd$lzycompute(QueryExecution.scala:87)
        at org.apache.spark.sql.execution.QueryExecution.toRdd(QueryExecution.scala:87)
        at org.apache.spark.sql.Dataset.<init>(Dataset.scala:185)
        at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:64)
        at org.apache.spark.sql.util.SparkSQLUtil$.execute(SparkSQLUtil.scala:38)
        at org.apache.carbondata.mv.datamap.MVDataMapProvider.rebuildInternal(MVDataMapProvider.scala:160)
        at org.apache.carbondata.core.datamap.DataMapProvider.rebuild(DataMapProvider.java:240)
        at org.apache.spark.sql.execution.command.datamap.CarbonDataMapRebuildCommand.processData(CarbonDataMapRebuildCommand.scala:74)
        at org.apache.spark.sql.execution.command.DataCommand$$anonfun$run$2.apply(package.scala:132)
        at org.apache.spark.sql.execution.command.DataCommand$$anonfun$run$2.apply(package.scala:132)
        at org.apache.spark.sql.execution.command.Auditable$class.runWithAudit(package.scala:104)
        at org.apache.spark.sql.execution.command.DataCommand.runWithAudit(package.scala:130)
        at org.apache.spark.sql.execution.command.DataCommand.run(package.scala:132)
        at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult$lzycompute(commands.scala:58)
        at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult(commands.scala:56)
        at org.apache.spark.sql.execution.command.ExecutedCommandExec.doExecute(commands.scala:74)
        at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:114)
        at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:114)
        at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:135)
        at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
        at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:132)
        at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:113)
        at org.apache.spark.sql.execution.QueryExecution.toRdd$lzycompute(QueryExecution.scala:87)
        at org.apache.spark.sql.execution.QueryExecution.toRdd(QueryExecution.scala:87)
        at org.apache.spark.sql.Dataset.<init>(Dataset.scala:185)
        at org.apache.spark.sql.CarbonSession$$anonfun$sql$1.apply(CarbonSession.scala:91)
        at org.apache.spark.sql.CarbonSession$$anonfun$sql$1.apply(CarbonSession.scala:90)
        at org.apache.spark.sql.CarbonSession.withProfiler(CarbonSession.scala:136)
        at org.apache.spark.sql.CarbonSession.sql(CarbonSession.scala:88)
        at org.apache.spark.sql.test.Spark2TestQueryExecutor.sql(Spark2TestQueryExecutor.scala:35)
        at org.apache.spark.sql.test.util.QueryTest.sql(QueryTest.scala:113)
        at org.apache.carbondata.mv.rewrite.TestAllOperationsOnMV.beforeEach(TestAllOperationsOnMV.scala:49)
        at org.scalatest.BeforeAndAfterEach$class.beforeEach(BeforeAndAfterEach.scala:154)
        at org.apache.carbondata.mv.rewrite.TestAllOperationsOnMV.beforeEach(TestAllOperationsOnMV.scala:37)
        at org.scalatest.BeforeAndAfterEach$class.beforeEach(BeforeAndAfterEach.scala:173)
        at org.apache.carbondata.mv.rewrite.TestAllOperationsOnMV.beforeEach(TestAllOperationsOnMV.scala:37)
        at org.scalatest.BeforeAndAfterEach$class.runTest(BeforeAndAfterEach.scala:253)
        at org.apache.carbondata.mv.rewrite.TestAllOperationsOnMV.runTest(TestAllOperationsOnMV.scala:37)
        at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
        at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
        at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:413)
        at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:401)
        at scala.collection.immutable.List.foreach(List.scala:381)
        at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:401)
        at org.scalatest.SuperEngine.org$scalatest$SuperEngine$$runTestsInBranch(Engine.scala:396)
        at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:483)
        at org.scalatest.FunSuiteLike$class.runTests(FunSuiteLike.scala:208)
        at org.scalatest.FunSuite.runTests(FunSuite.scala:1555)
        at org.scalatest.Suite$class.run(Suite.scala:1424)
        at org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1555)
        at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
        at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
        at org.scalatest.SuperEngine.runImpl(Engine.scala:545)
        at org.scalatest.FunSuiteLike$class.run(FunSuiteLike.scala:212)
        at org.scalatest.FunSuite.run(FunSuite.scala:1555)
        at org.scalatest.Suite$class.callExecuteOnSuite$1(Suite.scala:1492)
        at org.scalatest.Suite$$anonfun$runNestedSuites$1.apply(Suite.scala:1528)
        at org.scalatest.Suite$$anonfun$runNestedSuites$1.apply(Suite.scala:1526)
        at scala.collection.IndexedSeqOptimized$class.foreach(IndexedSeqOptimized.scala:33)
        at scala.collection.mutable.ArrayOps$ofRef.foreach(ArrayOps.scala:186)
        at org.scalatest.Suite$class.runNestedSuites(Suite.scala:1526)
        at org.scalatest.tools.DiscoverySuite.runNestedSuites(DiscoverySuite.scala:29)
        at org.scalatest.Suite$class.run(Suite.scala:1421)
        at org.scalatest.tools.DiscoverySuite.run(DiscoverySuite.scala:29)
        at org.scalatest.tools.SuiteRunner.run(SuiteRunner.scala:55)
        at org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$3.apply(Runner.scala:2563)
        at org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$3.apply(Runner.scala:2557)
        at scala.collection.immutable.List.foreach(List.scala:381)
        at org.scalatest.tools.Runner$.doRunRunRunDaDoRunRun(Runner.scala:2557)
        at org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1044)
        at org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1043)
        at org.scalatest.tools.Runner$.withClassLoaderAndDispatchReporter(Runner.scala:2722)
        at org.scalatest.tools.Runner$.runOptionallyWithPassFailReporter(Runner.scala:1043)
        at org.scalatest.tools.Runner$.main(Runner.scala:860)
        at org.scalatest.tools.Runner.main(Runner.scala)
2019-09-15 09:54:24 AUDIT audit:93 - {"time":"September 15, 2019 2:54:24 AM PDT","username":"jenkins","opName":"REBUILD DATAMAP","opId":"25177498540105962","opStatus":"FAILED","opTime":"298 ms","table":"default.dm1_table","extraInfo":{"Exception":"java.lang.Exception","Message":"DataLoad failure: /tmp/blockmgr-394ca08a-0e95-434d-804b-a6821f1d2532/2d/shuffle_263_0_0.data.c8b374a6-a1b2-4078-a97a-cc3c24ac5573 (No such file or directory)"}}
*** RUN ABORTED ***
  java.lang.Exception: DataLoad failure: /tmp/blockmgr-394ca08a-0e95-434d-804b-a6821f1d2532/2d/shuffle_263_0_0.data.c8b374a6-a1b2-4078-a97a-cc3c24ac5573 (No such file or directory)
  at org.apache.carbondata.spark.rdd.CarbonDataRDDFactory$.loadCarbonData(CarbonDataRDDFactory.scala:501)
  at org.apache.spark.sql.execution.command.management.CarbonLoadDataCommand.loadData(CarbonLoadDataCommand.scala:627)
  at org.apache.spark.sql.execution.command.management.CarbonLoadDataCommand.processData(CarbonLoadDataCommand.scala:356)
  at org.apache.spark.sql.execution.command.AtomicRunnableCommand$$anonfun$run$3.apply(package.scala:148)
  at org.apache.spark.sql.execution.command.AtomicRunnableCommand$$anonfun$run$3.apply(package.scala:145)
  at org.apache.spark.sql.execution.command.Auditable$class.runWithAudit(package.scala:104)
  at org.apache.spark.sql.execution.command.AtomicRunnableCommand.runWithAudit(package.scala:141)
  at org.apache.spark.sql.execution.command.AtomicRunnableCommand.run(package.scala:145)
  at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult$lzycompute(commands.scala:58)
  at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult(commands.scala:56)
  ...
[JENKINS] Recording test results
