See
<https://builds.apache.org/job/carbondata-master-spark-2.1/org.apache.carbondata$carbondata-mv-core/3739/display/redirect>
------------------------------------------
[...truncated 17.28 MB...]
at org.apache.spark.sql.Dataset.<init>(Dataset.scala:185)
at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:64)
at org.apache.spark.sql.execution.command.management.CarbonLoadDataCommand.loadDataWithPartition(CarbonLoadDataCommand.scala:840)
at org.apache.spark.sql.execution.command.management.CarbonLoadDataCommand.loadData(CarbonLoadDataCommand.scala:620)
at org.apache.spark.sql.execution.command.management.CarbonLoadDataCommand.processData(CarbonLoadDataCommand.scala:356)
at org.apache.spark.sql.execution.command.AtomicRunnableCommand$$anonfun$run$3.apply(package.scala:148)
at org.apache.spark.sql.execution.command.AtomicRunnableCommand$$anonfun$run$3.apply(package.scala:145)
at org.apache.spark.sql.execution.command.Auditable$class.runWithAudit(package.scala:104)
at org.apache.spark.sql.execution.command.AtomicRunnableCommand.runWithAudit(package.scala:141)
at org.apache.spark.sql.execution.command.AtomicRunnableCommand.run(package.scala:145)
at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult$lzycompute(commands.scala:58)
at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult(commands.scala:56)
at org.apache.spark.sql.execution.command.ExecutedCommandExec.doExecute(commands.scala:74)
at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:114)
at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:114)
at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:135)
at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:132)
at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:113)
at org.apache.spark.sql.execution.QueryExecution.toRdd$lzycompute(QueryExecution.scala:87)
at org.apache.spark.sql.execution.QueryExecution.toRdd(QueryExecution.scala:87)
at org.apache.spark.sql.Dataset.<init>(Dataset.scala:185)
at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:64)
at org.apache.spark.sql.util.SparkSQLUtil$.execute(SparkSQLUtil.scala:38)
at org.apache.carbondata.mv.datamap.MVDataMapProvider.rebuildInternal(MVDataMapProvider.scala:160)
at org.apache.carbondata.core.datamap.DataMapProvider.rebuild(DataMapProvider.java:240)
at org.apache.carbondata.mv.datamap.MVDataMapProvider.initData(MVDataMapProvider.scala:81)
at org.apache.spark.sql.execution.command.datamap.CarbonCreateDataMapCommand.processData(CarbonCreateDataMapCommand.scala:183)
at org.apache.spark.sql.execution.command.AtomicRunnableCommand$$anonfun$run$3.apply(package.scala:148)
at org.apache.spark.sql.execution.command.AtomicRunnableCommand$$anonfun$run$3.apply(package.scala:145)
at org.apache.spark.sql.execution.command.Auditable$class.runWithAudit(package.scala:104)
at org.apache.spark.sql.execution.command.AtomicRunnableCommand.runWithAudit(package.scala:141)
at org.apache.spark.sql.execution.command.AtomicRunnableCommand.run(package.scala:145)
at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult$lzycompute(commands.scala:58)
at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult(commands.scala:56)
at org.apache.spark.sql.execution.command.ExecutedCommandExec.doExecute(commands.scala:74)
at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:114)
at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:114)
at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:135)
at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:132)
at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:113)
at org.apache.spark.sql.execution.QueryExecution.toRdd$lzycompute(QueryExecution.scala:87)
at org.apache.spark.sql.execution.QueryExecution.toRdd(QueryExecution.scala:87)
at org.apache.spark.sql.Dataset.<init>(Dataset.scala:185)
at org.apache.spark.sql.CarbonSession$$anonfun$sql$1.apply(CarbonSession.scala:91)
at org.apache.spark.sql.CarbonSession$$anonfun$sql$1.apply(CarbonSession.scala:90)
at org.apache.spark.sql.CarbonSession.withProfiler(CarbonSession.scala:136)
at org.apache.spark.sql.CarbonSession.sql(CarbonSession.scala:88)
at org.apache.spark.sql.test.Spark2TestQueryExecutor.sql(Spark2TestQueryExecutor.scala:35)
at org.apache.spark.sql.test.util.QueryTest.sql(QueryTest.scala:113)
at org.apache.carbondata.mv.rewrite.TestPartitionWithMV$$anonfun$32.apply$mcV$sp(TestPartitionWithMV.scala:676)
at org.apache.carbondata.mv.rewrite.TestPartitionWithMV$$anonfun$32.apply(TestPartitionWithMV.scala:671)
at org.apache.carbondata.mv.rewrite.TestPartitionWithMV$$anonfun$32.apply(TestPartitionWithMV.scala:671)
at org.scalatest.Transformer$$anonfun$apply$1.apply$mcV$sp(Transformer.scala:22)
at org.scalatest.OutcomeOf$class.outcomeOf(OutcomeOf.scala:85)
at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
at org.scalatest.Transformer.apply(Transformer.scala:22)
at org.scalatest.Transformer.apply(Transformer.scala:20)
at org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:166)
at org.apache.spark.sql.test.util.CarbonFunSuite.withFixture(CarbonFunSuite.scala:41)
at org.scalatest.FunSuiteLike$class.invokeWithFixture$1(FunSuiteLike.scala:163)
at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
at org.scalatest.SuperEngine.runTestImpl(Engine.scala:306)
at org.scalatest.FunSuiteLike$class.runTest(FunSuiteLike.scala:175)
at org.scalatest.FunSuite.runTest(FunSuite.scala:1555)
at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:413)
at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:401)
at scala.collection.immutable.List.foreach(List.scala:381)
at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:401)
at org.scalatest.SuperEngine.org$scalatest$SuperEngine$$runTestsInBranch(Engine.scala:396)
at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:483)
at org.scalatest.FunSuiteLike$class.runTests(FunSuiteLike.scala:208)
at org.scalatest.FunSuite.runTests(FunSuite.scala:1555)
at org.scalatest.Suite$class.run(Suite.scala:1424)
at org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1555)
at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
at org.scalatest.SuperEngine.runImpl(Engine.scala:545)
at org.scalatest.FunSuiteLike$class.run(FunSuiteLike.scala:212)
at org.apache.carbondata.mv.rewrite.TestPartitionWithMV.org$scalatest$BeforeAndAfterAll$$super$run(TestPartitionWithMV.scala:33)
at org.scalatest.BeforeAndAfterAll$class.liftedTree1$1(BeforeAndAfterAll.scala:257)
at org.scalatest.BeforeAndAfterAll$class.run(BeforeAndAfterAll.scala:256)
at org.apache.carbondata.mv.rewrite.TestPartitionWithMV.run(TestPartitionWithMV.scala:33)
at org.scalatest.Suite$class.callExecuteOnSuite$1(Suite.scala:1492)
at org.scalatest.Suite$$anonfun$runNestedSuites$1.apply(Suite.scala:1528)
at org.scalatest.Suite$$anonfun$runNestedSuites$1.apply(Suite.scala:1526)
at scala.collection.IndexedSeqOptimized$class.foreach(IndexedSeqOptimized.scala:33)
at scala.collection.mutable.ArrayOps$ofRef.foreach(ArrayOps.scala:186)
at org.scalatest.Suite$class.runNestedSuites(Suite.scala:1526)
at org.scalatest.tools.DiscoverySuite.runNestedSuites(DiscoverySuite.scala:29)
at org.scalatest.Suite$class.run(Suite.scala:1421)
at org.scalatest.tools.DiscoverySuite.run(DiscoverySuite.scala:29)
at org.scalatest.tools.SuiteRunner.run(SuiteRunner.scala:55)
at org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$3.apply(Runner.scala:2563)
at org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$3.apply(Runner.scala:2557)
at scala.collection.immutable.List.foreach(List.scala:381)
at org.scalatest.tools.Runner$.doRunRunRunDaDoRunRun(Runner.scala:2557)
at org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1044)
at org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1043)
at org.scalatest.tools.Runner$.withClassLoaderAndDispatchReporter(Runner.scala:2722)
at org.scalatest.tools.Runner$.runOptionallyWithPassFailReporter(Runner.scala:1043)
at org.scalatest.tools.Runner$.main(Runner.scala:860)
at org.scalatest.tools.Runner.main(Runner.scala)
Caused by: org.apache.spark.SparkException: Job aborted due to stage failure: Task 0 in stage 2544.0 failed 1 times, most recent failure: Lost task 0.0 in stage 2544.0 (TID 45181, localhost, executor driver): java.io.FileNotFoundException: /tmp/blockmgr-a61f7cc6-1d43-4547-8418-73b85808a07a/15/temp_shuffle_202a8e6b-c77f-4a2f-9865-6bcc959f668a (No such file or directory)
at java.io.FileOutputStream.open0(Native Method)
at java.io.FileOutputStream.open(FileOutputStream.java:270)
at java.io.FileOutputStream.<init>(FileOutputStream.java:213)
at org.apache.spark.storage.DiskBlockObjectWriter.initialize(DiskBlockObjectWriter.scala:102)
at org.apache.spark.storage.DiskBlockObjectWriter.open(DiskBlockObjectWriter.scala:115)
at org.apache.spark.storage.DiskBlockObjectWriter.write(DiskBlockObjectWriter.scala:229)
at org.apache.spark.shuffle.sort.BypassMergeSortShuffleWriter.write(BypassMergeSortShuffleWriter.java:152)
at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:96)
at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:53)
at org.apache.spark.scheduler.Task.run(Task.scala:99)
at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:282)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
at java.lang.Thread.run(Thread.java:748)
Driver stacktrace:
at org.apache.spark.scheduler.DAGScheduler.org$apache$spark$scheduler$DAGScheduler$$failJobAndIndependentStages(DAGScheduler.scala:1435)
at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1423)
at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1422)
at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:48)
at org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:1422)
at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:802)
at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:802)
at scala.Option.foreach(Option.scala:257)
at org.apache.spark.scheduler.DAGScheduler.handleTaskSetFailed(DAGScheduler.scala:802)
at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.doOnReceive(DAGScheduler.scala:1650)
at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1605)
at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1594)
at org.apache.spark.util.EventLoop$$anon$1.run(EventLoop.scala:48)
at org.apache.spark.scheduler.DAGScheduler.runJob(DAGScheduler.scala:628)
at org.apache.spark.SparkContext.runJob(SparkContext.scala:1918)
at org.apache.spark.SparkContext.runJob(SparkContext.scala:1931)
at org.apache.spark.SparkContext.runJob(SparkContext.scala:1951)
at org.apache.spark.sql.execution.datasources.FileFormatWriter$$anonfun$write$1.apply$mcV$sp(FileFormatWriter.scala:127)
... 123 more
Caused by: java.io.FileNotFoundException: /tmp/blockmgr-a61f7cc6-1d43-4547-8418-73b85808a07a/15/temp_shuffle_202a8e6b-c77f-4a2f-9865-6bcc959f668a (No such file or directory)
at java.io.FileOutputStream.open0(Native Method)
at java.io.FileOutputStream.open(FileOutputStream.java:270)
at java.io.FileOutputStream.<init>(FileOutputStream.java:213)
at org.apache.spark.storage.DiskBlockObjectWriter.initialize(DiskBlockObjectWriter.scala:102)
at org.apache.spark.storage.DiskBlockObjectWriter.open(DiskBlockObjectWriter.scala:115)
at org.apache.spark.storage.DiskBlockObjectWriter.write(DiskBlockObjectWriter.scala:229)
at org.apache.spark.shuffle.sort.BypassMergeSortShuffleWriter.write(BypassMergeSortShuffleWriter.java:152)
at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:96)
at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:53)
at org.apache.spark.scheduler.Task.run(Task.scala:99)
at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:282)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
at java.lang.Thread.run(Thread.java:748)
2019-09-16 09:52:59 AUDIT audit:72 - {"time":"September 16, 2019 2:52:59 AM PDT","username":"jenkins","opName":"DROP DATAMAP","opId":"25263813974328030","opStatus":"START"}
2019-09-16 09:52:59 AUDIT audit:93 - {"time":"September 16, 2019 2:52:59 AM PDT","username":"jenkins","opName":"DROP DATAMAP","opId":"25263813974328030","opStatus":"SUCCESS","opTime":"113 ms","table":"partition_mv.partitionone","extraInfo":{"dmName":"dm1"}}
2019-09-16 09:52:59 AUDIT audit:93 - {"time":"September 16, 2019 2:52:59 AM PDT","username":"jenkins","opName":"CREATE DATAMAP","opId":"25263813208749813","opStatus":"FAILED","opTime":"878 ms","table":"partition_mv.partitionone","extraInfo":{"Exception":"org.apache.spark.SparkException","Message":"Job aborted."}}
- test partition at last column *** FAILED ***
  org.apache.spark.SparkException: Job aborted.
  at org.apache.spark.sql.execution.datasources.FileFormatWriter$$anonfun$write$1.apply$mcV$sp(FileFormatWriter.scala:147)
  at org.apache.spark.sql.execution.datasources.FileFormatWriter$$anonfun$write$1.apply(FileFormatWriter.scala:121)
  at org.apache.spark.sql.execution.datasources.FileFormatWriter$$anonfun$write$1.apply(FileFormatWriter.scala:121)
  at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:57)
  at org.apache.spark.sql.execution.datasources.FileFormatWriter$.write(FileFormatWriter.scala:121)
  at org.apache.spark.sql.execution.datasources.InsertIntoHadoopFsRelationCommand.run(InsertIntoHadoopFsRelationCommand.scala:101)
  at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult$lzycompute(commands.scala:58)
  at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult(commands.scala:56)
  at org.apache.spark.sql.execution.command.ExecutedCommandExec.doExecute(commands.scala:74)
  at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:114)
  ...
  Cause: org.apache.spark.SparkException: Job aborted due to stage failure: Task 0 in stage 2544.0 failed 1 times, most recent failure: Lost task 0.0 in stage 2544.0 (TID 45181, localhost, executor driver): java.io.FileNotFoundException: /tmp/blockmgr-a61f7cc6-1d43-4547-8418-73b85808a07a/15/temp_shuffle_202a8e6b-c77f-4a2f-9865-6bcc959f668a (No such file or directory)
  at java.io.FileOutputStream.open0(Native Method)
  at java.io.FileOutputStream.open(FileOutputStream.java:270)
  at java.io.FileOutputStream.<init>(FileOutputStream.java:213)
  at org.apache.spark.storage.DiskBlockObjectWriter.initialize(DiskBlockObjectWriter.scala:102)
  at org.apache.spark.storage.DiskBlockObjectWriter.open(DiskBlockObjectWriter.scala:115)
  at org.apache.spark.storage.DiskBlockObjectWriter.write(DiskBlockObjectWriter.scala:229)
  at org.apache.spark.shuffle.sort.BypassMergeSortShuffleWriter.write(BypassMergeSortShuffleWriter.java:152)
  at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:96)
  at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:53)
  at org.apache.spark.scheduler.Task.run(Task.scala:99)
  at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:282)
  at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
  at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
  at java.lang.Thread.run(Thread.java:748)

Driver stacktrace:
  at org.apache.spark.scheduler.DAGScheduler.org$apache$spark$scheduler$DAGScheduler$$failJobAndIndependentStages(DAGScheduler.scala:1435)
  at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1423)
  at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1422)
  at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
  at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:48)
  at org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:1422)
  at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:802)
  at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:802)
  at scala.Option.foreach(Option.scala:257)
  at org.apache.spark.scheduler.DAGScheduler.handleTaskSetFailed(DAGScheduler.scala:802)
  ...
  Cause: java.io.FileNotFoundException: /tmp/blockmgr-a61f7cc6-1d43-4547-8418-73b85808a07a/15/temp_shuffle_202a8e6b-c77f-4a2f-9865-6bcc959f668a (No such file or directory)
  at java.io.FileOutputStream.open0(Native Method)
  at java.io.FileOutputStream.open(FileOutputStream.java:270)
  at java.io.FileOutputStream.<init>(FileOutputStream.java:213)
  at org.apache.spark.storage.DiskBlockObjectWriter.initialize(DiskBlockObjectWriter.scala:102)
  at org.apache.spark.storage.DiskBlockObjectWriter.open(DiskBlockObjectWriter.scala:115)
  at org.apache.spark.storage.DiskBlockObjectWriter.write(DiskBlockObjectWriter.scala:229)
  at org.apache.spark.shuffle.sort.BypassMergeSortShuffleWriter.write(BypassMergeSortShuffleWriter.java:152)
  at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:96)
  at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:53)
  at org.apache.spark.scheduler.Task.run(Task.scala:99)
  ...
2019-09-16 09:52:59 AUDIT audit:72 - {"time":"September 16, 2019 2:52:59 AM PDT","username":"jenkins","opName":"DROP TABLE","opId":"25263814093098170","opStatus":"START"}
2019-09-16 09:52:59 AUDIT audit:93 - {"time":"September 16, 2019 2:52:59 AM PDT","username":"jenkins","opName":"DROP TABLE","opId":"25263814093098170","opStatus":"SUCCESS","opTime":"1 ms","table":"partition_mv.ag_table","extraInfo":{}}
2019-09-16 09:52:59 AUDIT audit:72 - {"time":"September 16, 2019 2:52:59 AM PDT","username":"jenkins","opName":"DROP TABLE","opId":"25263814094301120","opStatus":"START"}
2019-09-16 09:53:00 AUDIT audit:93 - {"time":"September 16, 2019 2:53:00 AM PDT","username":"jenkins","opName":"DROP TABLE","opId":"25263814094301120","opStatus":"SUCCESS","opTime":"941 ms","table":"partition_mv.droppartition","extraInfo":{}}
2019-09-16 09:53:00 AUDIT audit:72 - {"time":"September 16, 2019 2:53:00 AM PDT","username":"jenkins","opName":"DROP TABLE","opId":"25263815034712090","opStatus":"START"}
2019-09-16 09:53:00 AUDIT audit:93 - {"time":"September 16, 2019 2:53:00 AM PDT","username":"jenkins","opName":"DROP TABLE","opId":"25263815034712090","opStatus":"SUCCESS","opTime":"486 ms","table":"partition_mv.maintable","extraInfo":{}}
2019-09-16 09:53:00 AUDIT audit:72 - {"time":"September 16, 2019 2:53:00 AM PDT","username":"jenkins","opName":"DROP TABLE","opId":"25263815521304869","opStatus":"START"}
2019-09-16 09:53:00 AUDIT audit:93 - {"time":"September 16, 2019 2:53:00 AM PDT","username":"jenkins","opName":"DROP TABLE","opId":"25263815521304869","opStatus":"SUCCESS","opTime":"110 ms","table":"partition_mv.par","extraInfo":{}}
2019-09-16 09:53:00 AUDIT audit:72 - {"time":"September 16, 2019 2:53:00 AM PDT","username":"jenkins","opName":"DROP TABLE","opId":"25263815631449447","opStatus":"START"}
2019-09-16 09:53:01 AUDIT audit:93 - {"time":"September 16, 2019 2:53:01 AM PDT","username":"jenkins","opName":"DROP TABLE","opId":"25263815631449447","opStatus":"SUCCESS","opTime":"634 ms","table":"partition_mv.partitionallcompaction","extraInfo":{}}
2019-09-16 09:53:01 AUDIT audit:72 - {"time":"September 16, 2019 2:53:01 AM PDT","username":"jenkins","opName":"DROP TABLE","opId":"25263816266068790","opStatus":"START"}
2019-09-16 09:53:01 AUDIT audit:93 - {"time":"September 16, 2019 2:53:01 AM PDT","username":"jenkins","opName":"DROP TABLE","opId":"25263816266068790","opStatus":"SUCCESS","opTime":"199 ms","table":"partition_mv.partitionone","extraInfo":{}}
2019-09-16 09:53:01 AUDIT audit:72 - {"time":"September 16, 2019 2:53:01 AM PDT","username":"jenkins","opName":"DROP TABLE","opId":"25263816465636116","opStatus":"START"}
2019-09-16 09:53:02 AUDIT audit:93 - {"time":"September 16, 2019 2:53:02 AM PDT","username":"jenkins","opName":"DROP TABLE","opId":"25263816465636116","opStatus":"SUCCESS","opTime":"867 ms","table":"partition_mv.partitiontable","extraInfo":{}}
2019-09-16 09:53:02 AUDIT audit:72 - {"time":"September 16, 2019 2:53:02 AM PDT","username":"jenkins","opName":"DROP TABLE","opId":"25263817333019204","opStatus":"START"}
2019-09-16 09:53:02 AUDIT audit:93 - {"time":"September 16, 2019 2:53:02 AM PDT","username":"jenkins","opName":"DROP TABLE","opId":"25263817333019204","opStatus":"SUCCESS","opTime":"1 ms","table":"partition_mv.sensor_1_table","extraInfo":{}}
2019-09-16 09:53:02 AUDIT audit:72 - {"time":"September 16, 2019 2:53:02 AM PDT","username":"jenkins","opName":"DROP TABLE","opId":"25263817334304590","opStatus":"START"}
2019-09-16 09:53:03 AUDIT audit:93 - {"time":"September 16, 2019 2:53:03 AM PDT","username":"jenkins","opName":"DROP TABLE","opId":"25263817334304590","opStatus":"SUCCESS","opTime":"800 ms","table":"partition_mv.updatetime_8","extraInfo":{}}
MVTPCDSTestCase:
- test create datamap with tpcds_1_4_testCases case_1 !!! IGNORED !!!
- test create datamap with tpcds_1_4_testCases case_3 !!! IGNORED !!!
- test create datamap with tpcds_1_4_testCases case_4 !!! IGNORED !!!
- test create datamap with tpcds_1_4_testCases case_5 !!! IGNORED !!!
- test create datamap with tpcds_1_4_testCases case_6 !!! IGNORED !!!
- test create datamap with tpcds_1_4_testCases case_8 !!! IGNORED !!!
- test create datamap with tpcds_1_4_testCases case_11 !!! IGNORED !!!
- test create datamap with tpcds_1_4_testCases case_15 !!! IGNORED !!!
- test create datamap with tpcds_1_4_testCases case_16 !!! IGNORED !!!
Run completed in 13 minutes, 9 seconds.
Total number of tests run: 194
Suites: completed 19, aborted 0
Tests: succeeded 181, failed 13, canceled 0, ignored 27, pending 0
*** 13 TESTS FAILED ***
[JENKINS] Recording test results