[
https://issues.apache.org/jira/browse/HUDI-1454?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel
]
Vinoth Chandar updated HUDI-1454:
---------------------------------
Fix Version/s: 0.14.0
(was: 1.0.0)
> Unit test fails with error: Error reading clustering plan 006
> -------------------------------------------------------------
>
> Key: HUDI-1454
> URL: https://issues.apache.org/jira/browse/HUDI-1454
> Project: Apache Hudi
> Issue Type: Task
> Reporter: liwei
> Assignee: liwei
> Priority: Major
> Fix For: 0.14.0
>
>
> https://travis-ci.com/github/apache/hudi/jobs/458936905
> [INFO] Tests run: 3, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 19.245
> s - in org.apache.hudi.table.action.compact.TestInlineCompaction[INFO] Tests
> run: 3, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 19.245 s - in
> org.apache.hudi.table.action.compact.TestInlineCompaction[INFO] Running
> org.apache.hudi.table.action.compact.TestAsyncCompaction[WARN ] 2020-12-12
> 15:13:43,814 org.apache.hudi.testutils.HoodieClientTestHarness - Closing
> file-system instance used in previous test-run[WARN ] 2020-12-12 15:13:50,370
> org.apache.hudi.testutils.HoodieClientTestHarness - Closing file-system
> instance used in previous test-run[WARN ] 2020-12-12 15:14:02,285
> org.apache.hudi.testutils.HoodieClientTestHarness - Closing file-system
> instance used in previous test-run[WARN ] 2020-12-12 15:14:08,596
> org.apache.hudi.testutils.HoodieClientTestHarness - Closing file-system
> instance used in previous test-run[WARN ] 2020-12-12 15:14:16,857
> org.apache.hudi.common.util.ClusteringUtils - No content found in requested
> file for instant [==>006__replacecommit__REQUESTED][WARN ] 2020-12-12
> 15:14:16,861 org.apache.hudi.common.util.ClusteringUtils - No content found
> in requested file for instant [==>006__replacecommit__REQUESTED][ERROR]
> 2020-12-12 15:14:16,919
> org.apache.hudi.timeline.service.FileSystemViewHandler - Got runtime
> exception servicing request
> partition=2015%2F03%2F17&basepath=%2Ftmp%2Fjunit7781027189613842524%2Fdataset&lastinstantts=005&timelinehash=ba1d2bb94a4b1d1e6e294e77086957b6c7c43b5a306e36cba6bbaa955a0ed8ce
> org.apache.hudi.exception.HoodieIOException:
> Error reading clustering plan 006 at
> org.apache.hudi.common.util.ClusteringUtils.getClusteringPlan(ClusteringUtils.java:85)
> at
> org.apache.hudi.common.util.ClusteringUtils.lambda$getAllPendingClusteringPlans$0(ClusteringUtils.java:67)
> at java.util.stream.ReferencePipeline$3$1.accept(ReferencePipeline.java:193)
> at
> java.util.ArrayList$ArrayListSpliterator.forEachRemaining(ArrayList.java:1374)
> at java.util.stream.AbstractPipeline.copyInto(AbstractPipeline.java:481) at
> java.util.stream.AbstractPipeline.wrapAndCopyInto(AbstractPipeline.java:471)
> at java.util.stream.ReduceOps$ReduceOp.evaluateSequential(ReduceOps.java:708)
> at java.util.stream.AbstractPipeline.evaluate(AbstractPipeline.java:234) at
> java.util.stream.ReferencePipeline.collect(ReferencePipeline.java:499) at
> org.apache.hudi.common.util.ClusteringUtils.getAllFileGroupsInPendingClusteringPlans(ClusteringUtils.java:100)
> at
> org.apache.hudi.common.table.view.AbstractTableFileSystemView.init(AbstractTableFileSystemView.java:111)
> at
> org.apache.hudi.common.table.view.RocksDbBasedFileSystemView.init(RocksDbBasedFileSystemView.java:91)
> at
> org.apache.hudi.common.table.view.AbstractTableFileSystemView.runSync(AbstractTableFileSystemView.java:1077)
> at
> org.apache.hudi.common.table.view.IncrementalTimelineSyncFileSystemView.runSync(IncrementalTimelineSyncFileSystemView.java:97)
> at
> org.apache.hudi.common.table.view.AbstractTableFileSystemView.sync(AbstractTableFileSystemView.java:1059)
> at
> org.apache.hudi.timeline.service.FileSystemViewHandler.syncIfLocalViewBehind(FileSystemViewHandler.java:124)
> at
> org.apache.hudi.timeline.service.FileSystemViewHandler.access$100(FileSystemViewHandler.java:55)
> at
> org.apache.hudi.timeline.service.FileSystemViewHandler$ViewHandler.handle(FileSystemViewHandler.java:338)
> at io.javalin.security.SecurityUtil.noopAccessManager(SecurityUtil.kt:22) at
> io.javalin.Javalin.lambda$addHandler$0(Javalin.java:606) at
> io.javalin.core.JavalinServlet$service$2$1.invoke(JavalinServlet.kt:46) at
> io.javalin.core.JavalinServlet$service$2$1.invoke(JavalinServlet.kt:17) at
> io.javalin.core.JavalinServlet$service$1.invoke(JavalinServlet.kt:143) at
> io.javalin.core.JavalinServlet$service$2.invoke(JavalinServlet.kt:41) at
> io.javalin.core.JavalinServlet.service(JavalinServlet.kt:107) at
> io.javalin.core.util.JettyServerUtil$initialize$httpHandler$1.doHandle(JettyServerUtil.kt:72)
> at
> org.eclipse.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:203)
> at org.eclipse.jetty.servlet.ServletHandler.doScope(ServletHandler.java:480)
> at
> org.eclipse.jetty.server.session.SessionHandler.doScope(SessionHandler.java:1668)
> at
> org.eclipse.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:201)
> at
> org.eclipse.jetty.server.handler.ContextHandler.doScope(ContextHandler.java:1247)
> at
> org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:144)
> at org.eclipse.jetty.server.handler.HandlerList.handle(HandlerList.java:61)
> at
> org.eclipse.jetty.server.handler.StatisticsHandler.handle(StatisticsHandler.java:174)
> at
> org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:132)
> at org.eclipse.jetty.server.Server.handle(Server.java:502) at
> org.eclipse.jetty.server.HttpChannel.handle(HttpChannel.java:370) at
> org.eclipse.jetty.server.HttpConnection.onFillable(HttpConnection.java:267)
> at
> org.eclipse.jetty.io.AbstractConnection$ReadCallback.succeeded(AbstractConnection.java:305)
> at org.eclipse.jetty.io.FillInterest.fillable(FillInterest.java:103) at
> org.eclipse.jetty.io.ChannelEndPoint$2.run(ChannelEndPoint.java:117) at
> org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.runTask(EatWhatYouKill.java:333)
> at
> org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.doProduce(EatWhatYouKill.java:310)
> at
> org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.tryProduce(EatWhatYouKill.java:168)
> at
> org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.run(EatWhatYouKill.java:126)
> at
> org.eclipse.jetty.util.thread.ReservedThreadExecutor$ReservedThread.run(ReservedThreadExecutor.java:366)
> at
> org.eclipse.jetty.util.thread.QueuedThreadPool.runJob(QueuedThreadPool.java:765)
> at
> org.eclipse.jetty.util.thread.QueuedThreadPool$2.run(QueuedThreadPool.java:683)
> at java.lang.Thread.run(Thread.java:748)
> Caused by: java.io.IOException: Not
> an Avro data file at
> org.apache.avro.file.DataFileReader.openReader(DataFileReader.java:63) at
> org.apache.hudi.common.table.timeline.TimelineMetadataUtils.deserializeAvroMetadata(TimelineMetadataUtils.java:160)
> at
> org.apache.hudi.common.table.timeline.TimelineMetadataUtils.deserializeRequestedReplaceMetadta(TimelineMetadataUtils.java:154)
> at
> org.apache.hudi.common.util.ClusteringUtils.getClusteringPlan(ClusteringUtils.java:79)
> ... 48 more
> [ERROR] 2020-12-12 15:14:16,933
> org.apache.hudi.common.table.view.PriorityBasedFileSystemView - Got error
> running preferred function. Trying secondary
> org.apache.hudi.exception.HoodieRemoteException: Server Error at
> org.apache.hudi.common.table.view.RemoteHoodieTableFileSystemView.getLatestFileSlices(RemoteHoodieTableFileSystemView.java:279)
> at
> org.apache.hudi.common.table.view.PriorityBasedFileSystemView.execute(PriorityBasedFileSystemView.java:81)
> at
> org.apache.hudi.common.table.view.PriorityBasedFileSystemView.getLatestFileSlices(PriorityBasedFileSystemView.java:160)
> at
> org.apache.hudi.table.action.commit.SparkInsertOverwriteCommitActionExecutor.getAllExistingFileIds(SparkInsertOverwriteCommitActionExecutor.java:82)
> at
> org.apache.hudi.table.action.commit.SparkInsertOverwriteCommitActionExecutor.lambda$getPartitionToReplacedFileIds$81998308$1(SparkInsertOverwriteCommitActionExecutor.java:77)
> at
> org.apache.spark.api.java.JavaPairRDD$$anonfun$pairFunToScalaFun$1.apply(JavaPairRDD.scala:1043)
> at
> org.apache.spark.api.java.JavaPairRDD$$anonfun$pairFunToScalaFun$1.apply(JavaPairRDD.scala:1043)
> at scala.collection.Iterator$$anon$11.next(Iterator.scala:410) at
> scala.collection.Iterator$class.foreach(Iterator.scala:891) at
> scala.collection.AbstractIterator.foreach(Iterator.scala:1334) at
> scala.collection.generic.Growable$class.$plus$plus$eq(Growable.scala:59) at
> scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:104) at
> scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:48) at
> scala.collection.TraversableOnce$class.to(TraversableOnce.scala:310) at
> scala.collection.AbstractIterator.to(Iterator.scala:1334) at
> scala.collection.TraversableOnce$class.toBuffer(TraversableOnce.scala:302) at
> scala.collection.AbstractIterator.toBuffer(Iterator.scala:1334) at
> scala.collection.TraversableOnce$class.toArray(TraversableOnce.scala:289) at
> scala.collection.AbstractIterator.toArray(Iterator.scala:1334) at
> org.apache.spark.rdd.RDD$$anonfun$collect$1$$anonfun$13.apply(RDD.scala:945)
> at
> org.apache.spark.rdd.RDD$$anonfun$collect$1$$anonfun$13.apply(RDD.scala:945)
> at
> org.apache.spark.SparkContext$$anonfun$runJob$5.apply(SparkContext.scala:2101)
> at
> org.apache.spark.SparkContext$$anonfun$runJob$5.apply(SparkContext.scala:2101)
> at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:90) at
> org.apache.spark.scheduler.Task.run(Task.scala:123) at
> org.apache.spark.executor.Executor$TaskRunner$$anonfun$10.apply(Executor.scala:408)
> at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1360) at
> org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:414) at
> java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
> at
> java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
> at java.lang.Thread.run(Thread.java:748)
> Caused by:
> org.apache.http.client.HttpResponseException: Server Error at
> org.apache.http.impl.client.AbstractResponseHandler.handleResponse(AbstractResponseHandler.java:69)
> at org.apache.http.client.fluent.Response.handleResponse(Response.java:90)
> at org.apache.http.client.fluent.Response.returnContent(Response.java:97) at
> org.apache.hudi.common.table.view.RemoteHoodieTableFileSystemView.executeRequest(RemoteHoodieTableFileSystemView.java:173)
> at
> org.apache.hudi.common.table.view.RemoteHoodieTableFileSystemView.getLatestFileSlices(RemoteHoodieTableFileSystemView.java:275)
> ... 30 more[WARN ] 2020-12-12 15:14:18,248
> org.apache.hudi.testutils.HoodieClientTestHarness - Closing file-system
> instance used in previous test-run[WARN ] 2020-12-12 15:14:26,284
> org.apache.hudi.testutils.HoodieClientTestHarness - Closing file-system
> instance used in previous test-run[WARN ] 2020-12-12 15:14:37,355
> org.apache.hudi.table.action.rollback.BaseRollbackActionExecutor - Rollback
> finished without deleting inflight instant file.
> Instant=[==>005__compaction__INFLIGHT][WARN ] 2020-12-12 15:14:38,244
> org.apache.hudi.testutils.HoodieClientTestHarness - Closing file-system
> instance used in previous test-run[WARN ] 2020-12-12 15:14:46,809
> org.apache.hudi.testutils.HoodieClientTestHarness - Closing file-system
> instance used in previous test-run[WARN ] 2020-12-12 15:14:53,337
> org.apache.hudi.table.action.rollback.BaseRollbackActionExecutor - Rollback
> finished without deleting inflight instant file.
> Instant=[==>005__compaction__INFLIGHT][WARN ] 2020-12-12 15:14:53,395
> org.apache.hudi.testutils.HoodieClientTestHarness - Closing file-system
> instance used in previous test-run[INFO] Tests run: 9, Failures: 0, Errors:
> 0, Skipped: 0, Time elapsed: 76.984 s - in
> org.apache.hudi.table.action.compact.TestAsyncCompaction[INFO] Running
> org.apache.hudi.table.action.compact.TestHoodieCompactor[WARN ] 2020-12-12
> 15:14:54,314 org.apache.hudi.testutils.HoodieClientTestHarness - Closing
> file-system instance used in previous test-run[WARN ] 2020-12-12 15:14:57,842
> org.apache.hudi.testutils.HoodieClientTestHarness - Closing file-system
> instance used in previous test-run
--
This message was sent by Atlassian Jira
(v8.20.10#820010)