[
https://issues.apache.org/jira/browse/HUDI-7453?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel
]
Davis Zhang closed HUDI-7453.
-----------------------------
Resolution: Won't Fix
Closing as Won't Fix: no failures of this test have been observed in the past 30 days.
> Flaky test: [1] true (testMORTableRestore)
> ----------------------------------------
>
> Key: HUDI-7453
> URL: https://issues.apache.org/jira/browse/HUDI-7453
> Project: Apache Hudi
> Issue Type: Sub-task
> Components: compaction, table-service
> Reporter: Y Ethan Guo
> Assignee: Davis Zhang
> Priority: Blocker
> Fix For: 1.0.1
>
> Attachments: Screenshot 2024-02-28 at 10.53.48.png
>
>
> {code:java}
> Job aborted due to stage failure: Task 0 in stage 569.0 failed 1 times, most
> recent failure: Lost task 0.0 in stage 569.0 (TID 1683)
> (fv-az163-354.ltzebx3loavedkx3ppd21kczsa.ex.internal.cloudapp.net executor
> driver): org.apache.hudi.exception.HoodieRemoteException: Server Error
> at
> org.apache.hudi.common.table.view.RemoteHoodieTableFileSystemView.getLatestFileSlicesStateless(RemoteHoodieTableFileSystemView.java:347)
> at
> org.apache.hudi.table.action.compact.plan.generators.BaseHoodieCompactionPlanGenerator.lambda$generateCompactionPlan$7141a08d$1(BaseHoodieCompactionPlanGenerator.java:120)
> at
> org.apache.hudi.client.common.HoodieSparkEngineContext.lambda$flatMap$7d470b86$1(HoodieSparkEngineContext.java:150)
> at
> org.apache.spark.api.java.JavaRDDLike.$anonfun$flatMap$1(JavaRDDLike.scala:125)
> at scala.collection.Iterator$$anon$11.nextCur(Iterator.scala:484)
> at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:490)
> at scala.collection.Iterator.foreach(Iterator.scala:941)
> at scala.collection.Iterator.foreach$(Iterator.scala:941)
> at scala.collection.AbstractIterator.foreach(Iterator.scala:1429)
> at scala.collection.generic.Growable.$plus$plus$eq(Growable.scala:62)
> at scala.collection.generic.Growable.$plus$plus$eq$(Growable.scala:53)
> at scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:105)
> at scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:49)
> at scala.collection.TraversableOnce.to(TraversableOnce.scala:315)
> at scala.collection.TraversableOnce.to$(TraversableOnce.scala:313)
> at scala.collection.AbstractIterator.to(Iterator.scala:1429)
> at scala.collection.TraversableOnce.toBuffer(TraversableOnce.scala:307)
> at scala.collection.TraversableOnce.toBuffer$(TraversableOnce.scala:307)
> at scala.collection.AbstractIterator.toBuffer(Iterator.scala:1429)
> at scala.collection.TraversableOnce.toArray(TraversableOnce.scala:294)
> at scala.collection.TraversableOnce.toArray$(TraversableOnce.scala:288)
> at scala.collection.AbstractIterator.toArray(Iterator.scala:1429)
> at org.apache.spark.rdd.RDD.$anonfun$collect$2(RDD.scala:1030)
> at org.apache.spark.SparkContext.$anonfun$runJob$5(SparkContext.scala:2254)
> at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:90)
> at org.apache.spark.scheduler.Task.run(Task.scala:131)
> at
> org.apache.spark.executor.Executor$TaskRunner.$anonfun$run$3(Executor.scala:506)
> at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1491)
> at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:509)
> at
> java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
> at
> java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
> at java.lang.Thread.run(Thread.java:750)
> Caused by: org.apache.http.client.HttpResponseException: Server Error
> at
> org.apache.http.impl.client.AbstractResponseHandler.handleResponse(AbstractResponseHandler.java:69)
> at org.apache.http.client.fluent.Response.handleResponse(Response.java:90)
> at org.apache.http.client.fluent.Response.returnContent(Response.java:97)
> at
> org.apache.hudi.common.table.view.RemoteHoodieTableFileSystemView.executeRequest(RemoteHoodieTableFileSystemView.java:208)
> at
> org.apache.hudi.common.table.view.RemoteHoodieTableFileSystemView.getLatestFileSlicesStateless(RemoteHoodieTableFileSystemView.java:343)
> ... 31 more
> Driver stacktrace:
> Stack trace: org.apache.spark.SparkException:
> Job aborted due to stage failure: Task 0 in stage 569.0 failed 1 times, most
> recent failure: Lost task 0.0 in stage 569.0 (TID 1683)
> (fv-az163-354.ltzebx3loavedkx3ppd21kczsa.ex.internal.cloudapp.net executor
> driver): org.apache.hudi.exception.HoodieRemoteException: Server Error
> at
> org.apache.hudi.common.table.view.RemoteHoodieTableFileSystemView.getLatestFileSlicesStateless(RemoteHoodieTableFileSystemView.java:347)
> at
> org.apache.hudi.table.action.compact.plan.generators.BaseHoodieCompactionPlanGenerator.lambda$generateCompactionPlan$7141a08d$1(BaseHoodieCompactionPlanGenerator.java:120)
> at
> org.apache.hudi.client.common.HoodieSparkEngineContext.lambda$flatMap$7d470b86$1(HoodieSparkEngineContext.java:150)
> at
> org.apache.spark.api.java.JavaRDDLike.$anonfun$flatMap$1(JavaRDDLike.scala:125)
> at scala.collection.Iterator$$anon$11.nextCur(Iterator.scala:484)
> at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:490)
> at scala.collection.Iterator.foreach(Iterator.scala:941)
> at scala.collection.Iterator.foreach$(Iterator.scala:941)
> at scala.collection.AbstractIterator.foreach(Iterator.scala:1429)
> at scala.collection.generic.Growable.$plus$plus$eq(Growable.scala:62)
> at scala.collection.generic.Growable.$plus$plus$eq$(Growable.scala:53)
> at
> scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:105)
> at
> scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:49)
> at scala.collection.TraversableOnce.to(TraversableOnce.scala:315)
> at scala.collection.TraversableOnce.to$(TraversableOnce.scala:313)
> at scala.collection.AbstractIterator.to(Iterator.scala:1429)
> at scala.collection.TraversableOnce.toBuffer(TraversableOnce.scala:307)
> at scala.collection.TraversableOnce.toBuffer$(TraversableOnce.scala:307)
> at scala.collection.AbstractIterator.toBuffer(Iterator.scala:1429)
> at scala.collection.TraversableOnce.toArray(TraversableOnce.scala:294)
> at scala.collection.TraversableOnce.toArray$(TraversableOnce.scala:288)
> at scala.collection.AbstractIterator.toArray(Iterator.scala:1429)
> at org.apache.spark.rdd.RDD.$anonfun$collect$2(RDD.scala:1030)
> at
> org.apache.spark.SparkContext.$anonfun$runJob$5(SparkContext.scala:2254)
> at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:90)
> at org.apache.spark.scheduler.Task.run(Task.scala:131)
> at
> org.apache.spark.executor.Executor$TaskRunner.$anonfun$run$3(Executor.scala:506)
> at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1491)
> at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:509)
> at
> java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
> at
> java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
> at java.lang.Thread.run(Thread.java:750)
> Caused by: org.apache.http.client.HttpResponseException: Server Error
> at
> org.apache.http.impl.client.AbstractResponseHandler.handleResponse(AbstractResponseHandler.java:69)
> at
> org.apache.http.client.fluent.Response.handleResponse(Response.java:90)
> at
> org.apache.http.client.fluent.Response.returnContent(Response.java:97)
> at
> org.apache.hudi.common.table.view.RemoteHoodieTableFileSystemView.executeRequest(RemoteHoodieTableFileSystemView.java:208)
> at
> org.apache.hudi.common.table.view.RemoteHoodieTableFileSystemView.getLatestFileSlicesStateless(RemoteHoodieTableFileSystemView.java:343)
> ... 31 more
> Driver stacktrace:
> at
> org.apache.spark.scheduler.DAGScheduler.failJobAndIndependentStages(DAGScheduler.scala:2454)
> at
> org.apache.spark.scheduler.DAGScheduler.$anonfun$abortStage$2(DAGScheduler.scala:2403)
> at
> org.apache.spark.scheduler.DAGScheduler.$anonfun$abortStage$2$adapted(DAGScheduler.scala:2402)
> at
> scala.collection.mutable.ResizableArray.foreach(ResizableArray.scala:62)
> at
> scala.collection.mutable.ResizableArray.foreach$(ResizableArray.scala:55)
> at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:49)
> at
> org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:2402)
> {code}
--
This message was sent by Atlassian Jira
(v8.20.10#820010)