PhantomHunt commented on issue #8678:
URL: https://github.com/apache/hudi/issues/8678#issuecomment-1545918096
Hi @ad1happy2go, we added this configuration to the table (which has 999+ objects):
```
'hoodie.compact.inline': 'true',
# 'hoodie.compact.inline.max.delta.commits': compaction_commits
```
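For context, a minimal sketch of how we apply these options on write (PySpark); the table name and the other writer settings below are illustrative placeholders, not our exact job:

```
# Hypothetical writer call: only the two compaction settings above come
# from our job; the table name, table type, and `df` are placeholders.
hudi_options = {
    'hoodie.table.name': 'asset',  # hypothetical
    'hoodie.datasource.write.table.type': 'MERGE_ON_READ',
    'hoodie.compact.inline': 'true',
    # 'hoodie.compact.inline.max.delta.commits': compaction_commits,
}

(df.write
    .format('hudi')
    .options(**hudi_options)
    .mode('append')
    .save('s3a://***/assets/asset'))  # masked path, as in the error below
```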
With that config in place, the save call fails with this error:
```
An error occurred while calling o469.save.
: org.apache.hudi.exception.HoodieCompactionException: Could not compact s3a://***/assets/asset
    at org.apache.hudi.table.action.compact.RunCompactionActionExecutor.execute(RunCompactionActionExecutor.java:116)
    at org.apache.hudi.table.HoodieSparkMergeOnReadTable.compact(HoodieSparkMergeOnReadTable.java:140)
    at org.apache.hudi.client.SparkRDDTableServiceClient.compact(SparkRDDTableServiceClient.java:75)
    at org.apache.hudi.client.BaseHoodieTableServiceClient.lambda$runAnyPendingCompactions$2(BaseHoodieTableServiceClient.java:193)
    at java.base/java.util.ArrayList.forEach(ArrayList.java:1541)
    at org.apache.hudi.client.BaseHoodieTableServiceClient.runAnyPendingCompactions(BaseHoodieTableServiceClient.java:191)
    at org.apache.hudi.client.BaseHoodieTableServiceClient.inlineCompaction(BaseHoodieTableServiceClient.java:162)
    at org.apache.hudi.client.BaseHoodieTableServiceClient.runTableServicesInline(BaseHoodieTableServiceClient.java:336)
    at org.apache.hudi.client.BaseHoodieWriteClient.runTableServicesInline(BaseHoodieWriteClient.java:542)
    at org.apache.hudi.client.BaseHoodieWriteClient.commitStats(BaseHoodieWriteClient.java:249)
    at org.apache.hudi.client.SparkRDDWriteClient.commit(SparkRDDWriteClient.java:102)
    at org.apache.hudi.HoodieSparkSqlWriter$.commitAndPerformPostOperations(HoodieSparkSqlWriter.scala:945)
    at org.apache.hudi.HoodieSparkSqlWriter$.write(HoodieSparkSqlWriter.scala:372)
    at org.apache.hudi.DefaultSource.createRelation(DefaultSource.scala:150)
    at org.apache.spark.sql.execution.datasources.SaveIntoDataSourceCommand.run(SaveIntoDataSourceCommand.scala:47)
    at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult$lzycompute(commands.scala:75)
    at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult(commands.scala:73)
    at org.apache.spark.sql.execution.command.ExecutedCommandExec.executeCollect(commands.scala:84)
    at org.apache.spark.sql.execution.QueryExecution$$anonfun$eagerlyExecuteCommands$1.$anonfun$applyOrElse$1(QueryExecution.scala:98)
    at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$6(SQLExecution.scala:109)
    at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:169)
    at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$1(SQLExecution.scala:95)
    at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:779)
    at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:64)
    at org.apache.spark.sql.execution.QueryExecution$$anonfun$eagerlyExecuteCommands$1.applyOrElse(QueryExecution.scala:98)
    at org.apache.spark.sql.execution.QueryExecution$$anonfun$eagerlyExecuteCommands$1.applyOrElse(QueryExecution.scala:94)
    at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformDownWithPruning$1(TreeNode.scala:584)
    at org.apache.spark.sql.catalyst.trees.CurrentOrigin$.withOrigin(TreeNode.scala:176)
    at org.apache.spark.sql.catalyst.trees.TreeNode.transformDownWithPruning(TreeNode.scala:584)
    at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.org$apache$spark$sql$catalyst$plans$logical$AnalysisHelper$$super$transformDownWithPruning(LogicalPlan.scala:30)
    at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDownWithPruning(AnalysisHelper.scala:267)
    at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDownWithPruning$(AnalysisHelper.scala:263)
    at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDownWithPruning(LogicalPlan.scala:30)
    at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDownWithPruning(LogicalPlan.scala:30)
    at org.apache.spark.sql.catalyst.trees.TreeNode.transformDown(TreeNode.scala:560)
    at org.apache.spark.sql.execution.QueryExecution.eagerlyExecuteCommands(QueryExecution.scala:94)
    at org.apache.spark.sql.execution.QueryExecution.commandExecuted$lzycompute(QueryExecution.scala:81)
    at org.apache.spark.sql.execution.QueryExecution.commandExecuted(QueryExecution.scala:79)
    at org.apache.spark.sql.execution.QueryExecution.assertCommandExecuted(QueryExecution.scala:116)
    at org.apache.spark.sql.DataFrameWriter.runCommand(DataFrameWriter.scala:860)
    at org.apache.spark.sql.DataFrameWriter.saveToV1Source(DataFrameWriter.scala:390)
    at org.apache.spark.sql.DataFrameWriter.saveInternal(DataFrameWriter.scala:363)
    at org.apache.spark.sql.DataFrameWriter.save(DataFrameWriter.scala:239)
    at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.base/java.lang.reflect.Method.invoke(Method.java:566)
    at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:244)
    at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:357)
    at py4j.Gateway.invoke(Gateway.java:282)
    at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)
    at py4j.commands.CallCommand.execute(CallCommand.java:79)
    at py4j.ClientServerConnection.waitForCommands(ClientServerConnection.java:182)
    at py4j.ClientServerConnection.run(ClientServerConnection.java:106)
    at java.base/java.lang.Thread.run(Thread.java:829)
Caused by: org.apache.spark.SparkException: Job aborted due to stage failure: Task 0 in stage 301.0 failed 1 times, most recent failure: Lost task 0.0 in stage 301.0 (TID 1868) (ip-10-208-6-19.us-west-2.compute.internal executor driver): org.apache.hudi.exception.HoodieException: Exception when reading log file
    at org.apache.hudi.common.table.log.AbstractHoodieLogRecordReader.scanInternalV1(AbstractHoodieLogRecordReader.java:376)
    at org.apache.hudi.common.table.log.AbstractHoodieLogRecordReader.scanInternal(AbstractHoodieLogRecordReader.java:223)
    at org.apache.hudi.common.table.log.HoodieMergedLogRecordScanner.performScan(HoodieMergedLogRecordScanner.java:198)
    at org.apache.hudi.common.table.log.HoodieMergedLogRecordScanner.<init>(HoodieMergedLogRecordScanner.java:114)
    at org.apache.hudi.common.table.log.HoodieMergedLogRecordScanner.<init>(HoodieMergedLogRecordScanner.java:73)
    at org.apache.hudi.common.table.log.HoodieMergedLogRecordScanner$Builder.build(HoodieMergedLogRecordScanner.java:464)
    at org.apache.hudi.table.action.compact.HoodieCompactor.compact(HoodieCompactor.java:204)
    at org.apache.hudi.table.action.compact.HoodieCompactor.lambda$compact$9cd4b1be$1(HoodieCompactor.java:129)
    at org.apache.spark.api.java.JavaPairRDD$.$anonfun$toScalaFunction$1(JavaPairRDD.scala:1070)
    at scala.collection.Iterator$$anon$10.next(Iterator.scala:461)
    at scala.collection.Iterator$$anon$11.nextCur(Iterator.scala:486)
    at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:492)
    at org.apache.spark.storage.memory.MemoryStore.putIterator(MemoryStore.scala:223)
    at org.apache.spark.storage.memory.MemoryStore.putIteratorAsBytes(MemoryStore.scala:352)
    at org.apache.spark.storage.BlockManager.$anonfun$doPutIterator$1(BlockManager.scala:1535)
    at org.apache.spark.storage.BlockManager.org$apache$spark$storage$BlockManager$$doPut(BlockManager.scala:1445)
    at org.apache.spark.storage.BlockManager.doPutIterator(BlockManager.scala:1509)
    at org.apache.spark.storage.BlockManager.getOrElseUpdate(BlockManager.scala:1332)
    at org.apache.spark.rdd.RDD.getOrCompute(RDD.scala:376)
    at org.apache.spark.rdd.RDD.iterator(RDD.scala:327)
    at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
    at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:365)
    at org.apache.spark.rdd.RDD.iterator(RDD.scala:329)
    at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:90)
    at org.apache.spark.scheduler.Task.run(Task.scala:136)
    at org.apache.spark.executor.Executor$TaskRunner.$anonfun$run$3(Executor.scala:548)
    at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1504)
    at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:551)
    at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)
    at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)
    at java.base/java.lang.Thread.run(Thread.java:829)
Caused by: org.apache.hudi.exception.HoodieIOException: unable to initialize read with log file
    at org.apache.hudi.common.table.log.HoodieLogFormatReader.hasNext(HoodieLogFormatReader.java:113)
    at org.apache.hudi.common.table.log.AbstractHoodieLogRecordReader.scanInternalV1(AbstractHoodieLogRecordReader.java:247)
    ... 30 more
Caused by: java.io.InterruptedIOException: getFileStatus on s3a://gn-video-richmedia-nonprod-hudi-tables/nonprod_hudi_tables/assets/asset/.d9c690aa-d02c-45cc-826c-03a022b52d91-0_20230328091114717.log.46_0-23-37: com.amazonaws.SdkClientException: Unable to execute HTTP request: Timeout waiting for connection from pool
    at org.apache.hadoop.fs.s3a.S3AUtils.translateInterruptedException(S3AUtils.java:352)
    at org.apache.hadoop.fs.s3a.S3AUtils.translateException(S3AUtils.java:177)
    at org.apache.hadoop.fs.s3a.S3AUtils.translateException(S3AUtils.java:151)
    at org.apache.hadoop.fs.s3a.S3AFileSystem.s3GetFileStatus(S3AFileSystem.java:2278)
    at org.apache.hadoop.fs.s3a.S3AFileSystem.innerGetFileStatus(S3AFileSystem.java:2226)
    at org.apache.hadoop.fs.s3a.S3AFileSystem.getFileStatus(S3AFileSystem.java:2160)
    at org.apache.hadoop.fs.s3a.S3AFileSystem.open(S3AFileSystem.java:727)
    at org.apache.hudi.common.fs.HoodieWrapperFileSystem.open(HoodieWrapperFileSystem.java:203)
    at org.apache.hudi.common.table.log.HoodieLogFileReader.getFSDataInputStream(HoodieLogFileReader.java:498)
    at org.apache.hudi.common.table.log.HoodieLogFileReader.<init>(HoodieLogFileReader.java:118)
    at org.apache.hudi.common.table.log.HoodieLogFormatReader.hasNext(HoodieLogFormatReader.java:110)
    ... 31 more
Caused by: com.amazonaws.SdkClientException: Unable to execute HTTP request: Timeout waiting for connection from pool
    at com.amazonaws.http.AmazonHttpClient$RequestExecutor.handleRetryableException(AmazonHttpClient.java:1216)
    at com.amazonaws.http.AmazonHttpClient$RequestExecutor.executeHelper(AmazonHttpClient.java:1162)
    at com.amazonaws.http.AmazonHttpClient$RequestExecutor.doExecute(AmazonHttpClient.java:811)
    at com.amazonaws.http.AmazonHttpClient$RequestExecutor.executeWithTimer(AmazonHttpClient.java:779)
    at com.amazonaws.http.AmazonHttpClient$RequestExecutor.execute(AmazonHttpClient.java:753)
    at com.amazonaws.http.AmazonHttpClient$RequestExecutor.access$500(AmazonHttpClient.java:713)
    at com.amazonaws.http.AmazonHttpClient$RequestExecutionBuilderImpl.execute(AmazonHttpClient.java:695)
    at com.amazonaws.http.AmazonHttpClient.execute(AmazonHttpClient.java:559)
    at com.amazonaws.http.AmazonHttpClient.execute(AmazonHttpClient.java:539)
    at com.amazonaws.services.s3.AmazonS3Client.invoke(AmazonS3Client.java:5453)
    at com.amazonaws.services.s3.AmazonS3Client.invoke(AmazonS3Client.java:5400)
    at com.amazonaws.services.s3.AmazonS3Client.getObjectMetadata(AmazonS3Client.java:1372)
    at org.apache.hadoop.fs.s3a.S3AFileSystem.lambda$getObjectMetadata$4(S3AFileSystem.java:1307)
    at org.apache.hadoop.fs.s3a.Invoker.retryUntranslated(Invoker.java:322)
    at org.apache.hadoop.fs.s3a.Invoker.retryUntranslated(Invoker.java:285)
    at org.apache.hadoop.fs.s3a.S3AFileSystem.getObjectMetadata(S3AFileSystem.java:1304)
    at org.apache.hadoop.fs.s3a.S3AFileSystem.s3GetFileStatus(S3AFileSystem.java:2264)
    ... 38 more
Caused by: com.amazonaws.thirdparty.apache.http.conn.ConnectionPoolTimeoutException: Timeout waiting for connection from pool
    at com.amazonaws.thirdparty.apache.http.impl.conn.PoolingHttpClientConnectionManager.leaseConnection(PoolingHttpClientConnectionManager.java:316)
    at com.amazonaws.thirdparty.apache.http.impl.conn.PoolingHttpClientConnectionManager$1.get(PoolingHttpClientConnectionManager.java:282)
    at jdk.internal.reflect.GeneratedMethodAccessor22.invoke(Unknown Source)
    at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.base/java.lang.reflect.Method.invoke(Method.java:566)
    at com.amazonaws.http.conn.ClientConnectionRequestFactory$Handler.invoke(ClientConnectionRequestFactory.java:70)
    at com.amazonaws.http.conn.$Proxy34.get(Unknown Source)
    at com.amazonaws.thirdparty.apache.http.impl.execchain.MainClientExec.execute(MainClientExec.java:190)
    at com.amazonaws.thirdparty.apache.http.impl.execchain.ProtocolExec.execute(ProtocolExec.java:186)
    at com.amazonaws.thirdparty.apache.http.impl.client.InternalHttpClient.doExecute(InternalHttpClient.java:185)
    at com.amazonaws.thirdparty.apache.http.impl.client.CloseableHttpClient.execute(CloseableHttpClient.java:83)
    at com.amazonaws.thirdparty.apache.http.impl.client.CloseableHttpClient.execute(CloseableHttpClient.java:56)
    at com.amazonaws.http.apache.client.impl.SdkHttpClient.execute(SdkHttpClient.java:72)
    at com.amazonaws.http.AmazonHttpClient$RequestExecutor.executeOneRequest(AmazonHttpClient.java:1343)
    at com.amazonaws.http.AmazonHttpClient$RequestExecutor.executeHelper(AmazonHttpClient.java:1154)
    ... 53 more
Driver stacktrace:
    at org.apache.spark.scheduler.DAGScheduler.failJobAndIndependentStages(DAGScheduler.scala:2672)
    at org.apache.spark.scheduler.DAGScheduler.$anonfun$abortStage$2(DAGScheduler.scala:2608)
    at org.apache.spark.scheduler.DAGScheduler.$anonfun$abortStage$2$adapted(DAGScheduler.scala:2607)
    at scala.collection.mutable.ResizableArray.foreach(ResizableArray.scala:62)
    at scala.collection.mutable.ResizableArray.foreach$(ResizableArray.scala:55)
    at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:49)
    at org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:2607)
    at org.apache.spark.scheduler.DAGScheduler.$anonfun$handleTaskSetFailed$1(DAGScheduler.scala:1182)
    at org.apache.spark.scheduler.DAGScheduler.$anonfun$handleTaskSetFailed$1$adapted(DAGScheduler.scala:1182)
    at scala.Option.foreach(Option.scala:407)
    at org.apache.spark.scheduler.DAGScheduler.handleTaskSetFailed(DAGScheduler.scala:1182)
    at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.doOnReceive(DAGScheduler.scala:2860)
    at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:2802)
    at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:2791)
    at org.apache.spark.util.EventLoop$$anon$1.run(EventLoop.scala:49)
    at org.apache.spark.scheduler.DAGScheduler.runJob(DAGScheduler.scala:952)
    at org.apache.spark.SparkContext.runJob(SparkContext.scala:2228)
    at org.apache.spark.SparkContext.runJob(SparkContext.scala:2249)
    at org.apache.spark.SparkContext.runJob(SparkContext.scala:2268)
    at org.apache.spark.SparkContext.runJob(SparkContext.scala:2293)
    at org.apache.spark.rdd.RDD.$anonfun$collect$1(RDD.scala:1021)
    at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
    at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
    at org.apache.spark.rdd.RDD.withScope(RDD.scala:406)
    at org.apache.spark.rdd.RDD.collect(RDD.scala:1020)
    at org.apache.spark.api.java.JavaRDDLike.collect(JavaRDDLike.scala:362)
    at org.apache.spark.api.java.JavaRDDLike.collect$(JavaRDDLike.scala:361)
    at org.apache.spark.api.java.AbstractJavaRDDLike.collect(JavaRDDLike.scala:45)
    at org.apache.hudi.data.HoodieJavaRDD.collectAsList(HoodieJavaRDD.java:163)
    at org.apache.hudi.table.action.compact.RunCompactionActionExecutor.execute(RunCompactionActionExecutor.java:101)
    ... 54 more
Caused by: org.apache.hudi.exception.HoodieException: Exception when reading log file
    at org.apache.hudi.common.table.log.AbstractHoodieLogRecordReader.scanInternalV1(AbstractHoodieLogRecordReader.java:376)
    at org.apache.hudi.common.table.log.AbstractHoodieLogRecordReader.scanInternal(AbstractHoodieLogRecordReader.java:223)
    at org.apache.hudi.common.table.log.HoodieMergedLogRecordScanner.performScan(HoodieMergedLogRecordScanner.java:198)
    at org.apache.hudi.common.table.log.HoodieMergedLogRecordScanner.<init>(HoodieMergedLogRecordScanner.java:114)
    at org.apache.hudi.common.table.log.HoodieMergedLogRecordScanner.<init>(HoodieMergedLogRecordScanner.java:73)
    at org.apache.hudi.common.table.log.HoodieMergedLogRecordScanner$Builder.build(HoodieMergedLogRecordScanner.java:464)
    at org.apache.hudi.table.action.compact.HoodieCompactor.compact(HoodieCompactor.java:204)
    at org.apache.hudi.table.action.compact.HoodieCompactor.lambda$compact$9cd4b1be$1(HoodieCompactor.java:129)
    at org.apache.spark.api.java.JavaPairRDD$.$anonfun$toScalaFunction$1(JavaPairRDD.scala:1070)
    at scala.collection.Iterator$$anon$10.next(Iterator.scala:461)
    at scala.collection.Iterator$$anon$11.nextCur(Iterator.scala:486)
    at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:492)
    at org.apache.spark.storage.memory.MemoryStore.putIterator(MemoryStore.scala:223)
    at org.apache.spark.storage.memory.MemoryStore.putIteratorAsBytes(MemoryStore.scala:352)
    at org.apache.spark.storage.BlockManager.$anonfun$doPutIterator$1(BlockManager.scala:1535)
    at org.apache.spark.storage.BlockManager.org$apache$spark$storage$BlockManager$$doPut(BlockManager.scala:1445)
    at org.apache.spark.storage.BlockManager.doPutIterator(BlockManager.scala:1509)
    at org.apache.spark.storage.BlockManager.getOrElseUpdate(BlockManager.scala:1332)
    at org.apache.spark.rdd.RDD.getOrCompute(RDD.scala:376)
    at org.apache.spark.rdd.RDD.iterator(RDD.scala:327)
    at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
    at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:365)
    at org.apache.spark.rdd.RDD.iterator(RDD.scala:329)
    at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:90)
    at org.apache.spark.scheduler.Task.run(Task.scala:136)
    at org.apache.spark.executor.Executor$TaskRunner.$anonfun$run$3(Executor.scala:548)
    at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1504)
    at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:551)
    at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)
    at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)
    ... 1 more
Caused by: org.apache.hudi.exception.HoodieIOException: unable to initialize read with log file
    at org.apache.hudi.common.table.log.HoodieLogFormatReader.hasNext(HoodieLogFormatReader.java:113)
    at org.apache.hudi.common.table.log.AbstractHoodieLogRecordReader.scanInternalV1(AbstractHoodieLogRecordReader.java:247)
    ... 30 more
Caused by: java.io.InterruptedIOException: getFileStatus on s3a://***/assets/asset/.d9c690aa-d02c-45cc-826c-03a022b52d91-0_20230328091114717.log.46_0-23-37: com.amazonaws.SdkClientException: Unable to execute HTTP request: Timeout waiting for connection from pool
    at org.apache.hadoop.fs.s3a.S3AUtils.translateInterruptedException(S3AUtils.java:352)
    at org.apache.hadoop.fs.s3a.S3AUtils.translateException(S3AUtils.java:177)
    at org.apache.hadoop.fs.s3a.S3AUtils.translateException(S3AUtils.java:151)
    at org.apache.hadoop.fs.s3a.S3AFileSystem.s3GetFileStatus(S3AFileSystem.java:2278)
    at org.apache.hadoop.fs.s3a.S3AFileSystem.innerGetFileStatus(S3AFileSystem.java:2226)
    at org.apache.hadoop.fs.s3a.S3AFileSystem.getFileStatus(S3AFileSystem.java:2160)
    at org.apache.hadoop.fs.s3a.S3AFileSystem.open(S3AFileSystem.java:727)
    at org.apache.hudi.common.fs.HoodieWrapperFileSystem.open(HoodieWrapperFileSystem.java:203)
    at org.apache.hudi.common.table.log.HoodieLogFileReader.getFSDataInputStream(HoodieLogFileReader.java:498)
    at org.apache.hudi.common.table.log.HoodieLogFileReader.<init>(HoodieLogFileReader.java:118)
    at org.apache.hudi.common.table.log.HoodieLogFormatReader.hasNext(HoodieLogFormatReader.java:110)
    ... 31 more
Caused by: com.amazonaws.SdkClientException: Unable to execute HTTP request: Timeout waiting for connection from pool
    at com.amazonaws.http.AmazonHttpClient$RequestExecutor.handleRetryableException(AmazonHttpClient.java:1216)
    at com.amazonaws.http.AmazonHttpClient$RequestExecutor.executeHelper(AmazonHttpClient.java:1162)
    at com.amazonaws.http.AmazonHttpClient$RequestExecutor.doExecute(AmazonHttpClient.java:811)
    at com.amazonaws.http.AmazonHttpClient$RequestExecutor.executeWithTimer(AmazonHttpClient.java:779)
    at com.amazonaws.http.AmazonHttpClient$RequestExecutor.execute(AmazonHttpClient.java:753)
    at com.amazonaws.http.AmazonHttpClient$RequestExecutor.access$500(AmazonHttpClient.java:713)
    at com.amazonaws.http.AmazonHttpClient$RequestExecutionBuilderImpl.execute(AmazonHttpClient.java:695)
    at com.amazonaws.http.AmazonHttpClient.execute(AmazonHttpClient.java:559)
    at com.amazonaws.http.AmazonHttpClient.execute(AmazonHttpClient.java:539)
    at com.amazonaws.services.s3.AmazonS3Client.invoke(AmazonS3Client.java:5453)
    at com.amazonaws.services.s3.AmazonS3Client.invoke(AmazonS3Client.java:5400)
    at com.amazonaws.services.s3.AmazonS3Client.getObjectMetadata(AmazonS3Client.java:1372)
    at org.apache.hadoop.fs.s3a.S3AFileSystem.lambda$getObjectMetadata$4(S3AFileSystem.java:1307)
    at org.apache.hadoop.fs.s3a.Invoker.retryUntranslated(Invoker.java:322)
    at org.apache.hadoop.fs.s3a.Invoker.retryUntranslated(Invoker.java:285)
    at org.apache.hadoop.fs.s3a.S3AFileSystem.getObjectMetadata(S3AFileSystem.java:1304)
    at org.apache.hadoop.fs.s3a.S3AFileSystem.s3GetFileStatus(S3AFileSystem.java:2264)
    ... 38 more
Caused by: com.amazonaws.thirdparty.apache.http.conn.ConnectionPoolTimeoutException: Timeout waiting for connection from pool
    at com.amazonaws.thirdparty.apache.http.impl.conn.PoolingHttpClientConnectionManager.leaseConnection(PoolingHttpClientConnectionManager.java:316)
    at com.amazonaws.thirdparty.apache.http.impl.conn.PoolingHttpClientConnectionManager$1.get(PoolingHttpClientConnectionManager.java:282)
    at jdk.internal.reflect.GeneratedMethodAccessor22.invoke(Unknown Source)
    at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.base/java.lang.reflect.Method.invoke(Method.java:566)
    at com.amazonaws.http.conn.ClientConnectionRequestFactory$Handler.invoke(ClientConnectionRequestFactory.java:70)
    at com.amazonaws.http.conn.$Proxy34.get(Unknown Source)
    at com.amazonaws.thirdparty.apache.http.impl.execchain.MainClientExec.execute(MainClientExec.java:190)
    at com.amazonaws.thirdparty.apache.http.impl.execchain.ProtocolExec.execute(ProtocolExec.java:186)
    at com.amazonaws.thirdparty.apache.http.impl.client.InternalHttpClient.doExecute(InternalHttpClient.java:185)
    at com.amazonaws.thirdparty.apache.http.impl.client.CloseableHttpClient.execute(CloseableHttpClient.java:83)
    at com.amazonaws.thirdparty.apache.http.impl.client.CloseableHttpClient.execute(CloseableHttpClient.java:56)
    at com.amazonaws.http.apache.client.impl.SdkHttpClient.execute(SdkHttpClient.java:72)
    at com.amazonaws.http.AmazonHttpClient$RequestExecutor.executeOneRequest(AmazonHttpClient.java:1343)
    at com.amazonaws.http.AmazonHttpClient$RequestExecutor.executeHelper(AmazonHttpClient.java:1154)
    ... 53 more
```
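Looking at the innermost `Caused by`, the failure is the S3A client timing out while waiting for an HTTP connection from its pool (`ConnectionPoolTimeoutException`), i.e. compaction appears to open more concurrent S3 reads than the pool allows. A minimal sketch of one thing we may try, raising the standard Hadoop S3A pool limit at session build time (the value 200 is only an illustrative guess, not something we have validated):

```
from pyspark.sql import SparkSession

# Sketch: enlarge the S3A HTTP connection pool before writing.
# 'fs.s3a.connection.maximum' is the standard Hadoop S3A pool-size
# setting; 200 is an arbitrary example value.
spark = (
    SparkSession.builder
    .appName('hudi-asset-writer')  # hypothetical app name
    .config('spark.hadoop.fs.s3a.connection.maximum', '200')
    .getOrCreate()
)
```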