alexey-fin opened a new issue #3046:
URL: https://github.com/apache/iceberg/issues/3046


   We've upgraded to Iceberg 0.12 and immediately started getting 
"com.amazonaws.SdkClientException: Unable to execute HTTP request: Timeout 
waiting for connection from pool"; reverting back to 0.11 resolves the issue.
   When taking a thread dump, all Iceberg threads are in 
   
   `sun.misc.Unsafe.park(Native Method)
   java.util.concurrent.locks.LockSupport.parkUntil(LockSupport.java:256)
   
java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitUntil(AbstractQueuedSynchronizer.java:2125)
   
org.apache.http.pool.AbstractConnPool.getPoolEntryBlocking(AbstractConnPool.java:391)
   org.apache.http.pool.AbstractConnPool.access$300(AbstractConnPool.java:70)
   org.apache.http.pool.AbstractConnPool$2.get(AbstractConnPool.java:253) => 
holding Monitor(org.apache.http.pool.AbstractConnPool$2@636063850})
   org.apache.http.pool.AbstractConnPool$2.get(AbstractConnPool.java:198)
   
org.apache.http.impl.conn.PoolingHttpClientConnectionManager.leaseConnection(PoolingHttpClientConnectionManager.java:303)
   
org.apache.http.impl.conn.PoolingHttpClientConnectionManager$1.get(PoolingHttpClientConnectionManager.java:279)
   sun.reflect.GeneratedMethodAccessor152.invoke(Unknown Source)
   
sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
   java.lang.reflect.Method.invoke(Method.java:498)
   
com.amazonaws.http.conn.ClientConnectionRequestFactory$Handler.invoke(ClientConnectionRequestFactory.java:70)
   com.amazonaws.http.conn.$Proxy46.get(Unknown Source)
   
org.apache.http.impl.execchain.MainClientExec.execute(MainClientExec.java:191)
   org.apache.http.impl.execchain.ProtocolExec.execute(ProtocolExec.java:185)
   
org.apache.http.impl.client.InternalHttpClient.doExecute(InternalHttpClient.java:185)
   
org.apache.http.impl.client.CloseableHttpClient.execute(CloseableHttpClient.java:83)
   
org.apache.http.impl.client.CloseableHttpClient.execute(CloseableHttpClient.java:56)
   
com.amazonaws.http.apache.client.impl.SdkHttpClient.execute(SdkHttpClient.java:72)
   
com.amazonaws.http.AmazonHttpClient$RequestExecutor.executeOneRequest(AmazonHttpClient.java:1297)
   
com.amazonaws.http.AmazonHttpClient$RequestExecutor.executeHelper(AmazonHttpClient.java:1113)
   
com.amazonaws.http.AmazonHttpClient$RequestExecutor.doExecute(AmazonHttpClient.java:770)
   
com.amazonaws.http.AmazonHttpClient$RequestExecutor.executeWithTimer(AmazonHttpClient.java:744)
   
com.amazonaws.http.AmazonHttpClient$RequestExecutor.execute(AmazonHttpClient.java:726)
   
com.amazonaws.http.AmazonHttpClient$RequestExecutor.access$500(AmazonHttpClient.java:686)
   
com.amazonaws.http.AmazonHttpClient$RequestExecutionBuilderImpl.execute(AmazonHttpClient.java:668)
   com.amazonaws.http.AmazonHttpClient.execute(AmazonHttpClient.java:532)
   com.amazonaws.http.AmazonHttpClient.execute(AmazonHttpClient.java:512)
   com.amazonaws.services.s3.AmazonS3Client.invoke(AmazonS3Client.java:4926)
   com.amazonaws.services.s3.AmazonS3Client.invoke(AmazonS3Client.java:4872)
   com.amazonaws.services.s3.AmazonS3Client.getObject(AmazonS3Client.java:1472)
   
shaded.databricks.org.apache.hadoop.fs.s3a.S3AInputStream.lambda$reopen$0(S3AInputStream.java:186)
   
shaded.databricks.org.apache.hadoop.fs.s3a.S3AInputStream$$Lambda$3104/683710472.execute(Unknown
 Source)
   shaded.databricks.org.apache.hadoop.fs.s3a.Invoker.once(Invoker.java:109)
   
shaded.databricks.org.apache.hadoop.fs.s3a.S3AInputStream.reopen(S3AInputStream.java:185)
 => holding 
Monitor(shaded.databricks.org.apache.hadoop.fs.s3a.S3AInputStream@529646547})
   
shaded.databricks.org.apache.hadoop.fs.s3a.S3AInputStream.lambda$lazySeek$1(S3AInputStream.java:333)
   
shaded.databricks.org.apache.hadoop.fs.s3a.S3AInputStream$$Lambda$3102/1793658966.execute(Unknown
 Source)
   
shaded.databricks.org.apache.hadoop.fs.s3a.Invoker.lambda$retry$2(Invoker.java:195)
   
shaded.databricks.org.apache.hadoop.fs.s3a.Invoker$$Lambda$3103/330885726.execute(Unknown
 Source)
   shaded.databricks.org.apache.hadoop.fs.s3a.Invoker.once(Invoker.java:109)
   
shaded.databricks.org.apache.hadoop.fs.s3a.Invoker.lambda$retry$3(Invoker.java:265)
   
shaded.databricks.org.apache.hadoop.fs.s3a.Invoker$$Lambda$2781/2102803475.execute(Unknown
 Source)
   
shaded.databricks.org.apache.hadoop.fs.s3a.Invoker.retryUntranslated(Invoker.java:322)
   shaded.databricks.org.apache.hadoop.fs.s3a.Invoker.retry(Invoker.java:261)
   shaded.databricks.org.apache.hadoop.fs.s3a.Invoker.retry(Invoker.java:193)
   shaded.databricks.org.apache.hadoop.fs.s3a.Invoker.retry(Invoker.java:215)
   
shaded.databricks.org.apache.hadoop.fs.s3a.S3AInputStream.lazySeek(S3AInputStream.java:326)
   
shaded.databricks.org.apache.hadoop.fs.s3a.S3AInputStream.read(S3AInputStream.java:438)
 => holding 
Monitor(shaded.databricks.org.apache.hadoop.fs.s3a.S3AInputStream@529646547})
   java.io.DataInputStream.read(DataInputStream.java:149)
   
org.apache.iceberg.hadoop.HadoopStreams$HadoopSeekableInputStream.read(HadoopStreams.java:113)
   org.apache.iceberg.avro.AvroIO$AvroInputStreamAdapter.read(AvroIO.java:120)
   
org.apache.iceberg.shaded.org.apache.avro.file.DataFileReader$SeekableInputStream.read(DataFileReader.java:283)
   
org.apache.iceberg.shaded.org.apache.avro.io.BinaryDecoder$InputStreamByteSource.readRaw(BinaryDecoder.java:848)
   
org.apache.iceberg.shaded.org.apache.avro.io.BinaryDecoder.doReadBytes(BinaryDecoder.java:373)
   
org.apache.iceberg.shaded.org.apache.avro.io.BinaryDecoder.readFixed(BinaryDecoder.java:329)
   
org.apache.iceberg.shaded.org.apache.avro.io.Decoder.readFixed(Decoder.java:159)
   
org.apache.iceberg.shaded.org.apache.avro.file.DataFileStream.initialize(DataFileStream.java:106)
   
org.apache.iceberg.shaded.org.apache.avro.file.DataFileReader.<init>(DataFileReader.java:130)
   
org.apache.iceberg.shaded.org.apache.avro.file.DataFileReader.<init>(DataFileReader.java:122)
   
org.apache.iceberg.shaded.org.apache.avro.file.DataFileReader.openReader(DataFileReader.java:66)
   org.apache.iceberg.avro.AvroIterable.newFileReader(AvroIterable.java:100)
   org.apache.iceberg.avro.AvroIterable.iterator(AvroIterable.java:77)
   org.apache.iceberg.io.CloseableIterable$4$1.<init>(CloseableIterable.java:99)
   org.apache.iceberg.io.CloseableIterable$4.iterator(CloseableIterable.java:98)
   org.apache.iceberg.io.CloseableIterable$4.iterator(CloseableIterable.java:90)
   java.lang.Iterable.forEach(Iterable.java:74)
   
org.apache.iceberg.ManifestFilterManager.filterManifestWithDeletedFiles(ManifestFilterManager.java:383)
   
org.apache.iceberg.ManifestFilterManager.filterManifest(ManifestFilterManager.java:301)
   
org.apache.iceberg.ManifestFilterManager.lambda$filterManifests$0(ManifestFilterManager.java:182)
   
org.apache.iceberg.ManifestFilterManager$$Lambda$10161/1540121859.run(Unknown 
Source)
   org.apache.iceberg.util.Tasks$Builder.runTaskWithRetry(Tasks.java:405)
   org.apache.iceberg.util.Tasks$Builder.access$300(Tasks.java:71)
   org.apache.iceberg.util.Tasks$Builder$1.run(Tasks.java:311)
   java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
   java.util.concurrent.FutureTask.run(FutureTask.java:266)
   
java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
   
java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
   java.lang.Thread.run(Thread.java:748)`
   
   Setting the number of Iceberg workers to 1 also resolves the issue.


-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]



---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to