Yao-MR opened a new pull request, #14176:
URL: https://github.com/apache/iceberg/pull/14176

   Reason
   
   Fix #14146 
   
   Bug description
   
   1. Local planning in Spark always uses `ThreadPools.getWorkerPool()`, which 
is a single shared (singleton) pool.
   2. The threads in that singleton pool permanently hold the initial UGI 
delegation token captured when the Spark master started, so requests fail once 
that token expires — regardless of whether Spark has since refreshed its tokens.
   
   
   
   `2025-09-22 10:16:49,787 [INFO] [SparkSQLSessionManager-exec-pool: 
Thread-489] Created broadcast 13 from broadcast at SparkBatch.java:79 
(org.apache.spark.SparkContext(org.apache.spark.internal.Logging.logInfo:60))
   2025-09-22 10:16:49,796 [WARN] [iceberg-worker-pool-64] Exception 
encountered while connecting to the server 
(org.apache.hadoop.ipc.Client(org.apache.hadoop.ipc.Client$Connection$1.run:787))
   org.apache.hadoop.ipc.RemoteException: token (token for hadoop: 
HDFS_DELEGATION_TOKEN owner=hadoop, renewer=hadoop, 
realUser=hadoop/-{$IP}@-9ZHNN93W, issueDate=1758251993978, 
maxDate=1758856793978, sequenceNumber=64812, masterKeyId=108) can't be found in 
cache
   at 
org.apache.hadoop.security.SaslRpcClient.saslConnect(SaslRpcClient.java:495) 
~[hadoop-client-api-3.2.2--5.3.1.3.jar:?]
   at 
org.apache.hadoop.ipc.Client$Connection.setupSaslConnection(Client.java:636) 
~[hadoop-client-api-3.2.2--5.3.1.3.jar:?]
   at org.apache.hadoop.ipc.Client$Connection.access$2300(Client.java:420) 
~[hadoop-client-api-3.2.2--5.3.1.3.jar:?]
   at org.apache.hadoop.ipc.Client$Connection$2.run(Client.java:838) 
~[hadoop-client-api-3.2.2--5.3.1.3.jar:?]
   at org.apache.hadoop.ipc.Client$Connection$2.run(Client.java:834) 
~[hadoop-client-api-3.2.2--5.3.1.3.jar:?]
   at java.security.AccessController.doPrivileged(Native Method) ~[?:?]
   at javax.security.auth.Subject.doAs(Subject.java:423) ~[?:?]
   at 
org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:2065)
 ~[hadoop-client-api-3.2.2--5.3.1.3.jar:?]
   at org.apache.hadoop.ipc.Client$Connection.setupIOstreams(Client.java:834) 
~[hadoop-client-api-3.2.2--5.3.1.3.jar:?]
   at org.apache.hadoop.ipc.Client$Connection.access$3900(Client.java:420) 
~[hadoop-client-api-3.2.2--5.3.1.3.jar:?]
   at org.apache.hadoop.ipc.Client.getConnection(Client.java:1682) 
~[hadoop-client-api-3.2.2--5.3.1.3.jar:?]
   at org.apache.hadoop.ipc.Client.call(Client.java:1498) 
~[hadoop-client-api-3.2.2--5.3.1.3.jar:?]
   at org.apache.hadoop.ipc.Client.call(Client.java:1451) 
~[hadoop-client-api-3.2.2--5.3.1.3.jar:?]
   at 
org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:233)
 ~[hadoop-client-api-3.2.2--5.3.1.3.jar:?]
   at 
org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:118)
 ~[hadoop-client-api-3.2.2--5.3.1.3.jar:?]
   at com.sun.proxy.$Proxy28.getBlockLocations(Unknown Source) ~[?:?]
   at 
org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.getBlockLocations(ClientNamenodeProtocolTranslatorPB.java:232)
 ~[hadoop-client-api-3.2.2--5.3.1.3.jar:?]
   at jdk.internal.reflect.GeneratedMethodAccessor63.invoke(Unknown Source) 
~[?:?]
   at 
jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
 ~[?:?]
   at java.lang.reflect.Method.invoke(Method.java:566) ~[?:?]
   at 
org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:434)
 ~[hadoop-client-api-3.2.2--5.3.1.3.jar:?]
   at 
org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeMethod(RetryInvocationHandler.java:166)
 ~[hadoop-client-api-3.2.2--5.3.1.3.jar:?]
   at 
org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invoke(RetryInvocationHandler.java:158)
 ~[hadoop-client-api-3.2.2--5.3.1.3.jar:?]
   at 
org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeOnce(RetryInvocationHandler.java:96)
 ~[hadoop-client-api-3.2.2--5.3.1.3.jar:?]
   at 
org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:362)
 ~[hadoop-client-api-3.2.2--5.3.1.3.jar:?]
   at com.sun.proxy.$Proxy29.getBlockLocations(Unknown Source) ~[?:?]
   at 
org.apache.hadoop.hdfs.DFSClient.callGetBlockLocations(DFSClient.java:973) 
~[hadoop-client-api-3.2.2--5.3.1.3.jar:?]
   at org.apache.hadoop.hdfs.DFSClient.getLocatedBlocks(DFSClient.java:962) 
~[hadoop-client-api-3.2.2--5.3.1.3.jar:?]
   at org.apache.hadoop.hdfs.DFSClient.getBlockLocations(DFSClient.java:1021) 
~[hadoop-client-api-3.2.2--5.3.1.3.jar:?]
   at 
org.apache.hadoop.hdfs.DistributedFileSystem$2.doCall(DistributedFileSystem.java:297)
 ~[hadoop-client-api-3.2.2--5.3.1.3.jar:?]
   at 
org.apache.hadoop.hdfs.DistributedFileSystem$2.doCall(DistributedFileSystem.java:294)
 ~[hadoop-client-api-3.2.2--5.3.1.3.jar:?]
   at 
org.apache.hadoop.fs.FileSystemLinkResolver.resolve(FileSystemLinkResolver.java:81)
 ~[hadoop-client-api-3.2.2--5.3.1.3.jar:?]
   at 
org.apache.hadoop.hdfs.DistributedFileSystem.getFileBlockLocations(DistributedFileSystem.java:304)
 ~[hadoop-client-api-3.2.2--5.3.1.3.jar:?]
   at 
org.apache.iceberg.hadoop.HadoopInputFile.getBlockLocations(HadoopInputFile.java:210)
 ~[iceberg-spark-runtime-3.5_2.12-1.6.1--5.3.1_2025p4-SNAPSHOT.jar:?]
   at org.apache.iceberg.hadoop.Util.blockLocations(Util.java:111) 
~[iceberg-spark-runtime-3.5_2.12-1.6.1--5.3.1_2025p4-SNAPSHOT.jar:?]
   at org.apache.iceberg.hadoop.Util.blockLocations(Util.java:84) 
~[iceberg-spark-runtime-3.5_2.12-1.6.1--5.3.1_2025p4-SNAPSHOT.jar:?]
   at 
org.apache.iceberg.spark.source.SparkPlanningUtil.lambda$fetchBlockLocations$0(SparkPlanningUtil.java:49)
 ~[iceberg-spark-runtime-3.5_2.12-1.6.1--5.3.1_2025p4-SNAPSHOT.jar:?]
   at org.apache.iceberg.util.Tasks$Builder.runTaskWithRetry(Tasks.java:413) 
~[iceberg-spark-runtime-3.5_2.12-1.6.1--5.3.1_2025p4-SNAPSHOT.jar:?]
   at org.apache.iceberg.util.Tasks$Builder.access$300(Tasks.java:69) 
~[iceberg-spark-runtime-3.5_2.12-1.6.1--5.3.1_2025p4-SNAPSHOT.jar:?]
   at org.apache.iceberg.util.Tasks$Builder$1.run(Tasks.java:315) 
~[iceberg-spark-runtime-3.5_2.12-1.6.1-*****-5.3.1_2025p4-SNAPSHOT.jar:?]
   at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515) 
~[?:?]
   at java.util.concurrent.FutureTask.run(FutureTask.java:264) ~[?:?]
   at 
java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128) 
~[?:?]
   at 
java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628) 
~[?:?]
   at java.lang.Thread.run(Thread.java:829) ~[?:?]
   
   `
   
   
   
   
   
   
   
   
   
   
   
   
   
   
   
   
   
   
   
   
   
   
   
   
   
   
   
   
   
   
   
   
   
   
   


-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: issues-unsubscr...@iceberg.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org


---------------------------------------------------------------------
To unsubscribe, e-mail: issues-unsubscr...@iceberg.apache.org
For additional commands, e-mail: issues-h...@iceberg.apache.org

Reply via email to