This is an automated email from the ASF dual-hosted git repository.

yao pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new e8384d5af58e [SPARK-52613][CORE][SQL] Restore printing full stacktrace when HBase/Hive DelegationTokenProvider hit exception
e8384d5af58e is described below

commit e8384d5af58e352f32e87716aa1dc1c6d18860a8
Author: Cheng Pan <cheng...@apache.org>
AuthorDate: Thu Jul 3 15:30:39 2025 +0800

    [SPARK-52613][CORE][SQL] Restore printing full stacktrace when HBase/Hive DelegationTokenProvider hit exception
    
    ### What changes were proposed in this pull request?
    
    After SPARK-35747 (3.2.0), Spark stopped printing the full stacktrace when the HBase/Hive/Kafka DelegationTokenProviders hit exceptions.
    
    SPARK-35747 actually made two changes:
    1. Removed the full stacktrace from the logs
    2. Added guidance for disabling an unused DelegationTokenProvider - `If $serviceName is not used, set spark.security.credentials.$serviceName.enabled to false` (see the configuration sketch after this paragraph)
    
    I think 2 is useful, but 1 is incorrect, so this PR logically reverts 1 (the corresponding changes in KafkaDelegationTokenProvider were already reverted in SPARK-47593 (4.0.0)).
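    
    For reference, the per-service switch mentioned in 2 is a regular Spark configuration; a minimal sketch in Scala (assuming spark-core is on the classpath; the object name is illustrative and HBase is used as the example service):
    ```
    import org.apache.spark.SparkConf

    object DisableHBaseTokenProvider {
      // Sketch: turn off the HBase delegation token provider when HBase is not used.
      // The same per-service switch exists for the other providers, e.g. spark.security.credentials.hive.enabled.
      val conf: SparkConf = new SparkConf()
        .set("spark.security.credentials.hbase.enabled", "false")
    }
    ```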
    
    We recently hit an issue related to HBaseDelegationTokenProvider; the printed error message was
    ```
    [WARN] [main] HBaseDelegationTokenProvider#94 - Failed to get token from service hbase due to java.lang.reflect.InvocationTargetException. If hbase is not used, set spark.security.credentials.hbase.enabled to false. Retrying to fetch HBase security token with hbase connection parameter.
    ```
    It made us think it was a classpath issue, but eventually, after we changed the code to print the full stacktrace, we found the root cause was
    ```
    org.apache.hadoop.hbase.security.AccessDeniedException: Insufficient permissions (user=***/******.COM, scope=hbase:meta, params=[table=hbase:meta,],action=EXEC)
    ```
    Full stacktrace: https://gist.github.com/pan3793/46a3f6b956a7982646e1ee75603a1865
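    
    The difference is easy to reproduce outside Spark; a minimal standalone sketch in Scala (using java.util.logging rather than Spark's internal Logging trait; the exception types and messages are made up):
    ```
    import java.util.logging.{Level, Logger}

    object StacktraceDemo {
      private val logger = Logger.getLogger("StacktraceDemo")

      def main(args: Array[String]): Unit = {
        val e = new RuntimeException("wrapper", new IllegalStateException("real root cause"))
        // Interpolating the exception into the message keeps only e.toString; the cause chain is lost.
        logger.warning(s"Failed to get token from service hbase due to $e.")
        // Passing the Throwable to the logger prints the full stacktrace, including nested causes.
        logger.log(Level.WARNING, "Failed to get token from service hbase.", e)
      }
    }
    ```
    The first call mirrors the pre-patch message above; the second has the shape of the logWarning(..., e) calls restored in the diff below.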
    
    ### Why are the changes needed?
    
    Improve the diagnosis experience: without the full stacktrace, the root cause of a token-fetch failure is hard to identify.
    
    ### Does this PR introduce _any_ user-facing change?
    
    Yes, users will now see the full error message, including the stacktrace, when the HBase/Hive DelegationTokenProviders hit exceptions.
    
    ### How was this patch tested?
    
    Manual test.
    
    ### Was this patch authored or co-authored using generative AI tooling?
    
    No.
    
    Closes #51320 from pan3793/SPARK-52613.
    
    Authored-by: Cheng Pan <cheng...@apache.org>
    Signed-off-by: Kent Yao <y...@apache.org>
---
 .../apache/spark/deploy/security/HBaseDelegationTokenProvider.scala | 6 +++---
 core/src/main/scala/org/apache/spark/util/Utils.scala               | 6 +++---
 .../spark/sql/hive/security/HiveDelegationTokenProvider.scala       | 2 +-
 3 files changed, 7 insertions(+), 7 deletions(-)

diff --git a/core/src/main/scala/org/apache/spark/deploy/security/HBaseDelegationTokenProvider.scala b/core/src/main/scala/org/apache/spark/deploy/security/HBaseDelegationTokenProvider.scala
index 3a262a0d19fb..b3d67028e417 100644
--- a/core/src/main/scala/org/apache/spark/deploy/security/HBaseDelegationTokenProvider.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/security/HBaseDelegationTokenProvider.scala
@@ -54,8 +54,8 @@ private[security] class HBaseDelegationTokenProvider
       creds.addToken(token.getService, token)
     } catch {
       case NonFatal(e) =>
-        logWarning(Utils.createFailedToGetTokenMessage(serviceName, e) + log" Retrying to fetch " +
-          log"HBase security token with ${MDC(SERVICE_NAME, serviceName)} connection parameter.")
+        logWarning(Utils.createFailedToGetTokenMessage(serviceName) + log" Retrying to fetch " +
+          log"HBase security token with ${MDC(SERVICE_NAME, serviceName)} connection parameter.", e)
         // Seems to be spark is trying to get the token from HBase 2.x.x  version or above where the
         // obtainToken(Configuration conf) API has been removed. Lets try obtaining the token from
         // another compatible API of HBase service.
@@ -98,7 +98,7 @@ private[security] class HBaseDelegationTokenProvider
       creds.addToken(token.getService, token)
     } catch {
       case NonFatal(e) =>
-        logWarning(Utils.createFailedToGetTokenMessage(serviceName, e))
+        logWarning(Utils.createFailedToGetTokenMessage(serviceName), e)
     } finally {
       if (null != hbaseConnection) {
         hbaseConnection.close()
diff --git a/core/src/main/scala/org/apache/spark/util/Utils.scala b/core/src/main/scala/org/apache/spark/util/Utils.scala
index 8f60857eb691..4c808eb5b905 100644
--- a/core/src/main/scala/org/apache/spark/util/Utils.scala
+++ b/core/src/main/scala/org/apache/spark/util/Utils.scala
@@ -3060,9 +3060,9 @@ private[spark] object Utils
   }
 
   /** Returns a string message about delegation token generation failure */
-  def createFailedToGetTokenMessage(serviceName: String, e: scala.Throwable): MessageWithContext = {
-    log"Failed to get token from service ${MDC(SERVICE_NAME, serviceName)} " +
-      log"due to ${MDC(ERROR, e)}. If ${MDC(SERVICE_NAME, serviceName)} is not used, " +
+  def createFailedToGetTokenMessage(serviceName: String): MessageWithContext = {
+    log"Failed to get token from service ${MDC(SERVICE_NAME, serviceName)}. " +
+      log"If ${MDC(SERVICE_NAME, serviceName)} is not used, " +
       log"set spark.security.credentials.${MDC(SERVICE_NAME, serviceName)}.enabled to false."
   }
 
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/security/HiveDelegationTokenProvider.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/security/HiveDelegationTokenProvider.scala
index 0e357d5e39b2..9113b9be8f3c 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/security/HiveDelegationTokenProvider.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/security/HiveDelegationTokenProvider.scala
@@ -114,7 +114,7 @@ private[spark] class HiveDelegationTokenProvider
       None
     } catch {
       case NonFatal(e) =>
-        logWarning(Utils.createFailedToGetTokenMessage(serviceName, e))
+        logWarning(Utils.createFailedToGetTokenMessage(serviceName), e)
         None
       case e: NoClassDefFoundError =>
         logWarning(classNotFoundErrorStr)


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org
