This is an automated email from the ASF dual-hosted git repository.

maxgekk pushed a commit to branch branch-3.5
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/branch-3.5 by this push:
     new cc4f5787414e [SPARK-46453][CONNECT] Throw exception from `internalError()` in `SessionHolder`
cc4f5787414e is described below

commit cc4f5787414e4392499a349dec5b24c8e25e50f3
Author: Max Gekk <max.g...@gmail.com>
AuthorDate: Tue Dec 19 12:21:20 2023 +0300

    [SPARK-46453][CONNECT] Throw exception from `internalError()` in `SessionHolder`
    
    ### What changes were proposed in this pull request?
    In this PR, I propose to throw the `SparkException` returned by `internalError` in `SessionHolder` instead of discarding it.
    
    ### Why are the changes needed?
    Without the bug fix, the user won't see the internal error: the exception is constructed but never thrown.
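    
    Below is a minimal, hedged sketch of the pattern being fixed. The `requireAbsent*` helpers and the `duplicate` flag are hypothetical and only illustrate the difference; `SparkException.internalError` is the real API used in the diff.
    
        import org.apache.spark.SparkException
    
        object InternalErrorSketch {
          // Buggy pattern: internalError() builds the exception, but without
          // `throw` it is silently discarded and the caller keeps going.
          def requireAbsentBuggy(duplicate: Boolean, id: String): Unit = {
            if (duplicate) {
              SparkException.internalError(s"A dataframe is already associated with id $id")
            }
          }
    
          // Fixed pattern: the internal error is actually raised and surfaces
          // to the user.
          def requireAbsentFixed(duplicate: Boolean, id: String): Unit = {
            if (duplicate) {
              throw SparkException.internalError(s"A dataframe is already associated with id $id")
            }
          }
        }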
    
    ### Does this PR introduce _any_ user-facing change?
    No.
    
    ### How was this patch tested?
    N/a
    
    ### Was this patch authored or co-authored using generative AI tooling?
    No.
    
    Closes #44400 from MaxGekk/throw-internal-error.
    
    Authored-by: Max Gekk <max.g...@gmail.com>
    Signed-off-by: Max Gekk <max.g...@gmail.com>
    (cherry picked from commit dc0bfc4c700c347f2f58625facec8c5771bde59a)
    Signed-off-by: Max Gekk <max.g...@gmail.com>
---
 .../scala/org/apache/spark/sql/connect/service/SessionHolder.scala    | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/service/SessionHolder.scala b/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/service/SessionHolder.scala
index 1cef02d7e346..218819d114c1 100644
--- a/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/service/SessionHolder.scala
+++ b/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/service/SessionHolder.scala
@@ -197,7 +197,7 @@ case class SessionHolder(userId: String, sessionId: String, session: SparkSessio
    */
   private[connect] def cacheDataFrameById(dfId: String, df: DataFrame): Unit = {
     if (dataFrameCache.putIfAbsent(dfId, df) != null) {
-      SparkException.internalError(s"A dataframe is already associated with id $dfId")
+      throw SparkException.internalError(s"A dataframe is already associated with id $dfId")
     }
   }
 
@@ -221,7 +221,7 @@ case class SessionHolder(userId: String, sessionId: String, session: SparkSessio
    */
   private[connect] def cacheListenerById(id: String, listener: StreamingQueryListener): Unit = {
     if (listenerCache.putIfAbsent(id, listener) != null) {
-      SparkException.internalError(s"A listener is already associated with id $id")
+      throw SparkException.internalError(s"A listener is already associated with id $id")
     }
   }
 

