This is an automated email from the ASF dual-hosted git repository.

yangjie01 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new eda850708e3 [SPARK-45392][SQL][FOLLOWUP] Restore the original 
exception from data source
eda850708e3 is described below

commit eda850708e309212c45babf611fb4fccb9396c2f
Author: Wenchen Fan <[email protected]>
AuthorDate: Thu Oct 19 19:34:57 2023 +0800

    [SPARK-45392][SQL][FOLLOWUP] Restore the original exception from data source
    
    ### What changes were proposed in this pull request?
    
    This is a followup of https://github.com/apache/spark/pull/43193 to fix a
    behavior change. `Class.getDeclaredConstructor().newInstance()` wraps any
    exception thrown by the constructor in `InvocationTargetException`; this PR
    unwraps it so that the original exception from the data source is still thrown.
    
    ### Why are the changes needed?
    
    Restores the original behavior (re-throwing the data source's own exception
    instead of a wrapping `InvocationTargetException`).
    
    ### Does this PR introduce _any_ user-facing change?
    
    no
    
    ### How was this patch tested?
    
    new test
    
    ### Was this patch authored or co-authored using generative AI tooling?
    
    no
    
    Closes #43446 from cloud-fan/follow.
    
    Authored-by: Wenchen Fan <[email protected]>
    Signed-off-by: yangjie01 <[email protected]>
---
 .../apache/spark/sql/execution/datasources/DataSource.scala  |  8 +++++++-
 .../org/apache/spark/sql/connector/DataSourceV2Suite.scala   | 12 ++++++++++++
 2 files changed, 19 insertions(+), 1 deletion(-)

diff --git 
a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/DataSource.scala
 
b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/DataSource.scala
index bfb919bf498..68f17787c89 100644
--- 
a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/DataSource.scala
+++ 
b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/DataSource.scala
@@ -699,7 +699,13 @@ object DataSource extends Logging {
     val useV1Sources = 
conf.getConf(SQLConf.USE_V1_SOURCE_LIST).toLowerCase(Locale.ROOT)
       .split(",").map(_.trim)
     val cls = lookupDataSource(provider, conf)
-    cls.getDeclaredConstructor().newInstance() match {
+    val instance = try {
+      cls.getDeclaredConstructor().newInstance()
+    } catch {
+      // Throw the original error from the data source implementation.
+      case e: java.lang.reflect.InvocationTargetException => throw e.getCause
+    }
+    instance match {
       case d: DataSourceRegister if useV1Sources.contains(d.shortName()) => 
None
       case t: TableProvider
           if 
!useV1Sources.contains(cls.getCanonicalName.toLowerCase(Locale.ROOT)) =>
diff --git 
a/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2Suite.scala
 
b/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2Suite.scala
index 52d0151ee46..e8525eee296 100644
--- 
a/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2Suite.scala
+++ 
b/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2Suite.scala
@@ -77,6 +77,12 @@ class DataSourceV2Suite extends QueryTest with 
SharedSparkSession with AdaptiveS
     }.head
   }
 
+  test("invalid data source") {
+    intercept[IllegalArgumentException] {
+      spark.read.format(classOf[InvalidDataSource].getName).load()
+    }
+  }
+
   test("simplest implementation") {
     Seq(classOf[SimpleDataSourceV2], classOf[JavaSimpleDataSourceV2]).foreach 
{ cls =>
       withClue(cls.getName) {
@@ -1155,3 +1161,9 @@ class ReportStatisticsDataSource extends 
SimpleWritableDataSource {
     }
   }
 }
+
+class InvalidDataSource extends TestingV2Source {
+  throw new IllegalArgumentException("test error")
+
+  override def getTable(options: CaseInsensitiveStringMap): Table = null
+}


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to