This is an automated email from the ASF dual-hosted git repository.

chengpan pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/kyuubi.git


The following commit(s) were added to refs/heads/master by this push:
     new d5c31a85a4 [KYUUBI #6640] [AUTHZ] Adapt Derby 10.16 new JDBC driver package name
d5c31a85a4 is described below

commit d5c31a85a4f1e927a9dfcb95b613484af5b6c058
Author: Cheng Pan <[email protected]>
AuthorDate: Fri Aug 23 12:27:48 2024 +0800

    [KYUUBI #6640] [AUTHZ] Adapt Derby 10.16 new JDBC driver package name
    
    # :mag: Description
    
    SPARK-46257 (Spark 4.0.0) moves to Derby 10.16, where `org.apache.derby.jdbc.AutoloadedDriver` has been relocated to `org.apache.derby.iapi.jdbc.AutoloadedDriver`.
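    
    A minimal sketch of the approach (not the exact AuthZUtils code): pick the Derby driver class from the runtime Spark version. The plain `(major, minor)` comparison below stands in for Kyuubi's `SemanticVersion`, and the hard-coded `sparkVersion` stands in for `org.apache.spark.SPARK_VERSION`.
    
    ```scala
    object DerbyDriverSelectorSketch {
      // Hard-coded for the sketch; on a real Spark classpath this would come
      // from org.apache.spark.SPARK_VERSION.
      val sparkVersion: String = "4.0.0"
    
      // Compare (major, minor) numerically rather than lexicographically,
      // so that e.g. "3.10" is not treated as lower than "3.5".
      // Assumes a "major.minor[.patch]" version string.
      private def atLeast(version: String, major: Int, minor: Int): Boolean = {
        val Array(maj, min) = version.split("\\.").take(2).map(_.toInt)
        maj > major || (maj == major && min >= minor)
      }
    
      def derbyJdbcDriverClass: String =
        if (atLeast(sparkVersion, 4, 0)) {
          // SPARK-46257 (Spark 4.0.0) upgrades to Derby 10.16, which relocates the driver
          "org.apache.derby.iapi.jdbc.AutoloadedDriver"
        } else {
          "org.apache.derby.jdbc.AutoloadedDriver"
        }
    
      def main(args: Array[String]): Unit =
        println(s"Derby JDBC driver for Spark $sparkVersion: $derbyJdbcDriverClass")
    }
    ```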
    
    ## Types of changes :bookmark:
    
    - [ ] Bugfix (non-breaking change which fixes an issue)
    - [x] New feature (non-breaking change which adds functionality)
    - [ ] Breaking change (fix or feature that would cause existing functionality to change)
    
    ## Test Plan 🧪
    
    Manually tested with Spark 4.0.
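    
    For reference, the JDBC V2 test suites in this patch now wire the driver class in roughly as follows (`v2JdbcTableCatalogClassName` comes from the existing V2JdbcTableCatalogPrivilegesBuilderSuite companion object; `derbyJdbcDriverClass` is the helper added to AuthZUtils by this change):
    
    ```scala
    import org.apache.spark.SparkConf
    
    import org.apache.kyuubi.plugin.spark.authz.V2JdbcTableCatalogPrivilegesBuilderSuite._
    import org.apache.kyuubi.plugin.spark.authz.util.AuthZUtils._
    
    // Derby-backed in-memory V2 JDBC catalog used by the masking/filtering suites;
    // the driver class now follows the runtime Spark version.
    val extraSparkConf: SparkConf = new SparkConf()
      .set("spark.sql.defaultCatalog", "testcat")
      .set("spark.sql.catalog.testcat", v2JdbcTableCatalogClassName)
      .set("spark.sql.catalog.testcat.url", "jdbc:derby:memory:testcat;create=true")
      .set("spark.sql.catalog.testcat.driver", derbyJdbcDriverClass)
    ```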
    
    ---
    
    # Checklist 📝
    
    - [x] This patch was not authored or co-authored using [Generative Tooling](https://www.apache.org/legal/generative-tooling.html)
    
    **Be nice. Be informative.**
    
    Closes #6640 from pan3793/authz-derby.
    
    Closes #6640
    
    46edb32be [Cheng Pan] Update extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/util/AuthZUtils.scala
    7eee47f0d [Cheng Pan] Adapt Derby 10.16 new JDBC driver package name
    
    Authored-by: Cheng Pan <[email protected]>
    Signed-off-by: Cheng Pan <[email protected]>
---
 .../org/apache/kyuubi/plugin/spark/authz/util/AuthZUtils.scala    | 8 ++++++++
 .../spark/authz/V2JdbcTableCatalogPrivilegesBuilderSuite.scala    | 5 ++---
 .../ranger/V2JdbcTableCatalogRangerSparkExtensionSuite.scala      | 4 +---
 .../authz/ranger/datamasking/DataMaskingForJDBCV2Suite.scala      | 7 +++----
 .../authz/ranger/rowfiltering/RowFilteringForJDBCV2Suite.scala    | 7 +++----
 5 files changed, 17 insertions(+), 14 deletions(-)

diff --git a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/util/AuthZUtils.scala b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/util/AuthZUtils.scala
index 2477c9e45d..523b866beb 100644
--- a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/util/AuthZUtils.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/util/AuthZUtils.scala
@@ -88,11 +88,19 @@ private[authz] object AuthZUtils {
   lazy val isSparkV33OrGreater: Boolean = SPARK_RUNTIME_VERSION >= "3.3"
   lazy val isSparkV34OrGreater: Boolean = SPARK_RUNTIME_VERSION >= "3.4"
   lazy val isSparkV35OrGreater: Boolean = SPARK_RUNTIME_VERSION >= "3.5"
+  lazy val isSparkV40OrGreater: Boolean = SPARK_RUNTIME_VERSION >= "4.0"
 
   lazy val SCALA_RUNTIME_VERSION: SemanticVersion =
     SemanticVersion(scala.util.Properties.versionNumberString)
   lazy val isScalaV213: Boolean = SCALA_RUNTIME_VERSION >= "2.13"
 
+  def derbyJdbcDriverClass: String = if (isSparkV40OrGreater) {
+    // SPARK-46257 (Spark 4.0.0) moves to Derby 10.16
+    "org.apache.derby.iapi.jdbc.AutoloadedDriver"
+  } else {
+    "org.apache.derby.jdbc.AutoloadedDriver"
+  }
+
   def quoteIfNeeded(part: String): String = {
     if (part.matches("[a-zA-Z0-9_]+") && !part.matches("\\d+")) {
       part
diff --git a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/V2JdbcTableCatalogPrivilegesBuilderSuite.scala b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/V2JdbcTableCatalogPrivilegesBuilderSuite.scala
index d1a6f4ae8b..0ad11eb39a 100644
--- a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/V2JdbcTableCatalogPrivilegesBuilderSuite.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/V2JdbcTableCatalogPrivilegesBuilderSuite.scala
@@ -24,6 +24,7 @@ import org.scalatest.Outcome
 
 import org.apache.kyuubi.plugin.spark.authz.V2JdbcTableCatalogPrivilegesBuilderSuite._
 import org.apache.kyuubi.plugin.spark.authz.serde._
+import org.apache.kyuubi.plugin.spark.authz.util.AuthZUtils._
 import org.apache.kyuubi.util.AssertionUtils._
 
 class V2JdbcTableCatalogPrivilegesBuilderSuite extends V2CommandsPrivilegesSuite {
@@ -41,9 +42,7 @@ class V2JdbcTableCatalogPrivilegesBuilderSuite extends V2CommandsPrivilegesSuite
   override def beforeAll(): Unit = {
     spark.conf.set(s"spark.sql.catalog.$catalogV2", v2JdbcTableCatalogClassName)
     spark.conf.set(s"spark.sql.catalog.$catalogV2.url", jdbcUrl)
-    spark.conf.set(
-      s"spark.sql.catalog.$catalogV2.driver",
-      "org.apache.derby.jdbc.AutoloadedDriver")
+    spark.conf.set(s"spark.sql.catalog.$catalogV2.driver", derbyJdbcDriverClass)
     super.beforeAll()
   }
 
diff --git a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/V2JdbcTableCatalogRangerSparkExtensionSuite.scala b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/V2JdbcTableCatalogRangerSparkExtensionSuite.scala
index 3a22f45d5b..49d51e5d24 100644
--- a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/V2JdbcTableCatalogRangerSparkExtensionSuite.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/V2JdbcTableCatalogRangerSparkExtensionSuite.scala
@@ -47,9 +47,7 @@ class V2JdbcTableCatalogRangerSparkExtensionSuite extends RangerSparkExtensionSu
   override def beforeAll(): Unit = {
     spark.conf.set(s"spark.sql.catalog.$catalogV2", v2JdbcTableCatalogClassName)
     spark.conf.set(s"spark.sql.catalog.$catalogV2.url", jdbcUrl)
-    spark.conf.set(
-      s"spark.sql.catalog.$catalogV2.driver",
-      "org.apache.derby.jdbc.AutoloadedDriver")
+    spark.conf.set(s"spark.sql.catalog.$catalogV2.driver", derbyJdbcDriverClass)
 
     super.beforeAll()
 
diff --git a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/datamasking/DataMaskingForJDBCV2Suite.scala b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/datamasking/DataMaskingForJDBCV2Suite.scala
index 411d98cf93..d25c4bda09 100644
--- a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/datamasking/DataMaskingForJDBCV2Suite.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/datamasking/DataMaskingForJDBCV2Suite.scala
@@ -24,16 +24,15 @@ import org.apache.spark.SparkConf
 import org.scalatest.Outcome
 
 import org.apache.kyuubi.plugin.spark.authz.V2JdbcTableCatalogPrivilegesBuilderSuite._
+import org.apache.kyuubi.plugin.spark.authz.util.AuthZUtils._
 
 class DataMaskingForJDBCV2Suite extends DataMaskingTestBase {
   override protected val extraSparkConf: SparkConf = {
     new SparkConf()
       .set("spark.sql.defaultCatalog", "testcat")
       .set("spark.sql.catalog.testcat", v2JdbcTableCatalogClassName)
-      .set(s"spark.sql.catalog.testcat.url", "jdbc:derby:memory:testcat;create=true")
-      .set(
-        s"spark.sql.catalog.testcat.driver",
-        "org.apache.derby.jdbc.AutoloadedDriver")
+      .set("spark.sql.catalog.testcat.url", "jdbc:derby:memory:testcat;create=true")
+      .set("spark.sql.catalog.testcat.driver", derbyJdbcDriverClass)
   }
 
   override protected val catalogImpl: String = "in-memory"
diff --git a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/rowfiltering/RowFilteringForJDBCV2Suite.scala b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/rowfiltering/RowFilteringForJDBCV2Suite.scala
index bfe1cd9e49..5305c7bd46 100644
--- a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/rowfiltering/RowFilteringForJDBCV2Suite.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/rowfiltering/RowFilteringForJDBCV2Suite.scala
@@ -25,16 +25,15 @@ import org.apache.spark.SparkConf
 import org.scalatest.Outcome
 
 import org.apache.kyuubi.plugin.spark.authz.V2JdbcTableCatalogPrivilegesBuilderSuite._
+import org.apache.kyuubi.plugin.spark.authz.util.AuthZUtils._
 
 class RowFilteringForJDBCV2Suite extends RowFilteringTestBase {
   override protected val extraSparkConf: SparkConf = {
     new SparkConf()
       .set("spark.sql.defaultCatalog", "testcat")
       .set("spark.sql.catalog.testcat", v2JdbcTableCatalogClassName)
-      .set(s"spark.sql.catalog.testcat.url", "jdbc:derby:memory:testcat;create=true")
-      .set(
-        s"spark.sql.catalog.testcat.driver",
-        "org.apache.derby.jdbc.AutoloadedDriver")
+      .set("spark.sql.catalog.testcat.url", "jdbc:derby:memory:testcat;create=true")
+      .set("spark.sql.catalog.testcat.driver", derbyJdbcDriverClass)
   }
 
   override protected val catalogImpl: String = "in-memory"
