This is an automated email from the ASF dual-hosted git repository.

dongjoon pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new aee6b1582775 [SPARK-46227][SQL] Move `withSQLConf` from `SQLHelper` to `SQLConfHelper`
aee6b1582775 is described below

commit aee6b158277537709a717223b518923431bca0a6
Author: ulysses-you <ulyssesyo...@gmail.com>
AuthorDate: Sun Dec 3 21:23:34 2023 -0800

    [SPARK-46227][SQL] Move `withSQLConf` from `SQLHelper` to `SQLConfHelper`
    
    ### What changes were proposed in this pull request?
    
    This pr moves method `withSQLConf` from `SQLHelper` in catalyst test module
    to `SQLConfHelper` trait in catalyst module. To make it easy to use such case:
    `val x = withSQLConf {}`, this pr also changes its return type.
    
    ### Why are the changes needed?
    
    A part of https://github.com/apache/spark/pull/44013
    
    ### Does this PR introduce _any_ user-facing change?
    
    no
    
    ### How was this patch tested?
    
    Pass CI
    
    ### Was this patch authored or co-authored using generative AI tooling?
    
    no
    
    Closes #44142 from ulysses-you/withSQLConf.
    
    Authored-by: ulysses-you <ulyssesyo...@gmail.com>
    Signed-off-by: Dongjoon Hyun <dh...@apple.com>
---
 .../apache/spark/sql/catalyst/SQLConfHelper.scala  | 29 ++++++++++++++++++++
 .../spark/sql/catalyst/plans/SQLHelper.scala       | 32 ++--------------------
 .../sql/internal/ExecutorSideSQLConfSuite.scala    |  2 +-
 .../org/apache/spark/sql/test/SQLTestUtils.scala   |  2 +-
 .../spark/sql/hive/execution/HiveSerDeSuite.scala  |  2 +-
 5 files changed, 34 insertions(+), 33 deletions(-)

diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/SQLConfHelper.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/SQLConfHelper.scala
index cee35cdb8d84..f4605b9218f0 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/SQLConfHelper.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/SQLConfHelper.scala
@@ -17,6 +17,7 @@
 
 package org.apache.spark.sql.catalyst
 
+import org.apache.spark.sql.AnalysisException
 import org.apache.spark.sql.internal.SQLConf
 
 /**
@@ -29,4 +30,32 @@ trait SQLConfHelper {
    * See [[SQLConf.get]] for more information.
    */
   def conf: SQLConf = SQLConf.get
+
+  /**
+   * Sets all SQL configurations specified in `pairs`, calls `f`, and then restores all SQL
+   * configurations.
+   */
+  protected def withSQLConf[T](pairs: (String, String)*)(f: => T): T = {
+    val conf = SQLConf.get
+    val (keys, values) = pairs.unzip
+    val currentValues = keys.map { key =>
+      if (conf.contains(key)) {
+        Some(conf.getConfString(key))
+      } else {
+        None
+      }
+    }
+    keys.lazyZip(values).foreach { (k, v) =>
+      if (SQLConf.isStaticConfigKey(k)) {
+        throw new AnalysisException(s"Cannot modify the value of a static config: $k")
+      }
+      conf.setConfString(k, v)
+    }
+    try f finally {
+      keys.zip(currentValues).foreach {
+        case (key, Some(value)) => conf.setConfString(key, value)
+        case (key, None) => conf.unsetConf(key)
+      }
+    }
+  }
 }
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/plans/SQLHelper.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/plans/SQLHelper.scala
index eb844e6f057f..92681613bd83 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/plans/SQLHelper.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/plans/SQLHelper.scala
@@ -23,41 +23,13 @@ import scala.util.control.NonFatal
 
 import org.scalatest.Assertions.fail
 
-import org.apache.spark.sql.AnalysisException
+import org.apache.spark.sql.catalyst.SQLConfHelper
 import org.apache.spark.sql.catalyst.util.DateTimeTestUtils
 import org.apache.spark.sql.catalyst.util.DateTimeUtils.getZoneId
 import org.apache.spark.sql.internal.SQLConf
 import org.apache.spark.util.Utils
 
-trait SQLHelper {
-
-  /**
-   * Sets all SQL configurations specified in `pairs`, calls `f`, and then restores all SQL
-   * configurations.
-   */
-  protected def withSQLConf(pairs: (String, String)*)(f: => Unit): Unit = {
-    val conf = SQLConf.get
-    val (keys, values) = pairs.unzip
-    val currentValues = keys.map { key =>
-      if (conf.contains(key)) {
-        Some(conf.getConfString(key))
-      } else {
-        None
-      }
-    }
-    keys.lazyZip(values).foreach { (k, v) =>
-      if (SQLConf.isStaticConfigKey(k)) {
-        throw new AnalysisException(s"Cannot modify the value of a static config: $k")
-      }
-      conf.setConfString(k, v)
-    }
-    try f finally {
-      keys.zip(currentValues).foreach {
-        case (key, Some(value)) => conf.setConfString(key, value)
-        case (key, None) => conf.unsetConf(key)
-      }
-    }
-  }
+trait SQLHelper extends SQLConfHelper {
 
   /**
    * Generates a temporary path without creating the actual file/directory, then pass it to `f`. If
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/internal/ExecutorSideSQLConfSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/internal/ExecutorSideSQLConfSuite.scala
index 6149f3eaab17..247253c47ff6 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/internal/ExecutorSideSQLConfSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/internal/ExecutorSideSQLConfSuite.scala
@@ -58,7 +58,7 @@ class ExecutorSideSQLConfSuite extends SparkFunSuite with SQLTestUtils {
     }
   }
 
-  override def withSQLConf(pairs: (String, String)*)(f: => Unit): Unit = {
+  override def withSQLConf[T](pairs: (String, String)*)(f: => T): T = {
     pairs.foreach { case (k, v) =>
       SQLConf.get.setConfString(k, v)
     }
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/test/SQLTestUtils.scala b/sql/core/src/test/scala/org/apache/spark/sql/test/SQLTestUtils.scala
index 3264141aa7ab..7da2bb47038e 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/test/SQLTestUtils.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/test/SQLTestUtils.scala
@@ -243,7 +243,7 @@ private[sql] trait SQLTestUtilsBase
     protected override def _sqlContext: SQLContext = self.spark.sqlContext
   }
 
-  protected override def withSQLConf(pairs: (String, String)*)(f: => Unit): Unit = {
+  protected override def withSQLConf[T](pairs: (String, String)*)(f: => T): T = {
     SparkSession.setActiveSession(spark)
     super.withSQLConf(pairs: _*)(f)
   }
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveSerDeSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveSerDeSuite.scala
index e92dee09cc1e..aac601043f33 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveSerDeSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveSerDeSuite.scala
@@ -84,7 +84,7 @@ class HiveSerDeSuite extends HiveComparisonTest with PlanTest with BeforeAndAfte
   }
 
   // Make sure we set the config values to TestHive.conf.
-  override def withSQLConf(pairs: (String, String)*)(f: => Unit): Unit =
+  override def withSQLConf[T](pairs: (String, String)*)(f: => T): T =
     SQLConf.withExistingConf(TestHive.conf)(super.withSQLConf(pairs: _*)(f))
 
   test("Test the default fileformat for Hive-serde tables") {


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org

Reply via email to