Github user HyukjinKwon commented on a diff in the pull request:
https://github.com/apache/spark/pull/21990#discussion_r210790581
--- Diff: python/pyspark/sql/tests.py ---
@@ -3563,6 +3563,51 @@ def
test_query_execution_listener_on_collect_with_arrow(self):
"The callback from the query execution listener should be
called after 'toPandas'")
+class SparkExtensionsTest(unittest.TestCase, SQLTestUtils):
+ # These tests are separate because it uses 'spark.sql.extensions' which is
+ # static and immutable. This can't be set or unset, for example, via `spark.conf`.
+
+ @classmethod
+ def setUpClass(cls):
+ import glob
+ from pyspark.find_spark_home import _find_spark_home
+
+ SPARK_HOME = _find_spark_home()
+ filename_pattern = (
+ "sql/core/target/scala-*/test-classes/org/apache/spark/sql/"
+ "SparkSessionExtensionSuite.class")
+ if not glob.glob(os.path.join(SPARK_HOME, filename_pattern)):
+ raise unittest.SkipTest(
+ "'org.apache.spark.sql.SparkSessionExtensionSuite.' is not "
+ "available. Will skip the related tests.")
+
+ # Note that 'spark.sql.extensions' is a static immutable configuration.
+ cls.spark = SparkSession.builder \
+ .master("local[4]") \
+ .appName(cls.__name__) \
+ .config(
+ "spark.sql.extensions",
+ "org.apache.spark.sql.MyExtensions") \
+ .getOrCreate()
+
+ @classmethod
+ def tearDownClass(cls):
+ cls.spark.stop()
+
+ def tearDown(self):
+ self.spark._jvm.OnSuccessCall.clear()
--- End diff --
This wouldn't be needed, since I added it only to test whether the callback is
called, in the PR pointed out above.
---
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]