This is an automated email from the ASF dual-hosted git repository. dongjoon pushed a commit to branch branch-3.4 in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/branch-3.4 by this push: new c2a867bed83 [SPARK-42644][INFRA] Add `hive` dependency to `connect` module c2a867bed83 is described below commit c2a867bed83b2c09fccbb707306c7837b7374e8b Author: Dongjoon Hyun <dh...@apple.com> AuthorDate: Wed Mar 1 22:46:21 2023 -0800 [SPARK-42644][INFRA] Add `hive` dependency to `connect` module ### What changes were proposed in this pull request? This PR aims to update the `connect` module dependency from `sql` to `hive`. ### Why are the changes needed? SPARK-41725 added a `hive` dependency via its test suite. https://github.com/apache/spark/blob/41d3103f4d69a9ec25d9f78f3f94ff5f3b64ef78/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/client/util/RemoteSparkSession.scala#L75 ### Does this PR introduce _any_ user-facing change? No. ### How was this patch tested? Pass the CIs. Closes #40246 from dongjoon-hyun/SPARK-42644. Authored-by: Dongjoon Hyun <dh...@apple.com> Signed-off-by: Dongjoon Hyun <dongj...@apache.org> (cherry picked from commit 11f4a6f1e54372c6bf828ec2aa4bc1598e588a9a) Signed-off-by: Dongjoon Hyun <dongj...@apache.org> --- dev/sparktestsupport/modules.py | 2 +- dev/sparktestsupport/utils.py | 8 ++++---- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/dev/sparktestsupport/modules.py b/dev/sparktestsupport/modules.py index b82f8dbb4d6..d8dee651c2b 100644 --- a/dev/sparktestsupport/modules.py +++ b/dev/sparktestsupport/modules.py @@ -273,7 +273,7 @@ sql_kafka = Module( connect = Module( name="connect", - dependencies=[sql], + dependencies=[hive], source_file_regexes=[ "connector/connect", ], diff --git a/dev/sparktestsupport/utils.py b/dev/sparktestsupport/utils.py index a3df038b555..6b190eb5ab2 100755 --- a/dev/sparktestsupport/utils.py +++ b/dev/sparktestsupport/utils.py @@ -109,11 +109,11 @@ def determine_modules_to_test(changed_modules, deduplicated=True): ['root'] >>> [x.name for x in determine_modules_to_test([modules.graphx])] ['graphx', 
'examples'] - >>> [x.name for x in determine_modules_to_test([modules.sql])] + >>> sorted([x.name for x in determine_modules_to_test([modules.sql])]) ... # doctest: +NORMALIZE_WHITESPACE - ['sql', 'avro', 'connect', 'docker-integration-tests', 'hive', 'mllib', 'protobuf', - 'sql-kafka-0-10', 'examples', 'hive-thriftserver', 'pyspark-sql', 'repl', 'sparkr', - 'pyspark-connect', 'pyspark-mllib', 'pyspark-pandas', 'pyspark-pandas-slow', 'pyspark-ml'] + ['avro', 'connect', 'docker-integration-tests', 'examples', 'hive', 'hive-thriftserver', + 'mllib', 'protobuf', 'pyspark-connect', 'pyspark-ml', 'pyspark-mllib', 'pyspark-pandas', + 'pyspark-pandas-slow', 'pyspark-sql', 'repl', 'sparkr', 'sql', 'sql-kafka-0-10'] >>> sorted([x.name for x in determine_modules_to_test( ... [modules.sparkr, modules.sql], deduplicated=False)]) ... # doctest: +NORMALIZE_WHITESPACE --------------------------------------------------------------------- To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org For additional commands, e-mail: commits-h...@spark.apache.org