This is an automated email from the ASF dual-hosted git repository.
dongjoon pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push:
new 116e07a8244 [SPARK-42043][CONNECT][TESTS] Fix connect jar finding
issue for scala 2.13
116e07a8244 is described below
commit 116e07a82446e808af4d5d787b6849c34adf5669
Author: Zhen Li <[email protected]>
AuthorDate: Thu Jan 19 18:17:29 2023 -0800
[SPARK-42043][CONNECT][TESTS] Fix connect jar finding issue for scala 2.13
### What changes were proposed in this pull request?
Fix the error locating the server jar in the Scala client E2E tests when running against a Scala 2.13 distribution. See https://github.com/apache/spark/pull/39541
### Why are the changes needed?
Bug fix
### Does this PR introduce _any_ user-facing change?
No
### How was this patch tested?
Existing tests.
Closes #39658 from zhenlineo/zhen-e2e-fix.
Authored-by: Zhen Li <[email protected]>
Signed-off-by: Dongjoon Hyun <[email protected]>
---
.../spark/sql/connect/client/util/RemoteSparkSession.scala | 9 +++++----
1 file changed, 5 insertions(+), 4 deletions(-)
diff --git
a/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/client/util/RemoteSparkSession.scala
b/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/client/util/RemoteSparkSession.scala
index f843b651ae8..31b710de3bf 100644
---
a/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/client/util/RemoteSparkSession.scala
+++
b/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/client/util/RemoteSparkSession.scala
@@ -118,12 +118,13 @@ object SparkConnectServerUtils {
val parentDir = new File(sparkHome, target)
assert(
parentDir.exists(),
- s"Fail to locate the spark connect target folder:
'${parentDir.getCanonicalPath}'. " +
+ s"Fail to locate the spark connect server target folder:
'${parentDir.getCanonicalPath}'. " +
s"SPARK_HOME='${new File(sparkHome).getCanonicalPath}'. " +
- "Make sure system property `SPARK_HOME` is set correctly.")
+ "Make sure the spark connect server jar has been built " +
+ "and the system property `SPARK_HOME` is set correctly.")
val jars = recursiveListFiles(parentDir).filter { f =>
// SBT jar
- (f.getParent.endsWith("scala-2.12") &&
+ (f.getParentFile.getName.startsWith("scala-") &&
f.getName.startsWith("spark-connect-assembly") &&
f.getName.endsWith("SNAPSHOT.jar")) ||
// Maven Jar
(f.getParent.endsWith("target") &&
@@ -188,7 +189,7 @@ trait RemoteSparkSession
override def afterAll(): Unit = {
try {
- spark.close()
+ if (spark != null) spark.close()
} catch {
case e: Throwable => debug(e)
}
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]