This is an automated email from the ASF dual-hosted git repository.

dongjoon pushed a commit to branch branch-3.4
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/branch-3.4 by this push:
     new ab7c4f8d039 [SPARK-42801][CONNECT][TESTS] Ignore flaky `write jdbc` test of `ClientE2ETestSuite` on Java 8
ab7c4f8d039 is described below

commit ab7c4f8d0392b7e2f5c7ff6d8f6375cad8e874f3
Author: Dongjoon Hyun <[email protected]>
AuthorDate: Tue Mar 14 23:27:52 2023 -0700

    [SPARK-42801][CONNECT][TESTS] Ignore flaky `write jdbc` test of `ClientE2ETestSuite` on Java 8
    
    ### What changes were proposed in this pull request?
    
    This PR aims to ignore the flaky `write jdbc` test of `ClientE2ETestSuite` on Java 8.
    
    ![Screenshot 2023-03-14 at 10 56 34 PM](https://user-images.githubusercontent.com/9700541/225219845-94eaea79-ade6-435d-9d03-19fc73cb8617.png)
    
    ### Why are the changes needed?
    
    Currently, this happens on `branch-3.4` with Java 8 only.
    
    **BRANCH-3.4**
    https://github.com/apache/spark/commits/branch-3.4
    
    ![Screenshot 2023-03-14 at 10 55 29 PM](https://user-images.githubusercontent.com/9700541/225219670-f8a68dc0-5aa6-428f-9c02-ae41580a38bc.png)
    
    **JAVA 8**
    
    1. Currently, the `Connect` server uses the `Hive` catalog during testing, and it uses `Derby` with a disk store when it creates a table.
    2. The `Connect` client tries to use `Derby` with a `mem` (in-memory) store and fails with `No suitable driver` on the first attempt, as the Java 8 session below shows.
    ```
    $ bin/spark-shell -c spark.sql.catalogImplementation=hive
    Setting default log level to "WARN".
    To adjust logging level use sc.setLogLevel(newLevel). For SparkR, use setLogLevel(newLevel).
    23/03/14 21:50:43 WARN NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
    Spark context available as 'sc' (master = local[64], app id = local-1678855843831).
    Spark session available as 'spark'.
    Welcome to
          ____              __
         / __/__  ___ _____/ /__
        _\ \/ _ \/ _ `/ __/  '_/
       /___/ .__/\_,_/_/ /_/\_\   version 3.4.1-SNAPSHOT
          /_/
    
    Using Scala version 2.12.17 (OpenJDK 64-Bit Server VM, Java 1.8.0_312)
    Type in expressions to have them evaluated.
    Type :help for more information.
    
    scala> sc.setLogLevel("INFO")
    
    scala> sql("CREATE TABLE t(a int)")
    23/03/14 21:51:08 INFO SharedState: Setting hive.metastore.warehouse.dir ('null') to the value of spark.sql.warehouse.dir.
    23/03/14 21:51:08 INFO SharedState: Warehouse path is 'file:/Users/dongjoon/APACHE/spark-merge/spark-warehouse'.
    23/03/14 21:51:10 WARN ResolveSessionCatalog: A Hive serde table will be created as there is no table provider specified. You can set spark.sql.legacy.createHiveTableByDefault to false so that native data source table will be created instead.
    23/03/14 21:51:10 INFO HiveUtils: Initializing HiveMetastoreConnection version 2.3.9 using Spark classes.
    23/03/14 21:51:11 INFO HiveClientImpl: Warehouse location for Hive client (version 2.3.9) is file:/Users/dongjoon/APACHE/spark-merge/spark-warehouse
    res1: org.apache.spark.sql.DataFrame = []
    
    scala> java.sql.DriverManager.getConnection("jdbc:derby:memory:1234;create=true").createStatement().execute("CREATE TABLE s(a int)");
    java.sql.SQLException: No suitable driver found for jdbc:derby:memory:1234;create=true
      at java.sql.DriverManager.getConnection(DriverManager.java:689)
      at java.sql.DriverManager.getConnection(DriverManager.java:270)
      ... 47 elided
    ```
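    
    The `No suitable driver` error means `java.sql.DriverManager` has no Derby driver registered at lookup time in this environment. For reference only (not part of this PR), a minimal hypothetical workaround sketch that registers Derby's embedded driver explicitly, assuming the Derby jar is on the classpath:
    
    ```
    // Hypothetical sketch, not part of this PR: register the Derby embedded
    // driver explicitly so DriverManager does not rely on automatic
    // service-loader discovery, which is what fails here on Java 8.
    Class.forName("org.apache.derby.jdbc.EmbeddedDriver")
    val conn = java.sql.DriverManager.getConnection("jdbc:derby:memory:1234;create=true")
    try {
      conn.createStatement().execute("CREATE TABLE s(a int)")
    } finally {
      conn.close()
    }
    ```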
    
    **JAVA 11**
    ```
    $ bin/spark-shell -c spark.sql.catalogImplementation=hive
    Setting default log level to "WARN".
    To adjust logging level use sc.setLogLevel(newLevel). For SparkR, use setLogLevel(newLevel).
    23/03/14 21:57:59 WARN NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
    Spark context Web UI available at http://localhost:4040
    Spark context available as 'sc' (master = local[*], app id = local-1678856279685).
    Spark session available as 'spark'.
    Welcome to
          ____              __
         / __/__  ___ _____/ /__
        _\ \/ _ \/ _ `/ __/  '_/
       /___/ .__/\_,_/_/ /_/\_\   version 3.4.1-SNAPSHOT
          /_/
    
    Using Scala version 2.12.17 (OpenJDK 64-Bit Server VM, Java 11.0.18)
    Type in expressions to have them evaluated.
    Type :help for more information.
    
    scala> sql("CREATE TABLE hive_t2(a int)")
    23/03/14 21:58:06 WARN ResolveSessionCatalog: A Hive serde table will be created as there is no table provider specified. You can set spark.sql.legacy.createHiveTableByDefault to false so that native data source table will be created instead.
    23/03/14 21:58:06 WARN HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
    23/03/14 21:58:06 WARN HiveConf: HiveConf of name hive.stats.retries.wait does not exist
    23/03/14 21:58:07 WARN ObjectStore: Version information not found in metastore. hive.metastore.schema.verification is not enabled so recording the schema version 2.3.0
    23/03/14 21:58:07 WARN ObjectStore: setMetaStoreSchemaVersion called but recording version is disabled: version = 2.3.0, comment = Set by MetaStore dongjoon@127.0.0.1
    23/03/14 21:58:07 WARN SessionState: METASTORE_FILTER_HOOK will be ignored, since hive.security.authorization.manager is set to instance of HiveAuthorizerFactory.
    23/03/14 21:58:07 WARN HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
    23/03/14 21:58:07 WARN HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
    23/03/14 21:58:07 WARN HiveConf: HiveConf of name hive.stats.retries.wait does not exist
    23/03/14 21:58:07 WARN HiveMetaStore: Location: file:/Users/dongjoon/APACHE/spark-merge/spark-warehouse/hive_t2 specified for non-external table:hive_t2
    res0: org.apache.spark.sql.DataFrame = []
    
    scala> java.sql.DriverManager.getConnection("jdbc:derby:memory:1234;create=true").createStatement().execute("CREATE TABLE derby_t2(a int)");
    res1: Boolean = false
    
    scala> :quit
    ```
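    
    The fix guards the test body with a Java version check from Apache Commons Lang (already a Spark dependency), so the test is effectively skipped on Java 8. The pattern, as applied in the diff below:
    
    ```
    import org.apache.commons.lang3.{JavaVersion, SystemUtils}
    
    // Run the body only on Java 9 or newer; on Java 8 the test becomes a no-op.
    if (SystemUtils.isJavaVersionAtLeast(JavaVersion.JAVA_9)) {
      // ... original `write jdbc` test body ...
    }
    ```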
    
    ### Does this PR introduce _any_ user-facing change?
    
    No. This is a test-only PR.
    
    ### How was this patch tested?
    
    Pass the CIs.
    
    Closes #40434 from dongjoon-hyun/SPARK-42801.
    
    Authored-by: Dongjoon Hyun <[email protected]>
    Signed-off-by: Dongjoon Hyun <[email protected]>
    (cherry picked from commit ec1d3b354c95e09d28c163a6d3550047c73e15c8)
    Signed-off-by: Dongjoon Hyun <[email protected]>
---
 .../org/apache/spark/sql/ClientE2ETestSuite.scala  | 23 ++++++++++++----------
 1 file changed, 13 insertions(+), 10 deletions(-)

diff --git a/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/ClientE2ETestSuite.scala b/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/ClientE2ETestSuite.scala
index 8665c067326..c948f192c90 100644
--- a/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/ClientE2ETestSuite.scala
+++ b/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/ClientE2ETestSuite.scala
@@ -25,6 +25,7 @@ import io.grpc.StatusRuntimeException
 import java.util.Properties
 import org.apache.commons.io.FileUtils
 import org.apache.commons.io.output.TeeOutputStream
+import org.apache.commons.lang3.{JavaVersion, SystemUtils}
 import org.scalactic.TolerantNumerics
 
 import org.apache.spark.SPARK_VERSION
@@ -202,16 +203,18 @@ class ClientE2ETestSuite extends RemoteSparkSession with SQLHelper {
   }
 
   test("write jdbc") {
-    val url = "jdbc:derby:memory:1234"
-    val table = "t1"
-    try {
-      spark.range(10).write.jdbc(url = s"$url;create=true", table, new Properties())
-      val result = spark.read.jdbc(url = url, table, new Properties()).collect()
-      assert(result.length == 10)
-    } finally {
-      // clean up
-      assertThrows[StatusRuntimeException] {
-        spark.read.jdbc(url = s"$url;drop=true", table, new Properties()).collect()
+    if (SystemUtils.isJavaVersionAtLeast(JavaVersion.JAVA_9)) {
+      val url = "jdbc:derby:memory:1234"
+      val table = "t1"
+      try {
+        spark.range(10).write.jdbc(url = s"$url;create=true", table, new Properties())
+        val result = spark.read.jdbc(url = url, table, new Properties()).collect()
+        assert(result.length == 10)
+      } finally {
+        // clean up
+        assertThrows[StatusRuntimeException] {
+          spark.read.jdbc(url = s"$url;drop=true", table, new Properties()).collect()
+        }
       }
     }
   }


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]
