This is an automated email from the ASF dual-hosted git repository.

wenchen pushed a commit to branch branch-3.0
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/branch-3.0 by this push:
     new 8910dae  [SPARK-31859][SPARK-31861][FOLLOWUP] Fix typo in tests
8910dae is described below

commit 8910dae0966ec7bbd9886245310fa07b81426e74
Author: Juliusz Sompolski <ju...@databricks.com>
AuthorDate: Fri Jun 5 10:01:00 2020 +0000

    [SPARK-31859][SPARK-31861][FOLLOWUP] Fix typo in tests
    
    ### What changes were proposed in this pull request?
    
    It appears I have unintentionally used nested JDBC statements in the two tests I added.
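    
    A minimal sketch of the accidental nesting, using the `withJdbcStatement` helper from `HiveThriftJdbcTest` (helper signature assumed from the suite being changed; query taken from the affected test):
    
    ```scala
    // Before: the outer statement is created but never used; a second,
    // nested connection/statement does the actual work.
    withJdbcStatement() { statement =>
      withJdbcStatement() { st =>
        st.execute("set spark.sql.datetime.java8API.enabled=true")
      }
    }
    
    // After: a single statement is sufficient.
    withJdbcStatement() { st =>
      st.execute("set spark.sql.datetime.java8API.enabled=true")
    }
    ```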
    
    ### Why are the changes needed?
    
    Clean up a typo. Please merge to master/branch-3.0.
    
    ### Does this PR introduce _any_ user-facing change?
    
    No.
    
    ### How was this patch tested?
    
    Unit tests.
    
    Closes #28735 from juliuszsompolski/SPARK-31859-fixup.
    
    Authored-by: Juliusz Sompolski <ju...@databricks.com>
    Signed-off-by: Wenchen Fan <wenc...@databricks.com>
    (cherry picked from commit ea010138e95cc9c0f62603d431b906bd5104f4e3)
    Signed-off-by: Wenchen Fan <wenc...@databricks.com>
---
 .../thriftserver/HiveThriftServer2Suites.scala     | 36 ++++++++++------------
 1 file changed, 16 insertions(+), 20 deletions(-)

diff --git a/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/HiveThriftServer2Suites.scala b/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/HiveThriftServer2Suites.scala
index 21256ad..4e6d4e1 100644
--- a/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/HiveThriftServer2Suites.scala
+++ b/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/HiveThriftServer2Suites.scala
@@ -813,31 +813,27 @@ class HiveThriftBinaryServerSuite extends HiveThriftJdbcTest {
   }
 
   test("SPARK-31859 Thriftserver works with spark.sql.datetime.java8API.enabled=true") {
-    withJdbcStatement() { statement =>
-      withJdbcStatement() { st =>
-        st.execute("set spark.sql.datetime.java8API.enabled=true")
-        val rs = st.executeQuery("select date '2020-05-28', timestamp '2020-05-28 00:00:00'")
-        rs.next()
-        assert(rs.getDate(1).toString() == "2020-05-28")
-        assert(rs.getTimestamp(2).toString() == "2020-05-28 00:00:00.0")
-      }
+    withJdbcStatement() { st =>
+      st.execute("set spark.sql.datetime.java8API.enabled=true")
+      val rs = st.executeQuery("select date '2020-05-28', timestamp '2020-05-28 00:00:00'")
+      rs.next()
+      assert(rs.getDate(1).toString() == "2020-05-28")
+      assert(rs.getTimestamp(2).toString() == "2020-05-28 00:00:00.0")
     }
   }
 
   test("SPARK-31861 Thriftserver respects spark.sql.session.timeZone") {
-    withJdbcStatement() { statement =>
-      withJdbcStatement() { st =>
-        st.execute("set spark.sql.session.timeZone=+03:15") // different than Thriftserver's JVM tz
+    withJdbcStatement() { st =>
+      st.execute("set spark.sql.session.timeZone=+03:15") // different than Thriftserver's JVM tz
       val rs = st.executeQuery("select timestamp '2020-05-28 10:00:00'")
-        rs.next()
-        // The timestamp as string is the same as the literal
-        assert(rs.getString(1) == "2020-05-28 10:00:00.0")
-        // Parsing it to java.sql.Timestamp in the client will always result in a timestamp
-        // in client default JVM timezone. The string value of the Timestamp will match the literal,
-        // but if the JDBC application cares about the internal timezone and UTC offset of the
-        // Timestamp object, it should set spark.sql.session.timeZone to match its client JVM tz.
-        assert(rs.getTimestamp(1).toString() == "2020-05-28 10:00:00.0")
-      }
+      rs.next()
+      // The timestamp as string is the same as the literal
+      assert(rs.getString(1) == "2020-05-28 10:00:00.0")
+      // Parsing it to java.sql.Timestamp in the client will always result in a timestamp
+      // in client default JVM timezone. The string value of the Timestamp will match the literal,
+      // but if the JDBC application cares about the internal timezone and UTC offset of the
+      // Timestamp object, it should set spark.sql.session.timeZone to match its client JVM tz.
+      assert(rs.getTimestamp(1).toString() == "2020-05-28 10:00:00.0")
     }
   }
 


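As a standalone illustration of the behavior described in the comment added in the SPARK-31861 hunk above, a JDBC client might observe the following (sketch only; the connection URL, object name, and driver setup are illustrative assumptions, not part of this commit):

```scala
// Hypothetical client-side snippet; assumes the Hive JDBC driver is on the classpath
// and a Thrift server is listening at the placeholder URL below.
import java.sql.DriverManager

object SessionTimeZoneDemo {
  def main(args: Array[String]): Unit = {
    val conn = DriverManager.getConnection("jdbc:hive2://localhost:10000/default")
    try {
      val st = conn.createStatement()
      // Session time zone differs from the client JVM's default time zone.
      st.execute("set spark.sql.session.timeZone=+03:15")
      val rs = st.executeQuery("select timestamp '2020-05-28 10:00:00'")
      rs.next()
      // The string form matches the literal.
      println(rs.getString(1))              // 2020-05-28 10:00:00.0
      // getTimestamp parses that string in the client's default JVM time zone,
      // so its toString also matches the literal...
      println(rs.getTimestamp(1))           // 2020-05-28 10:00:00.0
      // ...but the underlying instant (epoch millis) depends on the client JVM tz,
      // which is why the test comment recommends matching the two when that matters.
      println(rs.getTimestamp(1).getTime)
    } finally {
      conn.close()
    }
  }
}
```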
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org
