cloud-fan commented on a change in pull request #32949:
URL: https://github.com/apache/spark/pull/32949#discussion_r668040064



##########
File path: sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2DataFrameSuite.scala
##########
@@ -171,20 +172,22 @@ class DataSourceV2DataFrameSuite
   }
 
   test("Cannot write data with intervals to v2") {
-    withTable("testcat.table_name") {
-      val testCatalog = spark.sessionState.catalogManager.catalog("testcat").asTableCatalog
-      testCatalog.createTable(
-        Identifier.of(Array(), "table_name"),
-        new StructType().add("i", "interval"),
-        Array.empty, Collections.emptyMap[String, String])
-      val df = sql("select interval 1 day as i")
-      val v2Writer = df.writeTo("testcat.table_name")
-      val e1 = intercept[AnalysisException](v2Writer.append())
-      assert(e1.getMessage.contains(s"Cannot use interval type in the table schema."))
-      val e2 = intercept[AnalysisException](v2Writer.overwrite(df("i")))
-      assert(e2.getMessage.contains(s"Cannot use interval type in the table schema."))
-      val e3 = intercept[AnalysisException](v2Writer.overwritePartitions())
-      assert(e3.getMessage.contains(s"Cannot use interval type in the table schema."))
+    withSQLConf(SQLConf.LEGACY_INTERVAL_ENABLED.key -> "true") {

Review comment:
       not related to this PR: so DS v2 can't fail correctly when writing out new interval types?
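       For reference, a minimal sketch (not from this PR) of what such a check could look like inside the same suite, reusing its `testcat` in-memory catalog and helpers; whether the intercept actually succeeds for the new ANSI interval types is exactly the open question here:

           // Hedged sketch: probe a v2 write with the new ANSI interval types.
           // INTERVAL '1' YEAR / INTERVAL '2' DAY produce YearMonthIntervalType /
           // DayTimeIntervalType rather than the legacy CalendarIntervalType.
           test("write ANSI interval types to v2") {
             withTable("testcat.table_name") {
               val df = sql("SELECT INTERVAL '1' YEAR AS ym, INTERVAL '2' DAY AS dt")
               // Does this fail with a proper AnalysisException, or slip through?
               val e = intercept[AnalysisException] {
                 df.writeTo("testcat.table_name").create()
               }
               assert(e.getMessage.toLowerCase.contains("interval"))
             }
           }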

##########
File path: sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2DataFrameSuite.scala
##########
@@ -171,20 +172,22 @@ class DataSourceV2DataFrameSuite
   }
 
   test("Cannot write data with intervals to v2") {
-    withTable("testcat.table_name") {
-      val testCatalog = spark.sessionState.catalogManager.catalog("testcat").asTableCatalog
-      testCatalog.createTable(
-        Identifier.of(Array(), "table_name"),
-        new StructType().add("i", "interval"),
-        Array.empty, Collections.emptyMap[String, String])
-      val df = sql("select interval 1 day as i")
-      val v2Writer = df.writeTo("testcat.table_name")
-      val e1 = intercept[AnalysisException](v2Writer.append())
-      assert(e1.getMessage.contains(s"Cannot use interval type in the table schema."))
-      val e2 = intercept[AnalysisException](v2Writer.overwrite(df("i")))
-      assert(e2.getMessage.contains(s"Cannot use interval type in the table schema."))
-      val e3 = intercept[AnalysisException](v2Writer.overwritePartitions())
-      assert(e3.getMessage.contains(s"Cannot use interval type in the table schema."))
+    withSQLConf(SQLConf.LEGACY_INTERVAL_ENABLED.key -> "true") {

Review comment:
       cc @imback82 

##########
File path: sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/HiveThriftServer2Suites.scala
##########
@@ -663,13 +663,38 @@ class HiveThriftBinaryServerSuite extends HiveThriftServer2Test {
 
   test("Support interval type") {
     withJdbcStatement() { statement =>
+      val rs = statement.executeQuery("SELECT interval 5 years 7 months")
+      assert(rs.next())
+      assert(rs.getString(1) === "5-7")
+    }
+    withJdbcStatement() { statement =>
+      val rs = statement.executeQuery("SELECT interval 8 days 10 hours 5 minutes 10 seconds")
+      assert(rs.next())
+      assert(rs.getString(1) === "8 10:05:10.000000000")
+    }
+    withJdbcStatement() { statement =>
+      statement.execute(s"SET ${SQLConf.LEGACY_INTERVAL_ENABLED.key} = true")
       val rs = statement.executeQuery("SELECT interval 3 months 1 hours")
       assert(rs.next())
       assert(rs.getString(1) === "3 months 1 hours")
     }
+
     // Invalid interval value
     withJdbcStatement() { statement =>
       val e = intercept[SQLException] {
+        statement.executeQuery("SELECT interval 5 yea 7 months")
+      }
+      assert(e.getMessage.contains("org.apache.spark.sql.catalyst.parser.ParseException"))
+    }
+    withJdbcStatement() { statement =>
+      val e = intercept[SQLException] {
+        statement.executeQuery("SELECT interval 8 days 10 hours 5 minutes 10 secon")
+      }
+      assert(e.getMessage.contains("org.apache.spark.sql.catalyst.parser.ParseException"))
+    }
+    withJdbcStatement() { statement =>
+      val e = intercept[SQLException] {
+        statement.execute(s"SET ${SQLConf.LEGACY_INTERVAL_ENABLED.key} = true")

Review comment:
       why do we need to turn on the legacy config to detect this error?
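       The hunk is truncated here, but as a hedged illustration of the point (an assumption, not code from the PR): the malformed-unit cases above already fail at parse time without touching the flag, so the SET statement looks unnecessary for triggering the error:

           // Sketch: the parse error should fire regardless of
           // spark.sql.legacy.interval.enabled -- no SET statement issued here.
           withJdbcStatement() { statement =>
             val e = intercept[SQLException] {
               statement.executeQuery("SELECT interval 5 yea 7 months")
             }
             assert(e.getMessage.contains("org.apache.spark.sql.catalyst.parser.ParseException"))
           }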

##########
File path: sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/SparkMetadataOperationSuite.scala
##########
@@ -355,6 +356,7 @@ class SparkMetadataOperationSuite extends HiveThriftServer2TestBase {
     val ddl = s"CREATE GLOBAL TEMP VIEW $viewName as select interval 1 day as i"
 
     withJdbcStatement(viewName) { statement =>
+      statement.execute(s"SET ${SQLConf.LEGACY_INTERVAL_ENABLED.key}=true")

Review comment:
       We should update this test to check the new interval types. @yaooqinn can you help with this later in a follow-up PR?
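       A hedged sketch of what that follow-up might look like; the column names and expected strings are assumptions, extrapolated from the year-month ("5-7") and day-time ("8 10:05:10.000000000") renderings asserted in HiveThriftServer2Suites above:

           // Sketch: expose the new ANSI interval types through the view so
           // the legacy flag is no longer needed.
           val ddl = s"CREATE GLOBAL TEMP VIEW $viewName AS " +
             "SELECT INTERVAL '1' YEAR AS ym, INTERVAL '2' DAY AS dt"

           withJdbcStatement(viewName) { statement =>
             statement.execute(ddl)
             val rs = statement.executeQuery(s"SELECT ym, dt FROM global_temp.$viewName")
             assert(rs.next())
             // Expected renderings are assumptions following the formats above.
             assert(rs.getString(1) === "1-0")
             assert(rs.getString(2) === "2 00:00:00.000000000")
           }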

##########
File path: sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/SparkThriftServerProtocolVersionsSuite.scala
##########
@@ -362,7 +368,10 @@ class SparkThriftServerProtocolVersionsSuite extends HiveThriftServer2TestBase {
     }
 
     test(s"$version get interval type") {
-      testExecuteStatementWithProtocolVersion(version, "SELECT interval '1' year '2' day") { rs =>
+      testExecuteStatementWithProtocolVersion(
+        version,
+        "SELECT interval '1' year '2' day",
+        SQLConf.LEGACY_INTERVAL_ENABLED.key -> "true") { rs =>

Review comment:
       ditto
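       The same hedged sketch applies here, adapted to this suite's helper (the no-config arity is the one shown on the removed line; the expected strings are assumptions following the formats above):

           // Sketch: exercise the new ANSI interval types without the legacy flag.
           test(s"$version get ANSI interval types") {
             testExecuteStatementWithProtocolVersion(
               version,
               "SELECT INTERVAL '1' YEAR, INTERVAL '2' DAY") { rs =>
               assert(rs.next())
               assert(rs.getString(1) === "1-0")
               assert(rs.getString(2) === "2 00:00:00.000000000")
             }
           }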




