This is an automated email from the ASF dual-hosted git repository.

maxgekk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 98fbbcdac5d [SPARK-38745][SQL][TESTS] Move the tests for `NON_PARTITION_COLUMN` to `QueryCompilationErrorsDSv2Suite`
98fbbcdac5d is described below

commit 98fbbcdac5defebec81626dd1dbd5522a2fd910b
Author: Max Gekk <max.g...@gmail.com>
AuthorDate: Wed Apr 13 17:39:40 2022 +0300

    [SPARK-38745][SQL][TESTS] Move the tests for `NON_PARTITION_COLUMN` to `QueryCompilationErrorsDSv2Suite`
    
    ### What changes were proposed in this pull request?
    Move the tests for the error class `NON_PARTITION_COLUMN` from `InsertIntoSQLOnlyTests` to `QueryCompilationErrorsDSv2Suite`.
    
    ### Why are the changes needed?
    To improve code maintenance: all tests for error classes are placed in the `Query.*ErrorsSuite` suites. Also, the exceptions are raised from [QueryCompilationErrors](https://github.com/apache/spark/blob/bf75b495e18ed87d0c118bfd5f1ceb52d720cad9/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala#L100-L104), so the tests should be in `QueryCompilationErrorsDSv2Suite` for consistency.
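    For illustration, a test for an error class in these suites follows this pattern (a sketch distilled from the moved tests in the diff below; `t1` and `view` are the table and temp view prepared by `withTableAndData`):
    ```
    // Run the failing statement and capture the analysis error.
    val e = intercept[AnalysisException] {
      sql(s"INSERT INTO TABLE $t1 PARTITION (id=1) SELECT data FROM $view")
    }
    // Check the stable error class, not just the message text.
    assert(e.getErrorClass === "NON_PARTITION_COLUMN")
    ```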
    
    ### Does this PR introduce _any_ user-facing change?
    No.
    
    ### How was this patch tested?
    By running the moved tests:
    ```
    $ build/sbt "test:testOnly *QueryCompilationErrorsDSv2Suite"
    ```
    
    Closes #36175 from MaxGekk/move-tests-for-NON_PARTITION_COLUMN.
    
    Authored-by: Max Gekk <max.g...@gmail.com>
    Signed-off-by: Max Gekk <max.g...@gmail.com>
---
 .../spark/sql/connector/InsertIntoTests.scala      | 36 +---------------
 .../errors/QueryCompilationErrorsDSv2Suite.scala   | 49 +++++++++++++++++++---
 2 files changed, 45 insertions(+), 40 deletions(-)

diff --git a/sql/core/src/test/scala/org/apache/spark/sql/connector/InsertIntoTests.scala b/sql/core/src/test/scala/org/apache/spark/sql/connector/InsertIntoTests.scala
index fc98cfd5138..7493966790c 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/connector/InsertIntoTests.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/connector/InsertIntoTests.scala
@@ -198,7 +198,7 @@ trait InsertIntoSQLOnlyTests
   /** Whether to include the SQL specific tests in this trait within the extending test suite. */
   protected val includeSQLOnlyTests: Boolean
 
-  private def withTableAndData(tableName: String)(testFn: String => Unit): Unit = {
+  protected def withTableAndData(tableName: String)(testFn: String => Unit): Unit = {
     withTable(tableName) {
       val viewName = "tmp_view"
       val df = spark.createDataFrame(Seq((1L, "a"), (2L, "b"), (3L, "c"))).toDF("id", "data")
@@ -248,40 +248,6 @@ trait InsertIntoSQLOnlyTests
       }
     }
 
-    test("InsertInto: static PARTITION clause fails with non-partition 
column") {
-      val t1 = s"${catalogAndNamespace}tbl"
-      withTableAndData(t1) { view =>
-        sql(s"CREATE TABLE $t1 (id bigint, data string) USING $v2Format 
PARTITIONED BY (data)")
-
-        val exc = intercept[AnalysisException] {
-          sql(s"INSERT INTO TABLE $t1 PARTITION (id=1) SELECT data FROM $view")
-        }
-
-        verifyTable(t1, spark.emptyDataFrame)
-        assert(exc.getMessage.contains(
-          "PARTITION clause cannot contain a non-partition column name"))
-        assert(exc.getMessage.contains("id"))
-        assert(exc.getErrorClass == "NON_PARTITION_COLUMN")
-      }
-    }
-
-    test("InsertInto: dynamic PARTITION clause fails with non-partition 
column") {
-      val t1 = s"${catalogAndNamespace}tbl"
-      withTableAndData(t1) { view =>
-        sql(s"CREATE TABLE $t1 (id bigint, data string) USING $v2Format 
PARTITIONED BY (id)")
-
-        val exc = intercept[AnalysisException] {
-          sql(s"INSERT INTO TABLE $t1 PARTITION (data) SELECT * FROM $view")
-        }
-
-        verifyTable(t1, spark.emptyDataFrame)
-        assert(exc.getMessage.contains(
-          "PARTITION clause cannot contain a non-partition column name"))
-        assert(exc.getMessage.contains("data"))
-        assert(exc.getErrorClass == "NON_PARTITION_COLUMN")
-      }
-    }
-
     test("InsertInto: overwrite - dynamic clause - static mode") {
       withSQLConf(PARTITION_OVERWRITE_MODE.key -> PartitionOverwriteMode.STATIC.toString) {
         val t1 = s"${catalogAndNamespace}tbl"
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryCompilationErrorsDSv2Suite.scala b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryCompilationErrorsDSv2Suite.scala
index bfea3f535dd..042f130d7f5 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryCompilationErrorsDSv2Suite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryCompilationErrorsDSv2Suite.scala
@@ -17,18 +17,27 @@
 
 package org.apache.spark.sql.errors
 
-import org.apache.spark.sql.{AnalysisException, QueryTest}
-import org.apache.spark.sql.connector.{DatasourceV2SQLBase, FakeV2Provider}
+import org.apache.spark.sql.{AnalysisException, DataFrame, QueryTest}
+import org.apache.spark.sql.connector.{DatasourceV2SQLBase, FakeV2Provider, InsertIntoSQLOnlyTests}
 import org.apache.spark.sql.test.SharedSparkSession
 
 class QueryCompilationErrorsDSv2Suite
   extends QueryTest
   with SharedSparkSession
-  with DatasourceV2SQLBase {
+  with DatasourceV2SQLBase
+  with InsertIntoSQLOnlyTests {
+
+  private val v2Source = classOf[FakeV2Provider].getName
+  override protected val v2Format = v2Source
+  override protected val catalogAndNamespace = "testcat.ns1.ns2."
+  override protected val supportsDynamicOverwrite: Boolean = false
+  override protected val includeSQLOnlyTests: Boolean = false
+  override def verifyTable(tableName: String, expected: DataFrame): Unit = {
+    checkAnswer(spark.table(tableName), expected)
+  }
 
   test("UNSUPPORTED_FEATURE: IF PARTITION NOT EXISTS not supported by INSERT") 
{
-    val v2Format = classOf[FakeV2Provider].getName
-    val tbl = "testcat.ns1.ns2.tbl"
+    val tbl = s"${catalogAndNamespace}tbl"
 
     withTable(tbl) {
       val view = "tmp_view"
@@ -49,4 +58,34 @@ class QueryCompilationErrorsDSv2Suite
       }
     }
   }
+
+  test("NON_PARTITION_COLUMN: static PARTITION clause fails with non-partition 
column") {
+    val t1 = s"${catalogAndNamespace}tbl"
+    withTableAndData(t1) { view =>
+      sql(s"CREATE TABLE $t1 (id bigint, data string) USING $v2Format 
PARTITIONED BY (data)")
+
+      val e = intercept[AnalysisException] {
+        sql(s"INSERT INTO TABLE $t1 PARTITION (id=1) SELECT data FROM $view")
+      }
+
+      verifyTable(t1, spark.emptyDataFrame)
+      assert(e.getMessage === "PARTITION clause cannot contain a non-partition 
column name: id")
+      assert(e.getErrorClass === "NON_PARTITION_COLUMN")
+    }
+  }
+
+  test("NON_PARTITION_COLUMN: dynamic PARTITION clause fails with 
non-partition column") {
+    val t1 = s"${catalogAndNamespace}tbl"
+    withTableAndData(t1) { view =>
+      sql(s"CREATE TABLE $t1 (id bigint, data string) USING $v2Format 
PARTITIONED BY (id)")
+
+      val e = intercept[AnalysisException] {
+        sql(s"INSERT INTO TABLE $t1 PARTITION (data) SELECT * FROM $view")
+      }
+
+      verifyTable(t1, spark.emptyDataFrame)
+      assert(e.getMessage === "PARTITION clause cannot contain a non-partition 
column name: data")
+      assert(e.getErrorClass === "NON_PARTITION_COLUMN")
+    }
+  }
 }

