This is an automated email from the ASF dual-hosted git repository.

ulyssesyou pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/incubator-kyuubi.git


The following commit(s) were added to refs/heads/master by this push:
     new e00e38028 [KYUUBI #3671] [TEST] assert error message for 
SCHEMA_NOT_FOUND and TABLE_OR_VIEW_NOT_FOUND for Spark 3.4
e00e38028 is described below

commit e00e3802866ec7ccfdddb472928468e693fb4049
Author: Bowen Liang <[email protected]>
AuthorDate: Fri Oct 21 12:15:55 2022 +0800

    [KYUUBI #3671] [TEST] assert error message for SCHEMA_NOT_FOUND and 
TABLE_OR_VIEW_NOT_FOUND for Spark 3.4
    
    ### _Why are the changes needed?_
    
    to close #3671.
    
    - fix assert message for '[SCHEMA_NOT_FOUND]' and 
'[TABLE_OR_VIEW_NOT_FOUND]'
    - introduce SparkVersionUtil in kyuubi-common test for checking Spark 
version
    
    ### _How was this patch tested?_
    - [ ] Add some test cases that check the changes thoroughly including 
negative and positive cases if possible
    
    - [ ] Add screenshots for manual tests if appropriate
    
    - [x] [Run 
test](https://kyuubi.apache.org/docs/latest/develop_tools/testing.html#running-tests)
 locally before making a pull request
    
    Closes #3676 from bowenliang123/3671-notfound.
    
    Closes #3671
    
    c505098f [Bowen Liang] update
    a950abbf [Bowen Liang] fix typo
    56464e84 [Bowen Liang] update SparkVersionUtil
    2190a93e [Bowen Liang] fix assert message for '[SCHEMA_NOT_FOUND]' and 
'[TABLE_OR_VIEW_NOT_FOUND]', introduce SparkVersionUtil for checking Spark 
version in kyuubi-common test
    
    Authored-by: Bowen Liang <[email protected]>
    Signed-off-by: ulysses-you <[email protected]>
---
 .../spark/operation/SparkOperationSuite.scala      | 12 +++++++--
 .../kyuubi/operation/IcebergMetadataTests.scala    |  8 ++----
 .../apache/kyuubi/operation/SparkQueryTests.scala  | 18 +++++++++++---
 .../org/apache/kyuubi/util/SparkVersionUtil.scala  | 29 ++++++++++++++++++++++
 4 files changed, 55 insertions(+), 12 deletions(-)

diff --git 
a/externals/kyuubi-spark-sql-engine/src/test/scala/org/apache/kyuubi/engine/spark/operation/SparkOperationSuite.scala
 
b/externals/kyuubi-spark-sql-engine/src/test/scala/org/apache/kyuubi/engine/spark/operation/SparkOperationSuite.scala
index 2ac0235c6..8d3e1d7ac 100644
--- 
a/externals/kyuubi-spark-sql-engine/src/test/scala/org/apache/kyuubi/engine/spark/operation/SparkOperationSuite.scala
+++ 
b/externals/kyuubi-spark-sql-engine/src/test/scala/org/apache/kyuubi/engine/spark/operation/SparkOperationSuite.scala
@@ -38,6 +38,7 @@ import org.apache.kyuubi.engine.spark.shim.SparkCatalogShim
 import org.apache.kyuubi.operation.{HiveMetadataTests, SparkQueryTests}
 import org.apache.kyuubi.operation.meta.ResultSetSchemaConstant._
 import org.apache.kyuubi.util.KyuubiHadoopUtils
+import org.apache.kyuubi.util.SparkVersionUtil.isSparkVersionAtLeast
 
 class SparkOperationSuite extends WithSparkSQLEngine with HiveMetadataTests 
with SparkQueryTests {
 
@@ -457,12 +458,19 @@ class SparkOperationSuite extends WithSparkSQLEngine with 
HiveMetadataTests with
       val req = new TOpenSessionReq()
       req.setUsername("kentyao")
       req.setPassword("anonymous")
-      val conf = Map("use:database" -> "default2")
+      val dbName = "default2"
+      val conf = Map("use:database" -> dbName)
       req.setConfiguration(conf.asJava)
       val tOpenSessionResp = client.OpenSession(req)
       val status = tOpenSessionResp.getStatus
+      val errorMessage = status.getErrorMessage
       assert(status.getStatusCode === TStatusCode.ERROR_STATUS)
-      assert(status.getErrorMessage.contains("Database 'default2' not found"))
+      if (isSparkVersionAtLeast("3.4")) {
+        assert(errorMessage.contains("[SCHEMA_NOT_FOUND]"))
+        assert(errorMessage.contains(s"The schema `$dbName` cannot be found."))
+      } else {
+        assert(errorMessage.contains(s"Database '$dbName' not found"))
+      }
     }
   }
 
diff --git 
a/kyuubi-common/src/test/scala/org/apache/kyuubi/operation/IcebergMetadataTests.scala
 
b/kyuubi-common/src/test/scala/org/apache/kyuubi/operation/IcebergMetadataTests.scala
index 8f19f9016..d14224a84 100644
--- 
a/kyuubi-common/src/test/scala/org/apache/kyuubi/operation/IcebergMetadataTests.scala
+++ 
b/kyuubi-common/src/test/scala/org/apache/kyuubi/operation/IcebergMetadataTests.scala
@@ -17,16 +17,12 @@
 
 package org.apache.kyuubi.operation
 
-import org.apache.kyuubi.{IcebergSuiteMixin, SPARK_COMPILE_VERSION}
-import org.apache.kyuubi.engine.SemanticVersion
+import org.apache.kyuubi.IcebergSuiteMixin
 import org.apache.kyuubi.operation.meta.ResultSetSchemaConstant._
+import org.apache.kyuubi.util.SparkVersionUtil.isSparkVersionAtLeast
 
 trait IcebergMetadataTests extends HiveJDBCTestHelper with IcebergSuiteMixin {
 
-  def isSparkVersionAtLeast(ver: String): Boolean = {
-    SemanticVersion(SPARK_COMPILE_VERSION).isVersionAtLeast(ver)
-  }
-
   test("get catalogs") {
     withJdbcStatement() { statement =>
       val metaData = statement.getConnection.getMetaData
diff --git 
a/kyuubi-common/src/test/scala/org/apache/kyuubi/operation/SparkQueryTests.scala
 
b/kyuubi-common/src/test/scala/org/apache/kyuubi/operation/SparkQueryTests.scala
index 726948485..d227f4002 100644
--- 
a/kyuubi-common/src/test/scala/org/apache/kyuubi/operation/SparkQueryTests.scala
+++ 
b/kyuubi-common/src/test/scala/org/apache/kyuubi/operation/SparkQueryTests.scala
@@ -29,6 +29,7 @@ import 
org.apache.hive.service.rpc.thrift.{TExecuteStatementReq, TFetchResultsRe
 import org.apache.kyuubi.{KYUUBI_VERSION, Utils}
 import org.apache.kyuubi.config.KyuubiConf
 import org.apache.kyuubi.engine.SemanticVersion
+import org.apache.kyuubi.util.SparkVersionUtil.isSparkVersionAtLeast
 
 trait SparkQueryTests extends HiveJDBCTestHelper {
 
@@ -469,11 +470,20 @@ trait SparkQueryTests extends HiveJDBCTestHelper {
   }
 
   test("KYUUBI #1059: Plan only operations") {
-    val ddl = "create table t(a int) using parquet"
-    val dql = "select * from t"
+    val tableName = "t"
+    val ddl = s"create table $tableName(a int) using parquet"
+    val dql = s"select * from $tableName"
     val setkey = "SET kyuubi.operation.plan.only.mode"
     withJdbcStatement("t") { statement =>
       try {
+        val assertTableOrViewNotfound: (Exception, String) => Unit = (e, 
tableName) => {
+          if (isSparkVersionAtLeast("3.4")) {
+            assert(e.getMessage.contains("[TABLE_OR_VIEW_NOT_FOUND]"))
+            assert(e.getMessage.contains(s"The table or view `$tableName` 
cannot be found."))
+          } else {
+            assert(e.getMessage.contains("Table or view not found"))
+          }
+        }
         statement.execute("SET kyuubi.operation.plan.only.mode=optimize")
         val set = statement.executeQuery(ddl)
         assert(set.next())
@@ -482,10 +492,10 @@ trait SparkQueryTests extends HiveJDBCTestHelper {
         assert(set0.next())
         assert(set0.getString(2) === "optimize")
         val e1 = intercept[SQLException](statement.executeQuery(dql))
-        assert(e1.getMessage.contains("Table or view not found"))
+        assertTableOrViewNotfound(e1, tableName)
         statement.execute("SET kyuubi.operation.plan.only.mode=analyze")
         val e2 = intercept[SQLException](statement.executeQuery(dql))
-        assert(e2.getMessage.contains("Table or view not found"))
+        assertTableOrViewNotfound(e2, tableName)
         statement.execute("SET kyuubi.operation.plan.only.mode=parse")
         val set1 = statement.executeQuery(dql)
         assert(set1.next())
diff --git 
a/kyuubi-common/src/test/scala/org/apache/kyuubi/util/SparkVersionUtil.scala 
b/kyuubi-common/src/test/scala/org/apache/kyuubi/util/SparkVersionUtil.scala
new file mode 100644
index 000000000..cd8409d10
--- /dev/null
+++ b/kyuubi-common/src/test/scala/org/apache/kyuubi/util/SparkVersionUtil.scala
@@ -0,0 +1,29 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.kyuubi.util
+
+import org.apache.kyuubi.SPARK_COMPILE_VERSION
+import org.apache.kyuubi.engine.SemanticVersion
+
+object SparkVersionUtil {
+  lazy val sparkSemanticVersion: SemanticVersion = 
SemanticVersion(SPARK_COMPILE_VERSION)
+
+  def isSparkVersionAtLeast(ver: String): Boolean = {
+    sparkSemanticVersion.isVersionAtLeast(ver)
+  }
+}

Reply via email to