This is an automated email from the ASF dual-hosted git repository.

maxgekk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new f99580e6f87 [SPARK-42337][SQL] Add error class INVALID_TEMP_OBJ_REFERENCE
f99580e6f87 is described below

commit f99580e6f875e5dd93b30cdb51e3b7504077aa5f
Author: allisonwang-db <allison.w...@databricks.com>
AuthorDate: Sat Feb 18 10:17:26 2023 +0300

    [SPARK-42337][SQL] Add error class INVALID_TEMP_OBJ_REFERENCE
    
    ### What changes were proposed in this pull request?
    
    This PR adds a new error class `INVALID_TEMP_OBJ_REFERENCE` and replaces two existing error classes with this new one:
    - _LEGACY_ERROR_TEMP_1283
    - _LEGACY_ERROR_TEMP_1284
    
    ### Why are the changes needed?
    
    To improve the error messages.
    
    ### Does this PR introduce _any_ user-facing change?
    
    Yes, the PR changes a user-facing error message.
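    
    As an illustrative sketch (hypothetical view names `temp_v` and `perm_v`, assuming the default session catalog and database), the new message surfaces like this:
    
        // Hypothetical repro; not part of this PR's tests.
        spark.sql("CREATE TEMPORARY VIEW temp_v AS SELECT 1 AS id")
        spark.sql("CREATE VIEW perm_v AS SELECT * FROM temp_v")
        // => AnalysisException [INVALID_TEMP_OBJ_REFERENCE]:
        //    Cannot create a persistent view `spark_catalog`.`default`.`perm_v`
        //    by referencing a temporary view `temp_v`. Please make the temporary
        //    view persistent, or make the persistent view temporary.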
    
    ### How was this patch tested?
    
    Existing unit tests.
    
    Closes #39910 from allisonwang-db/spark-42337-persistent-over-temp-err.
    
    Authored-by: allisonwang-db <allison.w...@databricks.com>
    Signed-off-by: Max Gekk <max.g...@gmail.com>
---
 core/src/main/resources/error/error-classes.json   |  15 +--
 .../spark/sql/errors/QueryCompilationErrors.scala  |  16 ++--
 .../results/postgreSQL/create_view.sql.out         | 104 +++++++++++++--------
 .../scala/org/apache/spark/sql/SQLQuerySuite.scala |  22 +++--
 .../spark/sql/connector/DataSourceV2SQLSuite.scala |   8 +-
 .../apache/spark/sql/execution/SQLViewSuite.scala  |  34 ++++---
 .../spark/sql/execution/SQLViewTestSuite.scala     |  32 ++++---
 .../sql/hive/execution/HiveSQLViewSuite.scala      |  13 ++-
 8 files changed, 151 insertions(+), 93 deletions(-)

diff --git a/core/src/main/resources/error/error-classes.json b/core/src/main/resources/error/error-classes.json
index 6d9aa59e57f..8cf46cb7aad 100644
--- a/core/src/main/resources/error/error-classes.json
+++ b/core/src/main/resources/error/error-classes.json
@@ -1034,6 +1034,11 @@
     },
     "sqlState" : "42823"
   },
+  "INVALID_TEMP_OBJ_REFERENCE" : {
+    "message" : [
+      "Cannot create a persistent <obj> <objName> by referencing a temporary 
<tempObj> <tempObjName>. Please make the temporary <tempObj> persistent, or 
make the persistent <obj> temporary."
+    ]
+  },
   "INVALID_TYPED_LITERAL" : {
     "message" : [
       "The value of the typed literal <valueType> is invalid: <value>."
@@ -3343,16 +3348,6 @@
       "Not allowed to create a permanent view <name> without explicitly 
assigning an alias for expression <attrName>."
     ]
   },
-  "_LEGACY_ERROR_TEMP_1283" : {
-    "message" : [
-      "Not allowed to create a permanent view <name> by referencing a 
temporary view <nameParts>. Please create a temp view instead by CREATE TEMP 
VIEW."
-    ]
-  },
-  "_LEGACY_ERROR_TEMP_1284" : {
-    "message" : [
-      "Not allowed to create a permanent view <name> by referencing a 
temporary function `<funcName>`."
-    ]
-  },
   "_LEGACY_ERROR_TEMP_1285" : {
     "message" : [
       "Since Spark 2.3, the queries from raw JSON/CSV files are disallowed 
when the",
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
index 9840ebf2b8a..556b3a62da3 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
@@ -2690,20 +2690,24 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase {
       name: TableIdentifier,
       nameParts: String): Throwable = {
     new AnalysisException(
-      errorClass = "_LEGACY_ERROR_TEMP_1283",
+      errorClass = "INVALID_TEMP_OBJ_REFERENCE",
       messageParameters = Map(
-        "name" -> name.toString,
-        "nameParts" -> nameParts))
+        "obj" -> "view",
+        "objName" -> toSQLId(name.nameParts),
+        "tempObj" -> "view",
+        "tempObjName" -> toSQLId(nameParts)))
   }
 
   def notAllowedToCreatePermanentViewByReferencingTempFuncError(
       name: TableIdentifier,
       funcName: String): Throwable = {
      new AnalysisException(
-      errorClass = "_LEGACY_ERROR_TEMP_1284",
+      errorClass = "INVALID_TEMP_OBJ_REFERENCE",
       messageParameters = Map(
-        "name" -> name.toString,
-        "funcName" -> funcName))
+        "obj" -> "view",
+        "objName" -> toSQLId(name.nameParts),
+        "tempObj" -> "function",
+        "tempObjName" -> toSQLId(funcName)))
   }
 
   def queryFromRawFilesIncludeCorruptRecordColumnError(): Throwable = {
diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/create_view.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/create_view.sql.out
index 9f39119aa16..2768f7b53b4 100644
--- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/create_view.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/create_view.sql.out
@@ -278,10 +278,12 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_1283",
+  "errorClass" : "INVALID_TEMP_OBJ_REFERENCE",
   "messageParameters" : {
-    "name" : "`spark_catalog`.`temp_view_test`.`v1_temp`",
-    "nameParts" : "temp_table"
+    "obj" : "view",
+    "objName" : "`spark_catalog`.`temp_view_test`.`v1_temp`",
+    "tempObj" : "view",
+    "tempObjName" : "`temp_table`"
   }
 }
 
@@ -340,10 +342,12 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_1283",
+  "errorClass" : "INVALID_TEMP_OBJ_REFERENCE",
   "messageParameters" : {
-    "name" : "`spark_catalog`.`temp_view_test`.`v3_temp`",
-    "nameParts" : "temp_table"
+    "obj" : "view",
+    "objName" : "`spark_catalog`.`temp_view_test`.`v3_temp`",
+    "tempObj" : "view",
+    "tempObjName" : "`temp_table`"
   }
 }
 
@@ -395,10 +399,12 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_1283",
+  "errorClass" : "INVALID_TEMP_OBJ_REFERENCE",
   "messageParameters" : {
-    "name" : "`spark_catalog`.`temp_view_test`.`v4_temp`",
-    "nameParts" : "temp_table"
+    "obj" : "view",
+    "objName" : "`spark_catalog`.`temp_view_test`.`v4_temp`",
+    "tempObj" : "view",
+    "tempObjName" : "`temp_table`"
   }
 }
 
@@ -413,10 +419,12 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_1283",
+  "errorClass" : "INVALID_TEMP_OBJ_REFERENCE",
   "messageParameters" : {
-    "name" : "`spark_catalog`.`temp_view_test`.`v5_temp`",
-    "nameParts" : "temp_table"
+    "obj" : "view",
+    "objName" : "`spark_catalog`.`temp_view_test`.`v5_temp`",
+    "tempObj" : "view",
+    "tempObjName" : "`temp_table`"
   }
 }
 
@@ -578,10 +586,12 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_1283",
+  "errorClass" : "INVALID_TEMP_OBJ_REFERENCE",
   "messageParameters" : {
-    "name" : "`spark_catalog`.`temp_view_test`.`v6_temp`",
-    "nameParts" : "temp_table"
+    "obj" : "view",
+    "objName" : "`spark_catalog`.`temp_view_test`.`v6_temp`",
+    "tempObj" : "view",
+    "tempObjName" : "`temp_table`"
   }
 }
 
@@ -593,10 +603,12 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_1283",
+  "errorClass" : "INVALID_TEMP_OBJ_REFERENCE",
   "messageParameters" : {
-    "name" : "`spark_catalog`.`temp_view_test`.`v7_temp`",
-    "nameParts" : "temp_table"
+    "obj" : "view",
+    "objName" : "`spark_catalog`.`temp_view_test`.`v7_temp`",
+    "tempObj" : "view",
+    "tempObjName" : "`temp_table`"
   }
 }
 
@@ -608,10 +620,12 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_1283",
+  "errorClass" : "INVALID_TEMP_OBJ_REFERENCE",
   "messageParameters" : {
-    "name" : "`spark_catalog`.`temp_view_test`.`v8_temp`",
-    "nameParts" : "temp_table"
+    "obj" : "view",
+    "objName" : "`spark_catalog`.`temp_view_test`.`v8_temp`",
+    "tempObj" : "view",
+    "tempObjName" : "`temp_table`"
   }
 }
 
@@ -623,10 +637,12 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_1283",
+  "errorClass" : "INVALID_TEMP_OBJ_REFERENCE",
   "messageParameters" : {
-    "name" : "`spark_catalog`.`temp_view_test`.`v9_temp`",
-    "nameParts" : "temp_table"
+    "obj" : "view",
+    "objName" : "`spark_catalog`.`temp_view_test`.`v9_temp`",
+    "tempObj" : "view",
+    "tempObjName" : "`temp_table`"
   }
 }
 
@@ -777,10 +793,12 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_1283",
+  "errorClass" : "INVALID_TEMP_OBJ_REFERENCE",
   "messageParameters" : {
-    "name" : "`spark_catalog`.`testviewschm2`.`temporal1`",
-    "nameParts" : "tt"
+    "obj" : "view",
+    "objName" : "`spark_catalog`.`testviewschm2`.`temporal1`",
+    "tempObj" : "view",
+    "tempObjName" : "`tt`"
   }
 }
 
@@ -824,10 +842,12 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_1283",
+  "errorClass" : "INVALID_TEMP_OBJ_REFERENCE",
   "messageParameters" : {
-    "name" : "`spark_catalog`.`testviewschm2`.`temporal2`",
-    "nameParts" : "tt"
+    "obj" : "view",
+    "objName" : "`spark_catalog`.`testviewschm2`.`temporal2`",
+    "tempObj" : "view",
+    "tempObjName" : "`tt`"
   }
 }
 
@@ -871,10 +891,12 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_1283",
+  "errorClass" : "INVALID_TEMP_OBJ_REFERENCE",
   "messageParameters" : {
-    "name" : "`spark_catalog`.`testviewschm2`.`temporal3`",
-    "nameParts" : "tt"
+    "obj" : "view",
+    "objName" : "`spark_catalog`.`testviewschm2`.`temporal3`",
+    "tempObj" : "view",
+    "tempObjName" : "`tt`"
   }
 }
 
@@ -918,10 +940,12 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_1283",
+  "errorClass" : "INVALID_TEMP_OBJ_REFERENCE",
   "messageParameters" : {
-    "name" : "`spark_catalog`.`testviewschm2`.`temporal4`",
-    "nameParts" : "tt"
+    "obj" : "view",
+    "objName" : "`spark_catalog`.`testviewschm2`.`temporal4`",
+    "tempObj" : "view",
+    "tempObjName" : "`tt`"
   }
 }
 
@@ -933,10 +957,12 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_1283",
+  "errorClass" : "INVALID_TEMP_OBJ_REFERENCE",
   "messageParameters" : {
-    "name" : "`spark_catalog`.`testviewschm2`.`temporal5`",
-    "nameParts" : "tt"
+    "obj" : "view",
+    "objName" : "`spark_catalog`.`testviewschm2`.`temporal5`",
+    "tempObj" : "view",
+    "tempObjName" : "`tt`"
   }
 }
 
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
index 3350858b7e2..20fa5fee3aa 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
@@ -3960,9 +3960,14 @@ class SQLQuerySuite extends QueryTest with SharedSparkSession with AdaptiveSpark
               |SELECT * FROM cte
               |""".stripMargin)
         }
-        assert(e.message.contains("Not allowed to create a permanent view " +
-          s"`$SESSION_CATALOG_NAME`.`default`.`$testViewName` by referencing a 
" +
-          s"temporary view $tempViewName"))
+        checkError(
+          exception = e,
+          errorClass = "INVALID_TEMP_OBJ_REFERENCE",
+          parameters = Map(
+            "obj" -> "view",
+            "objName" -> s"`$SESSION_CATALOG_NAME`.`default`.`$testViewName`",
+            "tempObj" -> "view",
+            "tempObjName" -> s"`$tempViewName`"))
 
         val e2 = intercept[AnalysisException] {
           sql(
@@ -3974,9 +3979,14 @@ class SQLQuerySuite extends QueryTest with SharedSparkSession with AdaptiveSpark
               |SELECT * FROM cte
               |""".stripMargin)
         }
-        assert(e2.message.contains("Not allowed to create a permanent view " +
-          s"`$SESSION_CATALOG_NAME`.`default`.`$testViewName` by referencing a 
" +
-          s"temporary function `$tempFuncName`"))
+        checkError(
+          exception = e2,
+          errorClass = "INVALID_TEMP_OBJ_REFERENCE",
+          parameters = Map(
+            "obj" -> "view",
+            "objName" -> s"`$SESSION_CATALOG_NAME`.`default`.`$testViewName`",
+            "tempObj" -> "function",
+            "tempObjName" -> s"`$tempFuncName`"))
       }
     }
   }
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala
index 38bd24356f1..f9b7b168d14 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala
@@ -2186,8 +2186,12 @@ class DataSourceV2SQLSuiteV1Filter
           exception = intercept[AnalysisException] {
             sql(s"CREATE VIEW $sessionCatalogName.default.v AS SELECT * FROM 
t")
           },
-          errorClass = "_LEGACY_ERROR_TEMP_1283",
-          parameters = Map("name" -> "`spark_catalog`.`default`.`v`", "nameParts" -> "t"))
+          errorClass = "INVALID_TEMP_OBJ_REFERENCE",
+          parameters = Map(
+            "obj" -> "view",
+            "objName" -> "`spark_catalog`.`default`.`v`",
+            "tempObj" -> "view",
+            "tempObjName" -> "`t`"))
       }
     }
 
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/SQLViewSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/SQLViewSuite.scala
index 833d9c3c7be..26b0e765700 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/SQLViewSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/SQLViewSuite.scala
@@ -81,22 +81,28 @@ abstract class SQLViewSuite extends QueryTest with SQLTestUtils {
       withTempView("temp_jtv1") {
         withGlobalTempView("global_temp_jtv1") {
           sql("CREATE TEMPORARY VIEW temp_jtv1 AS SELECT * FROM jt WHERE id > 
3")
-          var e = intercept[AnalysisException] {
-            sql("CREATE VIEW jtv1 AS SELECT * FROM temp_jtv1 WHERE id < 6")
-          }.getMessage
-          assert(e.contains("Not allowed to create a permanent view " +
-            s"`$SESSION_CATALOG_NAME`.`default`.`jtv1` by " +
-            "referencing a temporary view temp_jtv1. " +
-            "Please create a temp view instead by CREATE TEMP VIEW"))
-
+          checkError(
+            exception = intercept[AnalysisException] {
+              sql("CREATE VIEW jtv1 AS SELECT * FROM temp_jtv1 WHERE id < 6")
+            },
+            errorClass = "INVALID_TEMP_OBJ_REFERENCE",
+            parameters = Map(
+              "obj" -> "view",
+              "objName" -> s"`$SESSION_CATALOG_NAME`.`default`.`jtv1`",
+              "tempObj" -> "view",
+              "tempObjName" -> "`temp_jtv1`"))
           val globalTempDB = spark.sharedState.globalTempViewManager.database
           sql("CREATE GLOBAL TEMP VIEW global_temp_jtv1 AS SELECT * FROM jt 
WHERE id > 0")
-          e = intercept[AnalysisException] {
-            sql(s"CREATE VIEW jtv1 AS SELECT * FROM 
$globalTempDB.global_temp_jtv1 WHERE id < 6")
-          }.getMessage
-          assert(e.contains("Not allowed to create a permanent view " +
-            s"`$SESSION_CATALOG_NAME`.`default`.`jtv1` by " +
-            "referencing a temporary view global_temp.global_temp_jtv1"))
+          checkError(
+            exception = intercept[AnalysisException] {
+              sql(s"CREATE VIEW jtv1 AS SELECT * FROM 
$globalTempDB.global_temp_jtv1 WHERE id < 6")
+            },
+            errorClass = "INVALID_TEMP_OBJ_REFERENCE",
+            parameters = Map(
+              "obj" -> "view",
+              "objName" -> s"`$SESSION_CATALOG_NAME`.`default`.`jtv1`",
+              "tempObj" -> "view",
+              "tempObjName" -> "`global_temp`.`global_temp_jtv1`"))
         }
       }
     }
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/SQLViewTestSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/SQLViewTestSuite.scala
index 1d4c52d3ae5..f64be6fcd2c 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/SQLViewTestSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/SQLViewTestSuite.scala
@@ -609,22 +609,30 @@ class PersistedViewTestSuite extends SQLViewTestSuite with SharedSparkSession {
         withTempView("v2") {
           sql("CREATE VIEW v1 AS SELECT * FROM t")
           sql("CREATE TEMPORARY VIEW v2 AS  SELECT * FROM t")
-          var e = intercept[AnalysisException] {
-            sql("ALTER VIEW v1 AS SELECT * FROM v2")
-          }.getMessage
-          assert(e.contains("Not allowed to create a permanent view " +
-            s"`$SESSION_CATALOG_NAME`.`default`.`v1` by " +
-            "referencing a temporary view v2"))
+          checkError(
+            exception = intercept[AnalysisException] {
+              sql("ALTER VIEW v1 AS SELECT * FROM v2")
+            },
+            errorClass = "INVALID_TEMP_OBJ_REFERENCE",
+            parameters = Map(
+              "obj" -> "view",
+              "objName" -> s"`$SESSION_CATALOG_NAME`.`default`.`v1`",
+              "tempObj" -> "view",
+              "tempObjName" -> "`v2`"))
           val tempFunctionName = "temp_udf"
           val functionClass = "test.org.apache.spark.sql.MyDoubleAvg"
           withUserDefinedFunction(tempFunctionName -> true) {
             sql(s"CREATE TEMPORARY FUNCTION $tempFunctionName AS 
'$functionClass'")
-            e = intercept[AnalysisException] {
-              sql(s"ALTER VIEW v1 AS SELECT $tempFunctionName(id) from t")
-            }.getMessage
-            assert(e.contains("Not allowed to create a permanent view " +
-              s"`$SESSION_CATALOG_NAME`.`default`.`v1` by " +
-              s"referencing a temporary function `$tempFunctionName`"))
+            checkError(
+              exception = intercept[AnalysisException] {
+                sql(s"ALTER VIEW v1 AS SELECT $tempFunctionName(id) from t")
+              },
+              errorClass = "INVALID_TEMP_OBJ_REFERENCE",
+              parameters = Map(
+                "obj" -> "view",
+                "objName" -> s"`$SESSION_CATALOG_NAME`.`default`.`v1`",
+                "tempObj" -> "function",
+                "tempObjName" -> s"`$tempFunctionName`"))
           }
         }
       }
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveSQLViewSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveSQLViewSuite.scala
index 6b05a283dae..bf4954eeb1e 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveSQLViewSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveSQLViewSuite.scala
@@ -83,10 +83,15 @@ class HiveSQLViewSuite extends SQLViewSuite with TestHiveSingleton {
             // permanent view
             val e = intercept[AnalysisException] {
               sql(s"CREATE VIEW view1 AS SELECT $tempFunctionName(id) from 
tab1")
-            }.getMessage
-            assert(e.contains("Not allowed to create a permanent view " +
-              s"`$SESSION_CATALOG_NAME`.`default`.`view1` by " +
-              s"referencing a temporary function `$tempFunctionName`"))
+            }
+            checkError(
+              exception = e,
+              errorClass = "INVALID_TEMP_OBJ_REFERENCE",
+              parameters = Map(
+                "obj" -> "view",
+                "objName" -> s"`$SESSION_CATALOG_NAME`.`default`.`view1`",
+                "tempObj" -> "function",
+                "tempObjName" -> s"`$tempFunctionName`"))
           }
         }
       }


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org
