This is an automated email from the ASF dual-hosted git repository.
gengliang pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push:
new 6603b82627f [SPARK-40585][SQL] Double-quoted identifiers should be
available only in ANSI mode
6603b82627f is described below
commit 6603b82627fcce5f7fba5376f036862dcfbb5347
Author: Gengliang Wang <[email protected]>
AuthorDate: Tue Oct 11 10:43:39 2022 -0700
[SPARK-40585][SQL] Double-quoted identifiers should be available only in
ANSI mode
### What changes were proposed in this pull request?
https://github.com/apache/spark/pull/38022 introduces an optional feature
for supporting double-quoted identifiers. The feature is controlled by the flag
`spark.sql.ansi.double_quoted_identifiers`, which is independent of the flag
`spark.sql.ansi.enabled`.
This is inconsistent with another ANSI SQL feature "Enforce ANSI reserved
keywords":
https://spark.apache.org/docs/latest/sql-ref-ansi-compliance.html#sql-keywords-optional-disabled-by-default,
which is only available when `spark.sql.ansi.enabled` is true.
Thus, to make the ANSI flags consistent, I suggest making double-quoted
identifiers only available under ANSI SQL mode.
Additionally, this PR renames the flag from
`spark.sql.ansi.double_quoted_identifiers` to
`spark.sql.ansi.doubleQuotedIdentifiers`.
### Why are the changes needed?
To make the ANSI SQL related features consistent.
### Does this PR introduce _any_ user-facing change?
No, the feature is not released yet.
### How was this patch tested?
New SQL test input file under ANSI mode.
Closes #38147 from gengliangwang/doubleQuoteFlag.
Authored-by: Gengliang Wang <[email protected]>
Signed-off-by: Gengliang Wang <[email protected]>
---
.../org/apache/spark/sql/internal/SQLConf.scala | 20 +-
.../ansi/double-quoted-identifiers-disabled.sql | 2 +
.../ansi/double-quoted-identifiers-enabled.sql | 3 +
.../sql-tests/inputs/double-quoted-identifiers.sql | 50 ----
.../double-quoted-identifiers-disabled.sql.out} | 302 +++-----------------
.../double-quoted-identifiers-enabled.sql.out} | 315 ++-------------------
.../results/double-quoted-identifiers.sql.out | 302 +++-----------------
7 files changed, 100 insertions(+), 894 deletions(-)
diff --git
a/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala
b/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala
index 376bcece3c6..bbe5bdd7035 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala
@@ -2909,7 +2909,15 @@ object SQLConf {
.booleanConf
.createWithDefault(sys.env.get("SPARK_ANSI_SQL_MODE").contains("true"))
- val DOUBLE_QUOTED_IDENTIFIERS =
buildConf("spark.sql.ansi.double_quoted_identifiers")
+ val ENFORCE_RESERVED_KEYWORDS =
buildConf("spark.sql.ansi.enforceReservedKeywords")
+ .doc(s"When true and '${ANSI_ENABLED.key}' is true, the Spark SQL parser
enforces the ANSI " +
+ "reserved keywords and forbids SQL queries that use reserved keywords as
alias names " +
+ "and/or identifiers for table, view, function, etc.")
+ .version("3.3.0")
+ .booleanConf
+ .createWithDefault(false)
+
+ val DOUBLE_QUOTED_IDENTIFIERS =
buildConf("spark.sql.ansi.doubleQuotedIdentifiers")
.doc("When true, Spark SQL reads literals enclosed in double quoted (\")
as identifiers. " +
"When false they are read as string literals.")
.version("3.4.0")
@@ -2964,14 +2972,6 @@ object SQLConf {
.booleanConf
.createWithDefault(false)
- val ENFORCE_RESERVED_KEYWORDS =
buildConf("spark.sql.ansi.enforceReservedKeywords")
- .doc(s"When true and '${ANSI_ENABLED.key}' is true, the Spark SQL parser
enforces the ANSI " +
- "reserved keywords and forbids SQL queries that use reserved keywords as
alias names " +
- "and/or identifiers for table, view, function, etc.")
- .version("3.3.0")
- .booleanConf
- .createWithDefault(false)
-
val SORT_BEFORE_REPARTITION =
buildConf("spark.sql.execution.sortBeforeRepartition")
.internal()
@@ -4592,7 +4592,7 @@ class SQLConf extends Serializable with Logging {
def enforceReservedKeywords: Boolean = ansiEnabled &&
getConf(ENFORCE_RESERVED_KEYWORDS)
- def doubleQuotedIdentifiers: Boolean = getConf(DOUBLE_QUOTED_IDENTIFIERS)
+ def doubleQuotedIdentifiers: Boolean = ansiEnabled &&
getConf(DOUBLE_QUOTED_IDENTIFIERS)
def timestampType: AtomicType = getConf(TIMESTAMP_TYPE) match {
case "TIMESTAMP_LTZ" =>
diff --git
a/sql/core/src/test/resources/sql-tests/inputs/ansi/double-quoted-identifiers-disabled.sql
b/sql/core/src/test/resources/sql-tests/inputs/ansi/double-quoted-identifiers-disabled.sql
new file mode 100644
index 00000000000..b8ff8cdb813
--- /dev/null
+++
b/sql/core/src/test/resources/sql-tests/inputs/ansi/double-quoted-identifiers-disabled.sql
@@ -0,0 +1,2 @@
+--SET spark.sql.ansi.doubleQuotedIdentifiers=false
+--IMPORT double-quoted-identifiers.sql
diff --git
a/sql/core/src/test/resources/sql-tests/inputs/ansi/double-quoted-identifiers-enabled.sql
b/sql/core/src/test/resources/sql-tests/inputs/ansi/double-quoted-identifiers-enabled.sql
new file mode 100644
index 00000000000..9547d011c76
--- /dev/null
+++
b/sql/core/src/test/resources/sql-tests/inputs/ansi/double-quoted-identifiers-enabled.sql
@@ -0,0 +1,3 @@
+--SET spark.sql.ansi.doubleQuotedIdentifiers=true
+--IMPORT double-quoted-identifiers.sql
+
diff --git
a/sql/core/src/test/resources/sql-tests/inputs/double-quoted-identifiers.sql
b/sql/core/src/test/resources/sql-tests/inputs/double-quoted-identifiers.sql
index 7fe35e5a410..ffb52b40334 100644
--- a/sql/core/src/test/resources/sql-tests/inputs/double-quoted-identifiers.sql
+++ b/sql/core/src/test/resources/sql-tests/inputs/double-quoted-identifiers.sql
@@ -1,8 +1,3 @@
--- test cases for spark.sql.ansi.double_quoted_identifiers
-
--- Base line
-SET spark.sql.ansi.double_quoted_identifiers = false;
-
-- All these should error out in the parser
SELECT 1 FROM "not_exist";
@@ -45,51 +40,6 @@ DROP VIEW v;
SELECT INTERVAL "1" YEAR;
--- Now turn on the config.
-SET spark.sql.ansi.double_quoted_identifiers = true;
-
--- All these should error out in analysis now
-SELECT 1 FROM "not_exist";
-
-USE SCHEMA "not_exist";
-
-ALTER TABLE "not_exist" ADD COLUMN not_exist int;
-
-ALTER TABLE not_exist ADD COLUMN "not_exist" int;
-
-SELECT 1 AS "not_exist" FROM not_exist;
-
-SELECT 1 FROM not_exist AS X("hello");
-
-SELECT "not_exist"();
-
-SELECT "not_exist".not_exist();
-
-SELECT "hello";
-
--- Back ticks still work
-SELECT 1 FROM `hello`;
-
-USE SCHEMA `not_exist`;
-
-ALTER TABLE `not_exist` ADD COLUMN not_exist int;
-
-ALTER TABLE not_exist ADD COLUMN `not_exist` int;
-
-SELECT 1 AS `not_exist` FROM `not_exist`;
-
-SELECT 1 FROM not_exist AS X(`hello`);
-
-SELECT `not_exist`();
-
-SELECT `not_exist`.not_exist();
-
--- These fail in the parser now
-CREATE TEMPORARY VIEW v(c1 COMMENT "hello") AS SELECT 1;
-DROP VIEW v;
-
-SELECT INTERVAL "1" YEAR;
-
-- Single ticks still work
SELECT 'hello';
diff --git
a/sql/core/src/test/resources/sql-tests/results/double-quoted-identifiers.sql.out
b/sql/core/src/test/resources/sql-tests/results/ansi/double-quoted-identifiers-disabled.sql.out
similarity index 54%
copy from
sql/core/src/test/resources/sql-tests/results/double-quoted-identifiers.sql.out
copy to
sql/core/src/test/resources/sql-tests/results/ansi/double-quoted-identifiers-disabled.sql.out
index a67a5cffd31..57fad89d57c 100644
---
a/sql/core/src/test/resources/sql-tests/results/double-quoted-identifiers.sql.out
+++
b/sql/core/src/test/resources/sql-tests/results/ansi/double-quoted-identifiers-disabled.sql.out
@@ -1,12 +1,4 @@
-- Automatically generated by SQLQueryTestSuite
--- !query
-SET spark.sql.ansi.double_quoted_identifiers = false
--- !query schema
-struct<key:string,value:string>
--- !query output
-spark.sql.ansi.double_quoted_identifiers false
-
-
-- !query
SELECT 1 FROM "not_exist"
-- !query schema
@@ -265,231 +257,72 @@ struct<INTERVAL '1' YEAR:interval year>
-- !query
-SET spark.sql.ansi.double_quoted_identifiers = true
--- !query schema
-struct<key:string,value:string>
--- !query output
-spark.sql.ansi.double_quoted_identifiers true
-
-
--- !query
-SELECT 1 FROM "not_exist"
--- !query schema
-struct<>
--- !query output
-org.apache.spark.sql.AnalysisException
-Table or view not found: not_exist; line 1 pos 14
-
-
--- !query
-USE SCHEMA "not_exist"
--- !query schema
-struct<>
--- !query output
-org.apache.spark.sql.catalyst.analysis.NoSuchDatabaseException
-Database 'not_exist' not found
-
-
--- !query
-ALTER TABLE "not_exist" ADD COLUMN not_exist int
+SELECT 'hello'
-- !query schema
-struct<>
+struct<hello:string>
-- !query output
-org.apache.spark.sql.AnalysisException
-Table not found: not_exist; line 1 pos 12
+hello
-- !query
-ALTER TABLE not_exist ADD COLUMN "not_exist" int
+CREATE TEMPORARY VIEW v(c1 COMMENT 'hello') AS SELECT 1
-- !query schema
struct<>
-- !query output
-org.apache.spark.sql.AnalysisException
-Table not found: not_exist; line 1 pos 12
-
--- !query
-SELECT 1 AS "not_exist" FROM not_exist
--- !query schema
-struct<>
--- !query output
-org.apache.spark.sql.AnalysisException
-Table or view not found: not_exist; line 1 pos 29
-- !query
-SELECT 1 FROM not_exist AS X("hello")
+DROP VIEW v
-- !query schema
struct<>
-- !query output
-org.apache.spark.sql.AnalysisException
-Table or view not found: not_exist; line 1 pos 14
--- !query
-SELECT "not_exist"()
--- !query schema
-struct<>
--- !query output
-org.apache.spark.sql.AnalysisException
-{
- "errorClass" : "_LEGACY_ERROR_TEMP_1242",
- "messageParameters" : {
- "fullName" : "spark_catalog.default.not_exist",
- "rawName" : "not_exist"
- },
- "queryContext" : [ {
- "objectType" : "",
- "objectName" : "",
- "startIndex" : 8,
- "stopIndex" : 20,
- "fragment" : "\"not_exist\"()"
- } ]
-}
-
-- !query
-SELECT "not_exist".not_exist()
+SELECT INTERVAL '1' YEAR
-- !query schema
-struct<>
+struct<INTERVAL '1' YEAR:interval year>
-- !query output
-org.apache.spark.sql.AnalysisException
-{
- "errorClass" : "_LEGACY_ERROR_TEMP_1243",
- "messageParameters" : {
- "rawName" : "not_exist.not_exist"
- },
- "queryContext" : [ {
- "objectType" : "",
- "objectName" : "",
- "startIndex" : 8,
- "stopIndex" : 30,
- "fragment" : "\"not_exist\".not_exist()"
- } ]
-}
+1-0
-- !query
-SELECT "hello"
+CREATE SCHEMA "myschema"
-- !query schema
struct<>
-- !query output
-org.apache.spark.sql.AnalysisException
+org.apache.spark.sql.catalyst.parser.ParseException
{
- "errorClass" : "UNRESOLVED_COLUMN.WITHOUT_SUGGESTION",
+ "errorClass" : "PARSE_SYNTAX_ERROR",
"sqlState" : "42000",
"messageParameters" : {
- "objectName" : "`hello`"
- },
- "queryContext" : [ {
- "objectType" : "",
- "objectName" : "",
- "startIndex" : 8,
- "stopIndex" : 14,
- "fragment" : "\"hello\""
- } ]
-}
-
-
--- !query
-SELECT 1 FROM `hello`
--- !query schema
-struct<>
--- !query output
-org.apache.spark.sql.AnalysisException
-Table or view not found: hello; line 1 pos 14
-
-
--- !query
-USE SCHEMA `not_exist`
--- !query schema
-struct<>
--- !query output
-org.apache.spark.sql.catalyst.analysis.NoSuchDatabaseException
-Database 'not_exist' not found
-
-
--- !query
-ALTER TABLE `not_exist` ADD COLUMN not_exist int
--- !query schema
-struct<>
--- !query output
-org.apache.spark.sql.AnalysisException
-Table not found: not_exist; line 1 pos 12
-
-
--- !query
-ALTER TABLE not_exist ADD COLUMN `not_exist` int
--- !query schema
-struct<>
--- !query output
-org.apache.spark.sql.AnalysisException
-Table not found: not_exist; line 1 pos 12
-
-
--- !query
-SELECT 1 AS `not_exist` FROM `not_exist`
--- !query schema
-struct<>
--- !query output
-org.apache.spark.sql.AnalysisException
-Table or view not found: not_exist; line 1 pos 29
-
-
--- !query
-SELECT 1 FROM not_exist AS X(`hello`)
--- !query schema
-struct<>
--- !query output
-org.apache.spark.sql.AnalysisException
-Table or view not found: not_exist; line 1 pos 14
-
-
--- !query
-SELECT `not_exist`()
--- !query schema
-struct<>
--- !query output
-org.apache.spark.sql.AnalysisException
-{
- "errorClass" : "_LEGACY_ERROR_TEMP_1242",
- "messageParameters" : {
- "fullName" : "spark_catalog.default.not_exist",
- "rawName" : "not_exist"
- },
- "queryContext" : [ {
- "objectType" : "",
- "objectName" : "",
- "startIndex" : 8,
- "stopIndex" : 20,
- "fragment" : "`not_exist`()"
- } ]
+ "error" : "'\"myschema\"'",
+ "hint" : ""
+ }
}
-- !query
-SELECT `not_exist`.not_exist()
+CREATE TEMPORARY VIEW "myview"("c1") AS
+ WITH "v"("a") AS (SELECT 1) SELECT "a" FROM "v"
-- !query schema
struct<>
-- !query output
-org.apache.spark.sql.AnalysisException
+org.apache.spark.sql.catalyst.parser.ParseException
{
- "errorClass" : "_LEGACY_ERROR_TEMP_1243",
+ "errorClass" : "PARSE_SYNTAX_ERROR",
+ "sqlState" : "42000",
"messageParameters" : {
- "rawName" : "not_exist.not_exist"
- },
- "queryContext" : [ {
- "objectType" : "",
- "objectName" : "",
- "startIndex" : 8,
- "stopIndex" : 30,
- "fragment" : "`not_exist`.not_exist()"
- } ]
+ "error" : "'\"myview\"'",
+ "hint" : ""
+ }
}
-- !query
-CREATE TEMPORARY VIEW v(c1 COMMENT "hello") AS SELECT 1
+SELECT "a1" AS "a2" FROM "myview" AS "atab"("a1")
-- !query schema
struct<>
-- !query output
@@ -498,28 +331,30 @@ org.apache.spark.sql.catalyst.parser.ParseException
"errorClass" : "PARSE_SYNTAX_ERROR",
"sqlState" : "42000",
"messageParameters" : {
- "error" : "'\"hello\"'",
+ "error" : "'\"a2\"'",
"hint" : ""
}
}
-- !query
-DROP VIEW v
+DROP TABLE "myview"
-- !query schema
struct<>
-- !query output
-org.apache.spark.sql.catalyst.analysis.NoSuchTableException
+org.apache.spark.sql.catalyst.parser.ParseException
{
- "errorClass" : "_LEGACY_ERROR_TEMP_1115",
+ "errorClass" : "PARSE_SYNTAX_ERROR",
+ "sqlState" : "42000",
"messageParameters" : {
- "msg" : "Table spark_catalog.default.v not found"
+ "error" : "'\"myview\"'",
+ "hint" : ""
}
}
-- !query
-SELECT INTERVAL "1" YEAR
+DROP SCHEMA "myschema"
-- !query schema
struct<>
-- !query output
@@ -528,80 +363,7 @@ org.apache.spark.sql.catalyst.parser.ParseException
"errorClass" : "PARSE_SYNTAX_ERROR",
"sqlState" : "42000",
"messageParameters" : {
- "error" : "'\"1\"'",
+ "error" : "'\"myschema\"'",
"hint" : ""
}
}
-
-
--- !query
-SELECT 'hello'
--- !query schema
-struct<hello:string>
--- !query output
-hello
-
-
--- !query
-CREATE TEMPORARY VIEW v(c1 COMMENT 'hello') AS SELECT 1
--- !query schema
-struct<>
--- !query output
-
-
-
--- !query
-DROP VIEW v
--- !query schema
-struct<>
--- !query output
-
-
-
--- !query
-SELECT INTERVAL '1' YEAR
--- !query schema
-struct<INTERVAL '1' YEAR:interval year>
--- !query output
-1-0
-
-
--- !query
-CREATE SCHEMA "myschema"
--- !query schema
-struct<>
--- !query output
-
-
-
--- !query
-CREATE TEMPORARY VIEW "myview"("c1") AS
- WITH "v"("a") AS (SELECT 1) SELECT "a" FROM "v"
--- !query schema
-struct<>
--- !query output
-
-
-
--- !query
-SELECT "a1" AS "a2" FROM "myview" AS "atab"("a1")
--- !query schema
-struct<a2:int>
--- !query output
-1
-
-
--- !query
-DROP TABLE "myview"
--- !query schema
-struct<>
--- !query output
-
-
-
--- !query
-DROP SCHEMA "myschema"
--- !query schema
-struct<>
--- !query output
-
diff --git
a/sql/core/src/test/resources/sql-tests/results/double-quoted-identifiers.sql.out
b/sql/core/src/test/resources/sql-tests/results/ansi/double-quoted-identifiers-enabled.sql.out
similarity index 54%
copy from
sql/core/src/test/resources/sql-tests/results/double-quoted-identifiers.sql.out
copy to
sql/core/src/test/resources/sql-tests/results/ansi/double-quoted-identifiers-enabled.sql.out
index a67a5cffd31..fb34e9a1619 100644
---
a/sql/core/src/test/resources/sql-tests/results/double-quoted-identifiers.sql.out
+++
b/sql/core/src/test/resources/sql-tests/results/ansi/double-quoted-identifiers-enabled.sql.out
@@ -1,151 +1,15 @@
-- Automatically generated by SQLQueryTestSuite
--- !query
-SET spark.sql.ansi.double_quoted_identifiers = false
--- !query schema
-struct<key:string,value:string>
--- !query output
-spark.sql.ansi.double_quoted_identifiers false
-
-
-- !query
SELECT 1 FROM "not_exist"
-- !query schema
struct<>
-- !query output
-org.apache.spark.sql.catalyst.parser.ParseException
-{
- "errorClass" : "PARSE_SYNTAX_ERROR",
- "sqlState" : "42000",
- "messageParameters" : {
- "error" : "'\"not_exist\"'",
- "hint" : ""
- }
-}
-
-
--- !query
-USE SCHEMA "not_exist"
--- !query schema
-struct<>
--- !query output
-org.apache.spark.sql.catalyst.parser.ParseException
-{
- "errorClass" : "PARSE_SYNTAX_ERROR",
- "sqlState" : "42000",
- "messageParameters" : {
- "error" : "'\"not_exist\"'",
- "hint" : ""
- }
-}
-
-
--- !query
-ALTER TABLE "not_exist" ADD COLUMN not_exist int
--- !query schema
-struct<>
--- !query output
-org.apache.spark.sql.catalyst.parser.ParseException
-{
- "errorClass" : "PARSE_SYNTAX_ERROR",
- "sqlState" : "42000",
- "messageParameters" : {
- "error" : "'\"not_exist\"'",
- "hint" : ""
- }
-}
-
-
--- !query
-ALTER TABLE not_exist ADD COLUMN "not_exist" int
--- !query schema
-struct<>
--- !query output
-org.apache.spark.sql.catalyst.parser.ParseException
-{
- "errorClass" : "PARSE_SYNTAX_ERROR",
- "sqlState" : "42000",
- "messageParameters" : {
- "error" : "'\"not_exist\"'",
- "hint" : ""
- }
-}
-
-
--- !query
-SELECT 1 AS "not_exist" FROM not_exist
--- !query schema
-struct<>
--- !query output
-org.apache.spark.sql.catalyst.parser.ParseException
-{
- "errorClass" : "PARSE_SYNTAX_ERROR",
- "sqlState" : "42000",
- "messageParameters" : {
- "error" : "'\"not_exist\"'",
- "hint" : ""
- }
-}
-
-
--- !query
-SELECT 1 FROM not_exist AS X("hello")
--- !query schema
-struct<>
--- !query output
-org.apache.spark.sql.catalyst.parser.ParseException
-{
- "errorClass" : "PARSE_SYNTAX_ERROR",
- "sqlState" : "42000",
- "messageParameters" : {
- "error" : "'\"hello\"'",
- "hint" : ""
- }
-}
-
-
--- !query
-SELECT "not_exist"()
--- !query schema
-struct<>
--- !query output
-org.apache.spark.sql.catalyst.parser.ParseException
-{
- "errorClass" : "PARSE_SYNTAX_ERROR",
- "sqlState" : "42000",
- "messageParameters" : {
- "error" : "'\"not_exist\"'",
- "hint" : ""
- }
-}
-
-
--- !query
-SELECT "not_exist".not_exist()
--- !query schema
-struct<>
--- !query output
-org.apache.spark.sql.catalyst.parser.ParseException
-{
- "errorClass" : "PARSE_SYNTAX_ERROR",
- "sqlState" : "42000",
- "messageParameters" : {
- "error" : "'\"not_exist\"'",
- "hint" : ""
- }
-}
-
-
--- !query
-SELECT 1 FROM `hello`
--- !query schema
-struct<>
--- !query output
org.apache.spark.sql.AnalysisException
-Table or view not found: hello; line 1 pos 14
+Table or view not found: not_exist; line 1 pos 14
-- !query
-USE SCHEMA `not_exist`
+USE SCHEMA "not_exist"
-- !query schema
struct<>
-- !query output
@@ -154,7 +18,7 @@ Database 'not_exist' not found
-- !query
-ALTER TABLE `not_exist` ADD COLUMN not_exist int
+ALTER TABLE "not_exist" ADD COLUMN not_exist int
-- !query schema
struct<>
-- !query output
@@ -163,7 +27,7 @@ Table not found: not_exist; line 1 pos 12
-- !query
-ALTER TABLE not_exist ADD COLUMN `not_exist` int
+ALTER TABLE not_exist ADD COLUMN "not_exist" int
-- !query schema
struct<>
-- !query output
@@ -172,7 +36,7 @@ Table not found: not_exist; line 1 pos 12
-- !query
-SELECT 1 AS `not_exist` FROM `not_exist`
+SELECT 1 AS "not_exist" FROM not_exist
-- !query schema
struct<>
-- !query output
@@ -181,7 +45,7 @@ Table or view not found: not_exist; line 1 pos 29
-- !query
-SELECT 1 FROM not_exist AS X(`hello`)
+SELECT 1 FROM not_exist AS X("hello")
-- !query schema
struct<>
-- !query output
@@ -190,7 +54,7 @@ Table or view not found: not_exist; line 1 pos 14
-- !query
-SELECT `not_exist`()
+SELECT "not_exist"()
-- !query schema
struct<>
-- !query output
@@ -206,13 +70,13 @@ org.apache.spark.sql.AnalysisException
"objectName" : "",
"startIndex" : 8,
"stopIndex" : 20,
- "fragment" : "`not_exist`()"
+ "fragment" : "\"not_exist\"()"
} ]
}
-- !query
-SELECT `not_exist`.not_exist()
+SELECT "not_exist".not_exist()
-- !query schema
struct<>
-- !query output
@@ -227,62 +91,22 @@ org.apache.spark.sql.AnalysisException
"objectName" : "",
"startIndex" : 8,
"stopIndex" : 30,
- "fragment" : "`not_exist`.not_exist()"
+ "fragment" : "\"not_exist\".not_exist()"
} ]
}
-- !query
-SELECT "hello"
--- !query schema
-struct<hello:string>
--- !query output
-hello
-
-
--- !query
-CREATE TEMPORARY VIEW v(c1 COMMENT "hello") AS SELECT 1
--- !query schema
-struct<>
--- !query output
-
-
-
--- !query
-DROP VIEW v
--- !query schema
-struct<>
--- !query output
-
-
-
--- !query
-SELECT INTERVAL "1" YEAR
--- !query schema
-struct<INTERVAL '1' YEAR:interval year>
--- !query output
-1-0
-
-
--- !query
-SET spark.sql.ansi.double_quoted_identifiers = true
--- !query schema
-struct<key:string,value:string>
--- !query output
-spark.sql.ansi.double_quoted_identifiers true
-
-
--- !query
-SELECT 1 FROM "not_exist"
+SELECT 1 FROM `hello`
-- !query schema
struct<>
-- !query output
org.apache.spark.sql.AnalysisException
-Table or view not found: not_exist; line 1 pos 14
+Table or view not found: hello; line 1 pos 14
-- !query
-USE SCHEMA "not_exist"
+USE SCHEMA `not_exist`
-- !query schema
struct<>
-- !query output
@@ -291,7 +115,7 @@ Database 'not_exist' not found
-- !query
-ALTER TABLE "not_exist" ADD COLUMN not_exist int
+ALTER TABLE `not_exist` ADD COLUMN not_exist int
-- !query schema
struct<>
-- !query output
@@ -300,7 +124,7 @@ Table not found: not_exist; line 1 pos 12
-- !query
-ALTER TABLE not_exist ADD COLUMN "not_exist" int
+ALTER TABLE not_exist ADD COLUMN `not_exist` int
-- !query schema
struct<>
-- !query output
@@ -309,7 +133,7 @@ Table not found: not_exist; line 1 pos 12
-- !query
-SELECT 1 AS "not_exist" FROM not_exist
+SELECT 1 AS `not_exist` FROM `not_exist`
-- !query schema
struct<>
-- !query output
@@ -318,7 +142,7 @@ Table or view not found: not_exist; line 1 pos 29
-- !query
-SELECT 1 FROM not_exist AS X("hello")
+SELECT 1 FROM not_exist AS X(`hello`)
-- !query schema
struct<>
-- !query output
@@ -327,7 +151,7 @@ Table or view not found: not_exist; line 1 pos 14
-- !query
-SELECT "not_exist"()
+SELECT `not_exist`()
-- !query schema
struct<>
-- !query output
@@ -343,13 +167,13 @@ org.apache.spark.sql.AnalysisException
"objectName" : "",
"startIndex" : 8,
"stopIndex" : 20,
- "fragment" : "\"not_exist\"()"
+ "fragment" : "`not_exist`()"
} ]
}
-- !query
-SELECT "not_exist".not_exist()
+SELECT `not_exist`.not_exist()
-- !query schema
struct<>
-- !query output
@@ -364,7 +188,7 @@ org.apache.spark.sql.AnalysisException
"objectName" : "",
"startIndex" : 8,
"stopIndex" : 30,
- "fragment" : "\"not_exist\".not_exist()"
+ "fragment" : "`not_exist`.not_exist()"
} ]
}
@@ -391,103 +215,6 @@ org.apache.spark.sql.AnalysisException
}
--- !query
-SELECT 1 FROM `hello`
--- !query schema
-struct<>
--- !query output
-org.apache.spark.sql.AnalysisException
-Table or view not found: hello; line 1 pos 14
-
-
--- !query
-USE SCHEMA `not_exist`
--- !query schema
-struct<>
--- !query output
-org.apache.spark.sql.catalyst.analysis.NoSuchDatabaseException
-Database 'not_exist' not found
-
-
--- !query
-ALTER TABLE `not_exist` ADD COLUMN not_exist int
--- !query schema
-struct<>
--- !query output
-org.apache.spark.sql.AnalysisException
-Table not found: not_exist; line 1 pos 12
-
-
--- !query
-ALTER TABLE not_exist ADD COLUMN `not_exist` int
--- !query schema
-struct<>
--- !query output
-org.apache.spark.sql.AnalysisException
-Table not found: not_exist; line 1 pos 12
-
-
--- !query
-SELECT 1 AS `not_exist` FROM `not_exist`
--- !query schema
-struct<>
--- !query output
-org.apache.spark.sql.AnalysisException
-Table or view not found: not_exist; line 1 pos 29
-
-
--- !query
-SELECT 1 FROM not_exist AS X(`hello`)
--- !query schema
-struct<>
--- !query output
-org.apache.spark.sql.AnalysisException
-Table or view not found: not_exist; line 1 pos 14
-
-
--- !query
-SELECT `not_exist`()
--- !query schema
-struct<>
--- !query output
-org.apache.spark.sql.AnalysisException
-{
- "errorClass" : "_LEGACY_ERROR_TEMP_1242",
- "messageParameters" : {
- "fullName" : "spark_catalog.default.not_exist",
- "rawName" : "not_exist"
- },
- "queryContext" : [ {
- "objectType" : "",
- "objectName" : "",
- "startIndex" : 8,
- "stopIndex" : 20,
- "fragment" : "`not_exist`()"
- } ]
-}
-
-
--- !query
-SELECT `not_exist`.not_exist()
--- !query schema
-struct<>
--- !query output
-org.apache.spark.sql.AnalysisException
-{
- "errorClass" : "_LEGACY_ERROR_TEMP_1243",
- "messageParameters" : {
- "rawName" : "not_exist.not_exist"
- },
- "queryContext" : [ {
- "objectType" : "",
- "objectName" : "",
- "startIndex" : 8,
- "stopIndex" : 30,
- "fragment" : "`not_exist`.not_exist()"
- } ]
-}
-
-
-- !query
CREATE TEMPORARY VIEW v(c1 COMMENT "hello") AS SELECT 1
-- !query schema
diff --git
a/sql/core/src/test/resources/sql-tests/results/double-quoted-identifiers.sql.out
b/sql/core/src/test/resources/sql-tests/results/double-quoted-identifiers.sql.out
index a67a5cffd31..57fad89d57c 100644
---
a/sql/core/src/test/resources/sql-tests/results/double-quoted-identifiers.sql.out
+++
b/sql/core/src/test/resources/sql-tests/results/double-quoted-identifiers.sql.out
@@ -1,12 +1,4 @@
-- Automatically generated by SQLQueryTestSuite
--- !query
-SET spark.sql.ansi.double_quoted_identifiers = false
--- !query schema
-struct<key:string,value:string>
--- !query output
-spark.sql.ansi.double_quoted_identifiers false
-
-
-- !query
SELECT 1 FROM "not_exist"
-- !query schema
@@ -265,231 +257,72 @@ struct<INTERVAL '1' YEAR:interval year>
-- !query
-SET spark.sql.ansi.double_quoted_identifiers = true
--- !query schema
-struct<key:string,value:string>
--- !query output
-spark.sql.ansi.double_quoted_identifiers true
-
-
--- !query
-SELECT 1 FROM "not_exist"
--- !query schema
-struct<>
--- !query output
-org.apache.spark.sql.AnalysisException
-Table or view not found: not_exist; line 1 pos 14
-
-
--- !query
-USE SCHEMA "not_exist"
--- !query schema
-struct<>
--- !query output
-org.apache.spark.sql.catalyst.analysis.NoSuchDatabaseException
-Database 'not_exist' not found
-
-
--- !query
-ALTER TABLE "not_exist" ADD COLUMN not_exist int
+SELECT 'hello'
-- !query schema
-struct<>
+struct<hello:string>
-- !query output
-org.apache.spark.sql.AnalysisException
-Table not found: not_exist; line 1 pos 12
+hello
-- !query
-ALTER TABLE not_exist ADD COLUMN "not_exist" int
+CREATE TEMPORARY VIEW v(c1 COMMENT 'hello') AS SELECT 1
-- !query schema
struct<>
-- !query output
-org.apache.spark.sql.AnalysisException
-Table not found: not_exist; line 1 pos 12
-
--- !query
-SELECT 1 AS "not_exist" FROM not_exist
--- !query schema
-struct<>
--- !query output
-org.apache.spark.sql.AnalysisException
-Table or view not found: not_exist; line 1 pos 29
-- !query
-SELECT 1 FROM not_exist AS X("hello")
+DROP VIEW v
-- !query schema
struct<>
-- !query output
-org.apache.spark.sql.AnalysisException
-Table or view not found: not_exist; line 1 pos 14
--- !query
-SELECT "not_exist"()
--- !query schema
-struct<>
--- !query output
-org.apache.spark.sql.AnalysisException
-{
- "errorClass" : "_LEGACY_ERROR_TEMP_1242",
- "messageParameters" : {
- "fullName" : "spark_catalog.default.not_exist",
- "rawName" : "not_exist"
- },
- "queryContext" : [ {
- "objectType" : "",
- "objectName" : "",
- "startIndex" : 8,
- "stopIndex" : 20,
- "fragment" : "\"not_exist\"()"
- } ]
-}
-
-- !query
-SELECT "not_exist".not_exist()
+SELECT INTERVAL '1' YEAR
-- !query schema
-struct<>
+struct<INTERVAL '1' YEAR:interval year>
-- !query output
-org.apache.spark.sql.AnalysisException
-{
- "errorClass" : "_LEGACY_ERROR_TEMP_1243",
- "messageParameters" : {
- "rawName" : "not_exist.not_exist"
- },
- "queryContext" : [ {
- "objectType" : "",
- "objectName" : "",
- "startIndex" : 8,
- "stopIndex" : 30,
- "fragment" : "\"not_exist\".not_exist()"
- } ]
-}
+1-0
-- !query
-SELECT "hello"
+CREATE SCHEMA "myschema"
-- !query schema
struct<>
-- !query output
-org.apache.spark.sql.AnalysisException
+org.apache.spark.sql.catalyst.parser.ParseException
{
- "errorClass" : "UNRESOLVED_COLUMN.WITHOUT_SUGGESTION",
+ "errorClass" : "PARSE_SYNTAX_ERROR",
"sqlState" : "42000",
"messageParameters" : {
- "objectName" : "`hello`"
- },
- "queryContext" : [ {
- "objectType" : "",
- "objectName" : "",
- "startIndex" : 8,
- "stopIndex" : 14,
- "fragment" : "\"hello\""
- } ]
-}
-
-
--- !query
-SELECT 1 FROM `hello`
--- !query schema
-struct<>
--- !query output
-org.apache.spark.sql.AnalysisException
-Table or view not found: hello; line 1 pos 14
-
-
--- !query
-USE SCHEMA `not_exist`
--- !query schema
-struct<>
--- !query output
-org.apache.spark.sql.catalyst.analysis.NoSuchDatabaseException
-Database 'not_exist' not found
-
-
--- !query
-ALTER TABLE `not_exist` ADD COLUMN not_exist int
--- !query schema
-struct<>
--- !query output
-org.apache.spark.sql.AnalysisException
-Table not found: not_exist; line 1 pos 12
-
-
--- !query
-ALTER TABLE not_exist ADD COLUMN `not_exist` int
--- !query schema
-struct<>
--- !query output
-org.apache.spark.sql.AnalysisException
-Table not found: not_exist; line 1 pos 12
-
-
--- !query
-SELECT 1 AS `not_exist` FROM `not_exist`
--- !query schema
-struct<>
--- !query output
-org.apache.spark.sql.AnalysisException
-Table or view not found: not_exist; line 1 pos 29
-
-
--- !query
-SELECT 1 FROM not_exist AS X(`hello`)
--- !query schema
-struct<>
--- !query output
-org.apache.spark.sql.AnalysisException
-Table or view not found: not_exist; line 1 pos 14
-
-
--- !query
-SELECT `not_exist`()
--- !query schema
-struct<>
--- !query output
-org.apache.spark.sql.AnalysisException
-{
- "errorClass" : "_LEGACY_ERROR_TEMP_1242",
- "messageParameters" : {
- "fullName" : "spark_catalog.default.not_exist",
- "rawName" : "not_exist"
- },
- "queryContext" : [ {
- "objectType" : "",
- "objectName" : "",
- "startIndex" : 8,
- "stopIndex" : 20,
- "fragment" : "`not_exist`()"
- } ]
+ "error" : "'\"myschema\"'",
+ "hint" : ""
+ }
}
-- !query
-SELECT `not_exist`.not_exist()
+CREATE TEMPORARY VIEW "myview"("c1") AS
+ WITH "v"("a") AS (SELECT 1) SELECT "a" FROM "v"
-- !query schema
struct<>
-- !query output
-org.apache.spark.sql.AnalysisException
+org.apache.spark.sql.catalyst.parser.ParseException
{
- "errorClass" : "_LEGACY_ERROR_TEMP_1243",
+ "errorClass" : "PARSE_SYNTAX_ERROR",
+ "sqlState" : "42000",
"messageParameters" : {
- "rawName" : "not_exist.not_exist"
- },
- "queryContext" : [ {
- "objectType" : "",
- "objectName" : "",
- "startIndex" : 8,
- "stopIndex" : 30,
- "fragment" : "`not_exist`.not_exist()"
- } ]
+ "error" : "'\"myview\"'",
+ "hint" : ""
+ }
}
-- !query
-CREATE TEMPORARY VIEW v(c1 COMMENT "hello") AS SELECT 1
+SELECT "a1" AS "a2" FROM "myview" AS "atab"("a1")
-- !query schema
struct<>
-- !query output
@@ -498,28 +331,30 @@ org.apache.spark.sql.catalyst.parser.ParseException
"errorClass" : "PARSE_SYNTAX_ERROR",
"sqlState" : "42000",
"messageParameters" : {
- "error" : "'\"hello\"'",
+ "error" : "'\"a2\"'",
"hint" : ""
}
}
-- !query
-DROP VIEW v
+DROP TABLE "myview"
-- !query schema
struct<>
-- !query output
-org.apache.spark.sql.catalyst.analysis.NoSuchTableException
+org.apache.spark.sql.catalyst.parser.ParseException
{
- "errorClass" : "_LEGACY_ERROR_TEMP_1115",
+ "errorClass" : "PARSE_SYNTAX_ERROR",
+ "sqlState" : "42000",
"messageParameters" : {
- "msg" : "Table spark_catalog.default.v not found"
+ "error" : "'\"myview\"'",
+ "hint" : ""
}
}
-- !query
-SELECT INTERVAL "1" YEAR
+DROP SCHEMA "myschema"
-- !query schema
struct<>
-- !query output
@@ -528,80 +363,7 @@ org.apache.spark.sql.catalyst.parser.ParseException
"errorClass" : "PARSE_SYNTAX_ERROR",
"sqlState" : "42000",
"messageParameters" : {
- "error" : "'\"1\"'",
+ "error" : "'\"myschema\"'",
"hint" : ""
}
}
-
-
--- !query
-SELECT 'hello'
--- !query schema
-struct<hello:string>
--- !query output
-hello
-
-
--- !query
-CREATE TEMPORARY VIEW v(c1 COMMENT 'hello') AS SELECT 1
--- !query schema
-struct<>
--- !query output
-
-
-
--- !query
-DROP VIEW v
--- !query schema
-struct<>
--- !query output
-
-
-
--- !query
-SELECT INTERVAL '1' YEAR
--- !query schema
-struct<INTERVAL '1' YEAR:interval year>
--- !query output
-1-0
-
-
--- !query
-CREATE SCHEMA "myschema"
--- !query schema
-struct<>
--- !query output
-
-
-
--- !query
-CREATE TEMPORARY VIEW "myview"("c1") AS
- WITH "v"("a") AS (SELECT 1) SELECT "a" FROM "v"
--- !query schema
-struct<>
--- !query output
-
-
-
--- !query
-SELECT "a1" AS "a2" FROM "myview" AS "atab"("a1")
--- !query schema
-struct<a2:int>
--- !query output
-1
-
-
--- !query
-DROP TABLE "myview"
--- !query schema
-struct<>
--- !query output
-
-
-
--- !query
-DROP SCHEMA "myschema"
--- !query schema
-struct<>
--- !query output
-
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]