This is an automated email from the ASF dual-hosted git repository.

maxgekk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new aa1ff3789e49 [SPARK-46246][SQL] EXECUTE IMMEDIATE SQL support
aa1ff3789e49 is described below

commit aa1ff3789e492545b07d84ac095fc4c39f7446c6
Author: milastdbx <milan.stefano...@databricks.com>
AuthorDate: Tue Dec 19 19:04:26 2023 +0300

    [SPARK-46246][SQL] EXECUTE IMMEDIATE SQL support
    
    ### What changes were proposed in this pull request?
    
    Introducing new EXECUTE IMMEDIATE syntax to support parameterized queries 
from within SQL.
    
    This API executes a query passed as a string, with arguments.
    
    Other DBs that support this:
     - 
[Oracle](https://docs.oracle.com/cd/B13789_01/appdev.101/b10807/13_elems017.htm)
     - 
[Snowflake](https://docs.snowflake.com/en/sql-reference/sql/execute-immediate)
     - 
[PgSql](https://www.postgresql.org/docs/current/ecpg-sql-execute-immediate.html#:~:text=Description,statement%2C%20without%20retrieving%20result%20rows.)
    
    ### Why are the changes needed?
    Often time queries are constructed as a result of other queries. We need a 
way to dynamically construct a query and execute it without leaving SQL API.
    
    ### Does this PR introduce _any_ user-facing change?
    Yes, it exposes new syntax
    
    ### How was this patch tested?
    Golden files
    
    ### Was this patch authored or co-authored using generative AI tooling?
    No
    
    Closes #44093 from milastdbx/dev/executeImmediate.
    
    Lead-authored-by: milastdbx <milan.stefano...@databricks.com>
    Co-authored-by: Milan Stefanovic 
<150366084+milast...@users.noreply.github.com>
    Signed-off-by: Max Gekk <max.g...@gmail.com>
---
 .../src/main/resources/error/error-classes.json    |  44 ++
 docs/sql-error-conditions.md                       |  44 ++
 docs/sql-ref-ansi-compliance.md                    |   2 +
 docs/sql-ref-syntax-aux-exec-imm.md                |  86 +++
 docs/sql-ref-syntax.md                             |   1 +
 .../spark/sql/catalyst/parser/SqlBaseLexer.g4      |   2 +
 .../spark/sql/catalyst/parser/SqlBaseParser.g4     |  26 +
 .../spark/sql/errors/QueryParsingErrors.scala      |   9 +
 .../spark/sql/catalyst/analysis/Analyzer.scala     |   1 +
 .../sql/catalyst/analysis/executeImmediate.scala   | 189 +++++
 .../spark/sql/catalyst/analysis/parameters.scala   |   5 +-
 .../sql/catalyst/catalog/SessionCatalog.scala      |   2 +-
 .../spark/sql/catalyst/parser/AstBuilder.scala     |  59 ++
 .../sql/catalyst/rules/RuleIdCollection.scala      |   1 +
 .../spark/sql/catalyst/trees/TreePatterns.scala    |   1 +
 .../spark/sql/errors/QueryCompilationErrors.scala  |  50 ++
 .../sql/catalyst/analysis/AnalysisErrorSuite.scala |  70 ++
 .../sql/catalyst/analysis/AnalysisSuite.scala      |  24 +
 .../spark/sql/catalyst/analysis/AnalysisTest.scala |   3 +
 .../analyzer-results/execute-immediate.sql.out     | 782 +++++++++++++++++++++
 .../sql-tests/inputs/execute-immediate.sql         | 138 ++++
 .../sql-tests/results/ansi/keywords.sql.out        |   3 +
 .../sql-tests/results/execute-immediate.sql.out    | 738 +++++++++++++++++++
 .../resources/sql-tests/results/keywords.sql.out   |   2 +
 .../spark/sql/errors/QueryParsingErrorsSuite.scala |  27 +
 .../ThriftServerWithSparkContextSuite.scala        |   2 +-
 26 files changed, 2307 insertions(+), 4 deletions(-)

diff --git a/common/utils/src/main/resources/error/error-classes.json 
b/common/utils/src/main/resources/error/error-classes.json
index 62e3427fdffd..30aacc07d318 100644
--- a/common/utils/src/main/resources/error/error-classes.json
+++ b/common/utils/src/main/resources/error/error-classes.json
@@ -5,6 +5,12 @@
     ],
     "sqlState" : "42845"
   },
+  "ALL_PARAMETERS_MUST_BE_NAMED" : {
+    "message" : [
+      "Using named parameterized queries requires all parameters to be named. Parameters missing names: <exprs>."
+    ],
+    "sqlState" : "07001"
+  },
   "ALL_PARTITION_COLUMNS_NOT_ALLOWED" : {
     "message" : [
       "Cannot use all columns for partition columns."
@@ -1024,6 +1030,12 @@
     ],
     "sqlState" : "42702"
   },
+  "EXEC_IMMEDIATE_DUPLICATE_ARGUMENT_ALIASES" : {
+    "message" : [
+      "The USING clause of this EXECUTE IMMEDIATE command contained multiple 
arguments with same alias (<aliases>), which is invalid; please update the 
command to specify unique aliases and then try it again."
+    ],
+    "sqlState" : "42701"
+  },
   "EXPECT_PERMANENT_VIEW_NOT_TEMP" : {
     "message" : [
       "'<operation>' expects a permanent view but <viewName> is a temp view."
@@ -2230,6 +2242,12 @@
     ],
     "sqlState" : "42602"
   },
+  "INVALID_QUERY_MIXED_QUERY_PARAMETERS" : {
+    "message" : [
+      "Parameterized query must either use positional, or named parameters, 
but not both."
+    ],
+    "sqlState" : "42613"
+  },
   "INVALID_SCHEMA" : {
     "message" : [
       "The input schema <inputSchema> is not a valid schema string."
@@ -2381,6 +2399,12 @@
     },
     "sqlState" : "42000"
   },
+  "INVALID_STATEMENT_FOR_EXECUTE_INTO" : {
+    "message" : [
+      "The INTO clause of EXECUTE IMMEDIATE is only valid for queries but the 
given statement is not a query: <sqlString>."
+    ],
+    "sqlState" : "07501"
+  },
   "INVALID_SUBQUERY_EXPRESSION" : {
     "message" : [
       "Invalid subquery:"
@@ -2458,6 +2482,12 @@
     ],
     "sqlState" : "42000"
   },
+  "INVALID_VARIABLE_TYPE_FOR_QUERY_EXECUTE_IMMEDIATE" : {
+    "message" : [
+      "Variable type must be string type but got <varType>."
+    ],
+    "sqlState" : "42K09"
+  },
   "INVALID_VIEW_TEXT" : {
     "message" : [
       "The view <viewName> cannot be displayed due to invalid view text: 
<viewText>. This may be caused by an unauthorized modification of the view or 
an incorrect query syntax. Please check your query syntax and verify that the 
view has not been tampered with."
@@ -2643,6 +2673,12 @@
     ],
     "sqlState" : "42607"
   },
+  "NESTED_EXECUTE_IMMEDIATE" : {
+    "message" : [
+      "Nested EXECUTE IMMEDIATE commands are not allowed. Please ensure that 
the SQL query provided (<sqlString>) does not contain another EXECUTE IMMEDIATE 
command."
+    ],
+    "sqlState" : "07501"
+  },
   "NON_FOLDABLE_ARGUMENT" : {
     "message" : [
       "The function <funcName> requires the parameter <paramName> to be a 
foldable expression of the type <paramType>, but the actual argument is a 
non-foldable."
@@ -3599,6 +3635,14 @@
     ],
     "sqlState" : "42K0E"
   },
+  "UNSUPPORTED_EXPR_FOR_PARAMETER" : {
+    "message" : [
+      "A query parameter contains unsupported expression.",
+      "Parameters can either be variables or literals.",
+      "Invalid expression: [<invalidExprSql>]"
+    ],
+    "sqlState" : "42K0E"
+  },
   "UNSUPPORTED_EXPR_FOR_WINDOW" : {
     "message" : [
       "Expression <sqlExpr> not supported within a window function."
diff --git a/docs/sql-error-conditions.md b/docs/sql-error-conditions.md
index 5657877971c5..e4b04ce02fe2 100644
--- a/docs/sql-error-conditions.md
+++ b/docs/sql-error-conditions.md
@@ -29,6 +29,12 @@ Also see [SQLSTATE 
Codes](sql-error-conditions-sqlstates.html).
 
 Non-deterministic expression `<sqlExpr>` should not appear in the arguments of 
an aggregate function.
 
+### ALL_PARAMETERS_MUST_BE_NAMED
+
+SQLSTATE: 07001
+
+Using named parameterized queries requires all parameters to be named. Parameters missing names: `<exprs>`.
+
 ### ALL_PARTITION_COLUMNS_NOT_ALLOWED
 
 SQLSTATE: KD005
@@ -613,6 +619,12 @@ EXCEPT column `<columnName>` was resolved and expected to 
be StructType, but fou
 
 Columns in an EXCEPT list must be distinct and non-overlapping, but got 
(`<columns>`).
 
+### EXEC_IMMEDIATE_DUPLICATE_ARGUMENT_ALIASES
+
+[SQLSTATE: 
42701](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation)
+
+The USING clause of this EXECUTE IMMEDIATE command contained multiple 
arguments with same alias (`<aliases>`), which is invalid; please update the 
command to specify unique aliases and then try it again.
+
 ### EXPECT_PERMANENT_VIEW_NOT_TEMP
 
 [SQLSTATE: 
42809](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation)
@@ -1257,6 +1269,12 @@ For more details see 
[INVALID_PARTITION_OPERATION](sql-error-conditions-invalid-
 
 `<value>` is an invalid property value, please use quotes, e.g. SET 
`<key>`=`<value>`
 
+### INVALID_QUERY_MIXED_QUERY_PARAMETERS
+
+[SQLSTATE: 
42613](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation)
+
+Parameterized query must either use positional, or named parameters, but not 
both.
+
 ### [INVALID_SCHEMA](sql-error-conditions-invalid-schema-error-class.html)
 
 [SQLSTATE: 
42K07](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation)
@@ -1291,6 +1309,12 @@ Invalid SQL syntax:
 
 For more details see 
[INVALID_SQL_SYNTAX](sql-error-conditions-invalid-sql-syntax-error-class.html)
 
+### INVALID_STATEMENT_FOR_EXECUTE_INTO
+
+SQLSTATE: 07501
+
+The INTO clause of EXECUTE IMMEDIATE is only valid for queries but the given 
statement is not a query: `<sqlString>`.
+
 ### 
[INVALID_SUBQUERY_EXPRESSION](sql-error-conditions-invalid-subquery-expression-error-class.html)
 
 [SQLSTATE: 
42823](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation)
@@ -1343,6 +1367,12 @@ The url is invalid: `<url>`. If necessary set 
`<ansiConfig>` to "false" to bypas
 
 Invalid usage of `<elem>` in `<prettyName>`.
 
+### INVALID_VARIABLE_TYPE_FOR_QUERY_EXECUTE_IMMEDIATE
+
+[SQLSTATE: 
42K09](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation)
+
+Variable type must be string type but got `<varType>`.
+
 ### INVALID_VIEW_TEXT
 
 [SQLSTATE: XX000](sql-error-conditions-sqlstates.html#class-XX-internal-error)
@@ -1487,6 +1517,12 @@ Set "spark.sql.allowNamedFunctionArguments" to "true" to 
turn on feature.
 
 It is not allowed to use an aggregate function in the argument of another 
aggregate function. Please use the inner aggregate function in a sub-query.
 
+### NESTED_EXECUTE_IMMEDIATE
+
+SQLSTATE: 07501
+
+Nested EXECUTE IMMEDIATE commands are not allowed. Please ensure that the SQL 
query provided (`<sqlString>`) does not contain another EXECUTE IMMEDIATE 
command.
+
 ### NON_FOLDABLE_ARGUMENT
 
 [SQLSTATE: 
42K08](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation)
@@ -2318,6 +2354,14 @@ A query operator contains one or more unsupported 
expressions.
 Consider to rewrite it to avoid window functions, aggregate functions, and 
generator functions in the WHERE clause.
 Invalid expressions: [`<invalidExprSqls>`]
 
+### UNSUPPORTED_EXPR_FOR_PARAMETER
+
+[SQLSTATE: 
42K0E](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation)
+
+A query parameter contains unsupported expression.
+Parameters can either be variables or literals.
+Invalid expression: [`<invalidExprSql>`]
+
 ### UNSUPPORTED_EXPR_FOR_WINDOW
 
 [SQLSTATE: 
42P20](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation)
diff --git a/docs/sql-ref-ansi-compliance.md b/docs/sql-ref-ansi-compliance.md
index 90e65e5ce36e..93af3e669847 100644
--- a/docs/sql-ref-ansi-compliance.md
+++ b/docs/sql-ref-ansi-compliance.md
@@ -464,6 +464,7 @@ Below is a list of all the keywords in Spark SQL.
 |EXCEPT|reserved|strict-non-reserved|reserved|
 |EXCHANGE|non-reserved|non-reserved|non-reserved|
 |EXCLUDE|non-reserved|non-reserved|non-reserved|
+|EXECUTE|reserved|non-reserved|reserved|
 |EXISTS|non-reserved|non-reserved|reserved|
 |EXPLAIN|non-reserved|non-reserved|non-reserved|
 |EXPORT|non-reserved|non-reserved|non-reserved|
@@ -497,6 +498,7 @@ Below is a list of all the keywords in Spark SQL.
 |IDENTIFIER|non-reserved|non-reserved|non-reserved|
 |IF|non-reserved|non-reserved|not a keyword|
 |IGNORE|non-reserved|non-reserved|non-reserved|
+|IMMEDIATE|non-reserved|non-reserved|non-reserved|
 |IMPORT|non-reserved|non-reserved|non-reserved|
 |IN|reserved|non-reserved|reserved|
 |INCLUDE|non-reserved|non-reserved|non-reserved|
diff --git a/docs/sql-ref-syntax-aux-exec-imm.md 
b/docs/sql-ref-syntax-aux-exec-imm.md
new file mode 100644
index 000000000000..0d9a0f5b4aa3
--- /dev/null
+++ b/docs/sql-ref-syntax-aux-exec-imm.md
@@ -0,0 +1,86 @@
+---
+layout: global
+title: EXECUTE IMMEDIATE
+displayTitle: EXECUTE IMMEDIATE
+license: |
+  Licensed to the Apache Software Foundation (ASF) under one or more
+  contributor license agreements.  See the NOTICE file distributed with
+  this work for additional information regarding copyright ownership.
+  The ASF licenses this file to You under the Apache License, Version 2.0
+  (the "License"); you may not use this file except in compliance with
+  the License.  You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+---
+
+### Description
+
+Executes a sql statement provided as a `STRING`, optionally passing 
`arg_exprN` to parameter markers and assigning the results to `var_nameN`.
+
+### Syntax
+
+```sql
+EXECUTE IMMEDIATE sql_string
+        [ INTO var_name [, …]  ]
+        [ USING { (arg_expr [ AS ] [alias] [, …] ) | arg_expr [ AS ] [alias] 
[, …] } ]
+```
+
+### Parameters
+
+* **sql_string**
+
+  A STRING expression producing a well-formed SQL statement.
+
+* **INTO var_name [, …]**
+
+    Optionally returns the results of a single row query into SQL variables.
+    If the query returns no rows the result is NULL.
+    - `var_name`
+    A SQL variable. A variable may not be referenced more than once.
+
+* **USING arg_expr [, …]**
+
+  Optionally, if sql_string contains parameter markers, binds in values to the 
parameters.
+  - `arg_expr`
+  An expression that binds to a parameter marker.
+  If the parameter markers are unnamed, the binding is by position.
+  For named parameter markers, binding is by name.
+  - `alias`
+    Overrides the name used to bind `arg_expr` to a named parameter marker
+
+  Each named parameter marker must be matched once. Not every arg_expr needs to be matched.
+
+
+### Examples
+
+```sql
+-- A self-contained execution using a literal string
+EXECUTE IMMEDIATE 'SELECT SUM(c1) FROM VALUES(?), (?)' USING 5, 6;
+ 11
+
+-- A SQL string composed in a SQL variable
+DECLARE sqlStr = 'SELECT SUM(c1) FROM VALUES(?), (?)';
+DECLARE arg1 = 5;
+DECLARE arg2 = 6;
+EXECUTE IMMEDIATE sqlStr USING arg1, arg2;
+ 11
+
+-- Using the INTO clause
+DECLARE sum INT;
+EXECUTE IMMEDIATE sqlStr INTO sum USING arg1, arg2;
+SELECT sum;
+ 11
+
+-- Using named parameter markers
+SET VAR sqlStr = 'SELECT SUM(c1) FROM VALUES(:first), (:second)';
+EXECUTE IMMEDIATE sqlStr INTO (sum)
+    USING 5 AS first, arg2 AS second;
+SELECT sum;
+ 11
+```
\ No newline at end of file
diff --git a/docs/sql-ref-syntax.md b/docs/sql-ref-syntax.md
index 8bc1c7cec56d..163db8f6f57d 100644
--- a/docs/sql-ref-syntax.md
+++ b/docs/sql-ref-syntax.md
@@ -108,6 +108,7 @@ ability to generate logical and physical plan for a given 
query using
  * [RESET](sql-ref-syntax-aux-conf-mgmt-reset.html)
  * [SET](sql-ref-syntax-aux-conf-mgmt-set.html)
  * [SET VAR](sql-ref-syntax-aux-set-var.html)
+ * [EXECUTE IMMEDIATE](sql-ref-syntax-aux-exec-imm.html)
  * [SHOW COLUMNS](sql-ref-syntax-aux-show-columns.html)
  * [SHOW CREATE TABLE](sql-ref-syntax-aux-show-create-table.html)
  * [SHOW DATABASES](sql-ref-syntax-aux-show-databases.html)
diff --git 
a/sql/api/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBaseLexer.g4 
b/sql/api/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBaseLexer.g4
index 9b3dcbc6d194..174887def66d 100644
--- 
a/sql/api/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBaseLexer.g4
+++ 
b/sql/api/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBaseLexer.g4
@@ -217,6 +217,7 @@ HOURS: 'HOURS';
 IDENTIFIER_KW: 'IDENTIFIER';
 IF: 'IF';
 IGNORE: 'IGNORE';
+IMMEDIATE: 'IMMEDIATE';
 IMPORT: 'IMPORT';
 IN: 'IN';
 INCLUDE: 'INCLUDE';
@@ -381,6 +382,7 @@ TIMESTAMPADD: 'TIMESTAMPADD';
 TIMESTAMPDIFF: 'TIMESTAMPDIFF';
 TINYINT: 'TINYINT';
 TO: 'TO';
+EXECUTE: 'EXECUTE';
 TOUCH: 'TOUCH';
 TRAILING: 'TRAILING';
 TRANSACTION: 'TRANSACTION';
diff --git 
a/sql/api/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBaseParser.g4 
b/sql/api/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBaseParser.g4
index 0ab3c5ac0c36..737d5196e7c4 100644
--- 
a/sql/api/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBaseParser.g4
+++ 
b/sql/api/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBaseParser.g4
@@ -72,6 +72,7 @@ singleTableSchema
 
 statement
     : query                                                            
#statementDefault
+    | executeImmediate                                                 
#visitExecuteImmediate
     | ctes? dmlStatementNoWith                                         
#dmlStatement
     | USE identifierReference                                          #use
     | USE namespace identifierReference                                
#useNamespace
@@ -230,6 +231,28 @@ statement
     | unsupportedHiveNativeCommands .*?                                
#failNativeCommand
     ;
 
+executeImmediate
+    : EXECUTE IMMEDIATE queryParam=executeImmediateQueryParam (INTO 
targetVariable=multipartIdentifierList)? executeImmediateUsing?
+    ;
+
+executeImmediateUsing
+    : USING LEFT_PAREN params=namedExpressionSeq RIGHT_PAREN
+    | USING params=namedExpressionSeq
+    ;
+
+executeImmediateQueryParam
+    : stringLit
+    | multipartIdentifier
+    ;
+
+executeImmediateArgument
+    : (constant|multipartIdentifier) (AS name=errorCapturingIdentifier)?
+    ;
+
+executeImmediateArgumentSeq
+    : executeImmediateArgument (COMMA executeImmediateArgument)*
+    ;
+
 timezone
     : stringLit
     | LOCAL
@@ -1394,6 +1417,7 @@ ansiNonReserved
     | IDENTIFIER_KW
     | IF
     | IGNORE
+    | IMMEDIATE
     | IMPORT
     | INCLUDE
     | INDEX
@@ -1685,6 +1709,7 @@ nonReserved
     | ESCAPED
     | EXCHANGE
     | EXCLUDE
+    | EXECUTE
     | EXISTS
     | EXPLAIN
     | EXPORT
@@ -1717,6 +1742,7 @@ nonReserved
     | IDENTIFIER_KW
     | IF
     | IGNORE
+    | IMMEDIATE
     | IMPORT
     | IN
     | INCLUDE
diff --git 
a/sql/api/src/main/scala/org/apache/spark/sql/errors/QueryParsingErrors.scala 
b/sql/api/src/main/scala/org/apache/spark/sql/errors/QueryParsingErrors.scala
index 841a678144f5..daab055608f5 100644
--- 
a/sql/api/src/main/scala/org/apache/spark/sql/errors/QueryParsingErrors.scala
+++ 
b/sql/api/src/main/scala/org/apache/spark/sql/errors/QueryParsingErrors.scala
@@ -631,6 +631,15 @@ private[sql] object QueryParsingErrors extends 
DataTypeErrorsBase {
     new ParseException(errorClass = 
"REF_DEFAULT_VALUE_IS_NOT_ALLOWED_IN_PARTITION", ctx)
   }
 
+  def duplicateArgumentNamesError(
+      arguments: Seq[String],
+      ctx: ParserRuleContext): Throwable = {
+    new ParseException(
+      errorClass = "EXEC_IMMEDIATE_DUPLICATE_ARGUMENT_ALIASES",
+      messageParameters = Map("aliases" -> arguments.map(toSQLId).mkString(", 
")),
+      ctx)
+  }
+
   def duplicateTableColumnDescriptor(
       ctx: ParserRuleContext,
       columnName: String,
diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
index acb9b610dc09..e3538647e375 100644
--- 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
+++ 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
@@ -256,6 +256,7 @@ class Analyzer(override val catalogManager: CatalogManager) 
extends RuleExecutor
 
   override def batches: Seq[Batch] = Seq(
     Batch("Substitution", fixedPoint,
+      new SubstituteExecuteImmediate(catalogManager),
       // This rule optimizes `UpdateFields` expression chains so looks more 
like optimization rule.
       // However, when manipulating deeply nested schema, `UpdateFields` 
expression tree could be
       // very complex and make analysis impossible. Thus we need to optimize 
`UpdateFields` early
diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/executeImmediate.scala
 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/executeImmediate.scala
new file mode 100644
index 000000000000..8fc373b71f25
--- /dev/null
+++ 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/executeImmediate.scala
@@ -0,0 +1,189 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql.catalyst.analysis
+
+import scala.util.{Either, Left, Right}
+
+import org.apache.spark.sql.catalyst.expressions.{Alias, Expression, 
VariableReference}
+import org.apache.spark.sql.catalyst.parser.ParseException
+import org.apache.spark.sql.catalyst.plans.logical.{LogicalPlan, SetVariable}
+import org.apache.spark.sql.catalyst.rules.Rule
+import org.apache.spark.sql.catalyst.trees.TreePattern.{EXECUTE_IMMEDIATE, 
TreePattern}
+import org.apache.spark.sql.connector.catalog.CatalogManager
+import org.apache.spark.sql.errors.QueryCompilationErrors
+import org.apache.spark.sql.types.StringType
+
+/**
+ * Logical plan representing execute immediate query.
+ *
+ * @param args parameters of query
+ * @param query query string or variable
+ * @param targetVariables variables to store the result of the query
+ */
+case class ExecuteImmediateQuery(
+    args: Seq[Expression],
+    query: Either[String, UnresolvedAttribute],
+    targetVariables: Seq[UnresolvedAttribute])
+  extends UnresolvedLeafNode {
+  final override val nodePatterns: Seq[TreePattern] = Seq(EXECUTE_IMMEDIATE)
+}
+
+/**
+ * This rule substitutes execute immediate query node with plan that is passed 
as string literal
+ * or session parameter.
+ */
+class SubstituteExecuteImmediate(val catalogManager: CatalogManager)
+  extends Rule[LogicalPlan]
+  with ColumnResolutionHelper {
+
+  def resolveVariable(e: Expression): Expression = {
+
+    /**
+     * We know that the expression is either UnresolvedAttribute or Alias, as 
passed from the
+     * parser. If it is an UnresolvedAttribute, we look it up in the catalog 
and return it. If it
+     * is an Alias, we resolve the child and return an Alias with the same 
name.
+     */
+    e match {
+      case u: UnresolvedAttribute =>
+        getVariableReference(u, u.nameParts)
+      case a: Alias =>
+        Alias(resolveVariable(a.child), a.name)()
+      case other =>
+        throw QueryCompilationErrors.unsupportedParameterExpression(other)
+    }
+  }
+
+  def resolveArguments(expressions: Seq[Expression]): Seq[Expression] = {
+    expressions.map { exp =>
+      if (exp.resolved) {
+        exp
+      } else {
+        resolveVariable(exp)
+      }
+    }
+  }
+
+  def extractQueryString(either: Either[String, UnresolvedAttribute]): String 
= {
+    either match {
+      case Left(v) => v
+      case Right(u) =>
+        val varReference = getVariableReference(u, u.nameParts)
+
+        if (!varReference.dataType.sameType(StringType)) {
+          throw 
QueryCompilationErrors.invalidExecuteImmediateVariableType(varReference.dataType)
+        }
+
+        // Call eval with null value passed instead of a row.
+        // This is ok as this is variable and invoking eval should
+        // be independent of row value.
+        varReference.eval(null).toString
+    }
+  }
+
+  override def apply(plan: LogicalPlan): LogicalPlan =
+    plan.resolveOperatorsWithPruning(_.containsPattern(EXECUTE_IMMEDIATE), 
ruleId) {
+      case ExecuteImmediateQuery(expressions, query, targetVariables) =>
+        val queryString = extractQueryString(query)
+        val plan = parseStatement(queryString, targetVariables)
+
+        val posNodes = plan.collect { case p: LogicalPlan =>
+          p.expressions.flatMap(_.collect { case n: PosParameter => n })
+        }.flatten
+        val namedNodes = plan.collect { case p: LogicalPlan =>
+          p.expressions.flatMap(_.collect { case n: NamedParameter => n })
+        }.flatten
+
+        val queryPlan = if (expressions.isEmpty || (posNodes.isEmpty && 
namedNodes.isEmpty)) {
+          plan
+        } else if (posNodes.nonEmpty && namedNodes.nonEmpty) {
+          throw QueryCompilationErrors.invalidQueryMixedQueryParameters()
+        } else {
+          if (posNodes.nonEmpty) {
+            PosParameterizedQuery(
+              plan,
+              // We need to resolve arguments before Resolution batch to make 
sure
+              // that some rule does not accidentally resolve our parameters.
+              // We do not want this as they can resolve some unsupported 
parameters
+              resolveArguments(expressions))
+          } else {
+            val aliases = expressions.collect {
+              case (e: Alias) => e
+            }
+            val nonAliases = expressions.filter(!_.isInstanceOf[Alias])
+
+            if (nonAliases.nonEmpty) {
+              throw 
QueryCompilationErrors.invalidQueryAllParametersMustBeNamed(nonAliases)
+            }
+
+            NameParameterizedQuery(
+              plan,
+              aliases.map(_.name),
+              // We need to resolve arguments before Resolution batch to make 
sure
+              // that some rule does not accidentally resolve our parameters.
+              // We do not want this as they can resolve some unsupported 
parameters.
+              resolveArguments(aliases))
+          }
+        }
+
+        if (targetVariables.nonEmpty) {
+          SetVariable(targetVariables, queryPlan)
+        } else { queryPlan }
+    }
+
+  private def parseStatement(
+      queryString: String,
+      targetVariables: Seq[Expression]): LogicalPlan = {
+    // If targetVariables is defined, statement needs to be a query.
+    // Otherwise, it can be anything.
+    val plan = if (targetVariables.nonEmpty) {
+      try {
+        catalogManager.v1SessionCatalog.parser.parseQuery(queryString)
+      } catch {
+        case e: ParseException =>
+          // Since we do not have a way of telling that parseQuery failed 
because of
+          // actual parsing error or because statement was passed where query 
was expected,
+          // we need to make sure that parsePlan wouldn't throw
+          catalogManager.v1SessionCatalog.parser.parsePlan(queryString)
+
+          // Plan was successfully parsed, but query wasn't - throw.
+          throw 
QueryCompilationErrors.invalidStatementForExecuteInto(queryString)
+      }
+    } else {
+      catalogManager.v1SessionCatalog.parser.parsePlan(queryString)
+    }
+
+    // do not allow nested execute immediate
+    if (plan.containsPattern(EXECUTE_IMMEDIATE)) {
+      throw QueryCompilationErrors.nestedExecuteImmediate(queryString)
+    }
+
+    plan
+  }
+
+  private def getVariableReference(expr: Expression, nameParts: Seq[String]): 
VariableReference = {
+    lookupVariable(nameParts) match {
+      case Some(variable) => variable.copy(canFold = false)
+      case _ =>
+        throw QueryCompilationErrors
+          .unresolvedVariableError(
+            nameParts,
+            Seq(CatalogManager.SYSTEM_CATALOG_NAME, 
CatalogManager.SESSION_NAMESPACE),
+            expr.origin)
+    }
+  }
+}
diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/parameters.scala
 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/parameters.scala
index 66da63f363f0..f1cc44b270bc 100644
--- 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/parameters.scala
+++ 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/parameters.scala
@@ -18,7 +18,7 @@
 package org.apache.spark.sql.catalyst.analysis
 
 import org.apache.spark.SparkException
-import org.apache.spark.sql.catalyst.expressions.{CreateArray, CreateMap, 
CreateNamedStruct, Expression, LeafExpression, Literal, MapFromArrays, 
MapFromEntries, SubqueryExpression, Unevaluable}
+import org.apache.spark.sql.catalyst.expressions.{Alias, CreateArray, 
CreateMap, CreateNamedStruct, Expression, LeafExpression, Literal, 
MapFromArrays, MapFromEntries, SubqueryExpression, Unevaluable, 
VariableReference}
 import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
 import org.apache.spark.sql.catalyst.rules.Rule
 import org.apache.spark.sql.catalyst.trees.TreePattern.{PARAMETER, 
PARAMETERIZED_QUERY, TreePattern, UNRESOLVED_WITH}
@@ -113,7 +113,8 @@ object BindParameters extends Rule[LogicalPlan] with 
QueryErrorsBase {
   private def checkArgs(args: Iterable[(String, Expression)]): Unit = {
     def isNotAllowed(expr: Expression): Boolean = expr.exists {
       case _: Literal | _: CreateArray | _: CreateNamedStruct |
-        _: CreateMap | _: MapFromArrays |  _: MapFromEntries => false
+        _: CreateMap | _: MapFromArrays |  _: MapFromEntries | _: 
VariableReference => false
+      case a: Alias => isNotAllowed(a.child)
       case _ => true
     }
     args.find(arg => isNotAllowed(arg._2)).foreach { case (name, expr) =>
diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala
 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala
index 4016e3bfb067..d17d0a97387d 100644
--- 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala
+++ 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala
@@ -66,7 +66,7 @@ class SessionCatalog(
     functionRegistry: FunctionRegistry,
     tableFunctionRegistry: TableFunctionRegistry,
     hadoopConf: Configuration,
-    parser: ParserInterface,
+    val parser: ParserInterface,
     functionResourceLoader: FunctionResourceLoader,
     functionExpressionBuilder: FunctionExpressionBuilder,
     cacheSize: Int = SQLConf.get.tableRelationCacheSize,
diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
index 9a4e389fcf99..bb63d874aa1a 100644
--- 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
+++ 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
@@ -22,6 +22,7 @@ import java.util.concurrent.TimeUnit
 
 import scala.collection.mutable.{ArrayBuffer, Set}
 import scala.jdk.CollectionConverters._
+import scala.util.{Left, Right}
 
 import org.antlr.v4.runtime.{ParserRuleContext, Token}
 import org.antlr.v4.runtime.misc.Interval
@@ -558,6 +559,64 @@ class AstBuilder extends DataTypeAstBuilder with 
SQLConfHelper with Logging {
   }
 
   /**
+   * Returns the parameters for [[ExecuteImmediateQuery]] logical plan.
+   * Expected format:
+   * {{{
+   *   EXECUTE IMMEDIATE {query_string|string_literal}
+   *   [INTO target1, target2] [USING param1, param2, ...]
+   * }}}
+   */
+  override def visitExecuteImmediate(ctx: ExecuteImmediateContext): 
LogicalPlan = withOrigin(ctx) {
+    // Because of how the parsing rules are written, we know that exactly one of
+    // queryParam's stringLit and multipartIdentifier alternatives is non-null —
+    // hence use Either to represent the two cases.
+    val queryString = Option(ctx.queryParam.stringLit()).map(sl => 
Left(string(visitStringLit(sl))))
+    val queryVariable = Option(ctx.queryParam.multipartIdentifier)
+      .map(mpi => Right(UnresolvedAttribute(visitMultipartIdentifier(mpi))))
+
+    val targetVars = Option(ctx.targetVariable).toSeq
+      .flatMap(v => visitMultipartIdentifierList(v))
+    val exprs = Option(ctx.executeImmediateUsing).map {
+      visitExecuteImmediateUsing(_)
+    }.getOrElse{ Seq.empty }
+
+
+    ExecuteImmediateQuery(exprs, queryString.getOrElse(queryVariable.get), 
targetVars)
+  }
+
+  override def visitExecuteImmediateUsing(
+      ctx: ExecuteImmediateUsingContext): Seq[Expression] = withOrigin(ctx) {
+    val expressions = Option(ctx).toSeq
+      .flatMap(ctx => visitNamedExpressionSeq(ctx.params))
+    val resultExpr = expressions.map(e => e._1)
+
+    validateExecImmediateArguments(resultExpr, ctx)
+    resultExpr
+  }
+
+  /**
+   * Performs validation on the arguments to EXECUTE IMMEDIATE.
+   */
+  private def validateExecImmediateArguments(
+    expressions: Seq[Expression],
+    ctx : ExecuteImmediateUsingContext) : Unit = {
+    val duplicateAliases = expressions
+      .filter(_.isInstanceOf[Alias])
+      .groupBy {
+        case Alias(arg, name) => name
+      }.filter(group => group._2.size > 1)
+
+    if (duplicateAliases.nonEmpty) {
+      throw 
QueryParsingErrors.duplicateArgumentNamesError(duplicateAliases.keys.toSeq, ctx)
+    }
+  }
+
+  override def visitMultipartIdentifierList(
+      ctx: MultipartIdentifierListContext): Seq[UnresolvedAttribute] = 
withOrigin(ctx) {
+    ctx.multipartIdentifier.asScala.map(typedVisit[Seq[String]]).map(new 
UnresolvedAttribute(_))
+      .toSeq
+  }
+
+  /**
    * Create a partition specification map.
    */
   override def visitPartitionSpec(
diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/rules/RuleIdCollection.scala
 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/rules/RuleIdCollection.scala
index d71fab817df1..a6d03692646c 100644
--- 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/rules/RuleIdCollection.scala
+++ 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/rules/RuleIdCollection.scala
@@ -93,6 +93,7 @@ object RuleIdCollection {
       "org.apache.spark.sql.catalyst.analysis.ResolveOrderByAll" ::
       
"org.apache.spark.sql.catalyst.analysis.ResolveRowLevelCommandAssignments" ::
       "org.apache.spark.sql.catalyst.analysis.ResolveSetVariable" ::
+      "org.apache.spark.sql.catalyst.analysis.SubstituteExecuteImmediate" ::
       "org.apache.spark.sql.catalyst.analysis.ResolveTableSpec" ::
       "org.apache.spark.sql.catalyst.analysis.ResolveTimeZone" ::
       "org.apache.spark.sql.catalyst.analysis.ResolveUnion" ::
diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/trees/TreePatterns.scala
 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/trees/TreePatterns.scala
index 1f0df8f3b8ab..fc869bce2772 100644
--- 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/trees/TreePatterns.scala
+++ 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/trees/TreePatterns.scala
@@ -112,6 +112,7 @@ object TreePattern extends Enumeration  {
   val EVAL_PYTHON_UDTF: Value = Value
   val EVENT_TIME_WATERMARK: Value = Value
   val EXCEPT: Value = Value
+  val EXECUTE_IMMEDIATE: Value = Value
   val FILTER: Value = Value
   val INNER_LIKE_JOIN: Value = Value
   val JOIN: Value = Value
diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
index a2ce6cc16393..cdab854c004b 100644
--- 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
+++ 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
@@ -764,6 +764,18 @@ private[sql] object QueryCompilationErrors extends 
QueryErrorsBase with Compilat
         "searchPath" -> toSQLId(searchPath)))
   }
 
+  def unresolvedVariableError(
+      name: Seq[String],
+      searchPath: Seq[String],
+      origin: Origin): Throwable = {
+    new AnalysisException(
+      errorClass = "UNRESOLVED_VARIABLE",
+      messageParameters = Map(
+        "variableName" -> toSQLId(name),
+        "searchPath" -> toSQLId(searchPath)),
+      origin = origin)
+  }
+
   def unresolvedRoutineError(name: FunctionIdentifier, searchPath: 
Seq[String]): Throwable = {
     new AnalysisException(
       errorClass = "UNRESOLVED_ROUTINE",
@@ -3874,4 +3886,42 @@ private[sql] object QueryCompilationErrors extends 
QueryErrorsBase with Compilat
       errorClass = "_LEGACY_ERROR_TEMP_2450",
       messageParameters = Map("invalidClass" -> invalidClass))
   }
+
+  def unsupportedParameterExpression(expr: Expression): Throwable = {
+    new AnalysisException(
+      errorClass = "UNSUPPORTED_EXPR_FOR_PARAMETER",
+      messageParameters = Map(
+        "invalidExprSql" -> toSQLExpr(expr)),
+      origin = expr.origin)
+  }
+
+  def invalidQueryAllParametersMustBeNamed(expr: Seq[Expression]): Throwable = 
{
+    new AnalysisException(
+      errorClass = "ALL_PARAMETERS_MUST_BE_NAMED",
+      messageParameters = Map("exprs" -> expr.map(e => 
toSQLExpr(e)).mkString(", ")))
+  }
+
+  def invalidQueryMixedQueryParameters(): Throwable = {
+    throw new AnalysisException(
+      errorClass = "INVALID_QUERY_MIXED_QUERY_PARAMETERS",
+      messageParameters = Map.empty)
+  }
+
+  def invalidExecuteImmediateVariableType(dataType: DataType): Throwable = {
+    throw new AnalysisException(
+      errorClass = "INVALID_VARIABLE_TYPE_FOR_QUERY_EXECUTE_IMMEDIATE",
+      messageParameters = Map("varType" -> toSQLType(dataType)))
+  }
+
+  def invalidStatementForExecuteInto(queryString: String): Throwable = {
+    throw new AnalysisException(
+      errorClass = "INVALID_STATEMENT_FOR_EXECUTE_INTO",
+      messageParameters = Map("sqlString" -> toSQLStmt(queryString)))
+  }
+
+  def nestedExecuteImmediate(queryString: String): Throwable = {
+    throw new AnalysisException(
+      errorClass = "NESTED_EXECUTE_IMMEDIATE",
+      messageParameters = Map("sqlString" -> toSQLStmt(queryString)))
+  }
 }
diff --git 
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisErrorSuite.scala
 
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisErrorSuite.scala
index ac263230f127..0bcda7dd5590 100644
--- 
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisErrorSuite.scala
+++ 
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisErrorSuite.scala
@@ -835,6 +835,76 @@ class AnalysisErrorSuite extends AnalysisTest with 
DataTypeErrorsBase {
        "filter (where nth_value(e, 2) over(order by b) > 1) FROM TaBlE2"),
     "FILTER expression contains window function" :: Nil)
 
+  errorClassTest(
+    "EXEC IMMEDIATE - nested execute immediate not allowed",
+    CatalystSqlParser.parsePlan("EXECUTE IMMEDIATE 'EXECUTE IMMEDIATE 
\\\'SELECT 42\\\''"),
+    "NESTED_EXECUTE_IMMEDIATE",
+    Map(
+      "sqlString" -> "EXECUTE IMMEDIATE 'SELECT 42'"))
+
+  errorClassTest(
+    "EXEC IMMEDIATE - both positional and named used",
+    CatalystSqlParser.parsePlan("EXECUTE IMMEDIATE 'SELECT 42 where ? = 
:first'" +
+      " USING 1, 2 as first"),
+    "INVALID_QUERY_MIXED_QUERY_PARAMETERS",
+    Map.empty)
+
+  test("EXEC IMMEDIATE - non string variable as sqlString parameter") {
+    var execImmediatePlan = ExecuteImmediateQuery(
+      Seq.empty,
+      scala.util.Right(UnresolvedAttribute("testVarA")),
+      Seq(UnresolvedAttribute("testVarA")))
+
+    assertAnalysisErrorClass(
+      inputPlan = execImmediatePlan,
+      expectedErrorClass = "INVALID_VARIABLE_TYPE_FOR_QUERY_EXECUTE_IMMEDIATE",
+      expectedMessageParameters = Map(
+        "varType" -> "\"INT\""
+      ))
+  }
+
+  test("EXEC IMMEDIATE - Unsupported expr for parameter") {
+    var execImmediatePlan: LogicalPlan = ExecuteImmediateQuery(
+      Seq(UnresolvedAttribute("testVarA"), NaNvl(Literal(1), Literal(1))),
+      scala.util.Left("SELECT ?"),
+      Seq.empty)
+
+    assertAnalysisErrorClass(
+      inputPlan = execImmediatePlan,
+      expectedErrorClass = "UNSUPPORTED_EXPR_FOR_PARAMETER",
+      expectedMessageParameters = Map(
+        "invalidExprSql" -> "\"nanvl(1, 1)\""
+      ))
+  }
+
+  test("EXEC IMMEDIATE - Name Parametrize query with non named parameters") {
+    var execImmediateSetVariablePlan = ExecuteImmediateQuery(
+      Seq(Literal(2), new Alias(UnresolvedAttribute("testVarA"), "first")(), 
Literal(3)),
+      scala.util.Left("SELECT :first"),
+      Seq.empty)
+
+    assertAnalysisErrorClass(
+      inputPlan = execImmediateSetVariablePlan,
+      expectedErrorClass = "ALL_PARAMETERS_MUST_BE_NAMED",
+      expectedMessageParameters = Map(
+        "exprs" -> "\"2\", \"3\""
+      ))
+  }
+
+  test("EXEC IMMEDIATE - INTO specified for COMMAND query") {
+    var execImmediateSetVariablePlan = ExecuteImmediateQuery(
+      Seq.empty,
+      scala.util.Left("SET VAR testVarA = 1"),
+      Seq(UnresolvedAttribute("testVarA")))
+
+    assertAnalysisErrorClass(
+      inputPlan = execImmediateSetVariablePlan,
+      expectedErrorClass = "INVALID_STATEMENT_FOR_EXECUTE_INTO",
+      expectedMessageParameters = Map(
+        "sqlString" -> "SET VAR TESTVARA = 1"
+      ))
+  }
+
   test("SPARK-6452 regression test") {
     // CheckAnalysis should throw AnalysisException when Aggregate contains 
missing attribute(s)
     // Since we manually construct the logical plan at here and Sum only accept
diff --git 
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisSuite.scala
 
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisSuite.scala
index 40ce1a3133cd..1dce98907326 100644
--- 
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisSuite.scala
+++ 
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisSuite.scala
@@ -1465,6 +1465,30 @@ class AnalysisSuite extends AnalysisTest with Matchers {
     assertAnalysisSuccess(finalPlan)
   }
 
+  test("Execute Immediate plan transformation") {
+    try {
+    SimpleAnalyzer.catalogManager.tempVariableManager.create(
+      "res", "1", Literal(1), overrideIfExists = true)
+    SimpleAnalyzer.catalogManager.tempVariableManager.create(
+      "res2", "1", Literal(1), overrideIfExists = true)
+    val actual1 = parsePlan("EXECUTE IMMEDIATE 'SELECT 42 WHERE ? = 1' USING 
2").analyze
+    val expected1 = parsePlan("SELECT 42 where 2 = 1").analyze
+    comparePlans(actual1, expected1)
+    val actual2 = parsePlan(
+      "EXECUTE IMMEDIATE 'SELECT 42 WHERE :first = 1' USING 2 as 
first").analyze
+    val expected2 = parsePlan("SELECT 42 where 2 = 1").analyze
+    comparePlans(actual2, expected2)
+    // Test that plan is transformed to SET operation
+    val actual3 = parsePlan(
+      "EXECUTE IMMEDIATE 'SELECT 17, 7 WHERE ? = 1' INTO res, res2 USING 
2").analyze
+    val expected3 = parsePlan("SET var (res, res2) = (SELECT 17, 7 where 2 = 
1)").analyze
+      comparePlans(actual3, expected3)
+    } finally {
+      SimpleAnalyzer.catalogManager.tempVariableManager.remove("res")
+      SimpleAnalyzer.catalogManager.tempVariableManager.remove("res2")
+    }
+  }
+
   test("SPARK-41271: bind named parameters to literals") {
     CTERelationDef.curId.set(0)
     val actual1 = NameParameterizedQuery(
diff --git 
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisTest.scala
 
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisTest.scala
index 2ee434704d6c..cfb40cffcdae 100644
--- 
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisTest.scala
+++ 
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisTest.scala
@@ -25,6 +25,7 @@ import org.apache.spark.sql.AnalysisException
 import org.apache.spark.sql.catalyst.{QueryPlanningTracker, TableIdentifier}
 import org.apache.spark.sql.catalyst.catalog.{CatalogDatabase, 
CatalogStorageFormat, CatalogTable, CatalogTableType, InMemoryCatalog, 
SessionCatalog, TemporaryViewRelation}
 import 
org.apache.spark.sql.catalyst.catalog.CatalogTable.VIEW_STORING_ANALYZED_PLAN
+import org.apache.spark.sql.catalyst.expressions.Literal
 import org.apache.spark.sql.catalyst.optimizer.InlineCTE
 import org.apache.spark.sql.catalyst.parser.ParseException
 import org.apache.spark.sql.catalyst.plans.PlanTest
@@ -85,6 +86,8 @@ trait AnalysisTest extends PlanTest {
     createTempView(catalog, "streamingTable", TestRelations.streamingRelation,
       overrideIfExists = true)
     new Analyzer(catalog) {
+      catalogManager.tempVariableManager.create(
+        "testVarA", "1", Literal(1), overrideIfExists = true)
       override val extendedResolutionRules = extendedAnalysisRules
     }
   }
diff --git 
a/sql/core/src/test/resources/sql-tests/analyzer-results/execute-immediate.sql.out
 
b/sql/core/src/test/resources/sql-tests/analyzer-results/execute-immediate.sql.out
new file mode 100644
index 000000000000..21930ebe6f0b
--- /dev/null
+++ 
b/sql/core/src/test/resources/sql-tests/analyzer-results/execute-immediate.sql.out
@@ -0,0 +1,782 @@
+-- Automatically generated by SQLQueryTestSuite
+-- !query
+CREATE TEMPORARY VIEW tbl_view AS SELECT * FROM VALUES
+  (10, 'name1', named_struct('f1', 1, 's2', named_struct('f2', 101, 'f3', 
'a'))),
+  (20, 'name2', named_struct('f1', 2, 's2', named_struct('f2', 202, 'f3', 
'b'))),
+  (30, 'name3', named_struct('f1', 3, 's2', named_struct('f2', 303, 'f3', 
'c'))),
+  (40, 'name4', named_struct('f1', 4, 's2', named_struct('f2', 404, 'f3', 
'd'))),
+  (50, 'name5', named_struct('f1', 5, 's2', named_struct('f2', 505, 'f3', 
'e'))),
+  (60, 'name6', named_struct('f1', 6, 's2', named_struct('f2', 606, 'f3', 
'f'))),
+  (70, 'name7', named_struct('f1', 7, 's2', named_struct('f2', 707, 'f3', 
'g')))
+AS tbl_view(id, name, data)
+-- !query analysis
+CreateViewCommand `tbl_view`, SELECT * FROM VALUES
+  (10, 'name1', named_struct('f1', 1, 's2', named_struct('f2', 101, 'f3', 
'a'))),
+  (20, 'name2', named_struct('f1', 2, 's2', named_struct('f2', 202, 'f3', 
'b'))),
+  (30, 'name3', named_struct('f1', 3, 's2', named_struct('f2', 303, 'f3', 
'c'))),
+  (40, 'name4', named_struct('f1', 4, 's2', named_struct('f2', 404, 'f3', 
'd'))),
+  (50, 'name5', named_struct('f1', 5, 's2', named_struct('f2', 505, 'f3', 
'e'))),
+  (60, 'name6', named_struct('f1', 6, 's2', named_struct('f2', 606, 'f3', 
'f'))),
+  (70, 'name7', named_struct('f1', 7, 's2', named_struct('f2', 707, 'f3', 
'g')))
+AS tbl_view(id, name, data), false, false, LocalTempView, true
+   +- Project [id#x, name#x, data#x]
+      +- SubqueryAlias tbl_view
+         +- LocalRelation [id#x, name#x, data#x]
+
+
+-- !query
+CREATE TABLE x (id INT) USING csv
+-- !query analysis
+CreateDataSourceTableCommand `spark_catalog`.`default`.`x`, false
+
+
+-- !query
+DECLARE sql_string STRING
+-- !query analysis
+CreateVariable defaultvalueexpression(null, null), false
++- ResolvedIdentifier 
org.apache.spark.sql.catalyst.analysis.FakeSystemCatalog$@xxxxxxxx, 
session.sql_string
+
+
+-- !query
+SET VAR sql_string = 'SELECT * from tbl_view where name = \'name1\''
+-- !query analysis
+SetVariable [variablereference(system.session.sql_string=CAST(NULL AS STRING))]
++- Project [SELECT * from tbl_view where name = 'name1' AS sql_string#x]
+   +- OneRowRelation
+
+
+-- !query
+EXECUTE IMMEDIATE 'SET spark.sql.ansi.enabled=true'
+-- !query analysis
+SetCommand (spark.sql.ansi.enabled,Some(true))
+
+
+-- !query
+EXECUTE IMMEDIATE 'CREATE TEMPORARY VIEW IDENTIFIER(:tblName) AS SELECT id, 
name FROM tbl_view' USING 'tbl_view_tmp' as tblName
+-- !query analysis
+CreateViewCommand `tbl_view_tmp`, SELECT id, name FROM tbl_view, false, false, 
LocalTempView, true
+   +- Project [id#x, name#x]
+      +- SubqueryAlias tbl_view
+         +- View (`tbl_view`, [id#x,name#x,data#x])
+            +- Project [cast(id#x as int) AS id#x, cast(name#x as string) AS 
name#x, cast(data#x as struct<f1:int,s2:struct<f2:int,f3:string>>) AS data#x]
+               +- Project [id#x, name#x, data#x]
+                  +- SubqueryAlias tbl_view
+                     +- LocalRelation [id#x, name#x, data#x]
+
+
+-- !query
+EXECUTE IMMEDIATE 'SELECT * FROM tbl_view_tmp'
+-- !query analysis
+Project [id#x, name#x]
++- SubqueryAlias tbl_view_tmp
+   +- View (`tbl_view_tmp`, [id#x,name#x])
+      +- Project [cast(id#x as int) AS id#x, cast(name#x as string) AS name#x]
+         +- Project [id#x, name#x]
+            +- SubqueryAlias tbl_view
+               +- View (`tbl_view`, [id#x,name#x,data#x])
+                  +- Project [cast(id#x as int) AS id#x, cast(name#x as 
string) AS name#x, cast(data#x as struct<f1:int,s2:struct<f2:int,f3:string>>) 
AS data#x]
+                     +- Project [id#x, name#x, data#x]
+                        +- SubqueryAlias tbl_view
+                           +- LocalRelation [id#x, name#x, data#x]
+
+
+-- !query
+EXECUTE IMMEDIATE 'REFRESH TABLE IDENTIFIER(:tblName)' USING 'x' as tblName
+-- !query analysis
+RefreshTableCommand `spark_catalog`.`default`.`x`
+
+
+-- !query
+EXECUTE IMMEDIATE sql_string
+-- !query analysis
+Project [id#x, name#x, data#x]
++- Filter (name#x = name1)
+   +- SubqueryAlias tbl_view
+      +- View (`tbl_view`, [id#x,name#x,data#x])
+         +- Project [cast(id#x as int) AS id#x, cast(name#x as string) AS 
name#x, cast(data#x as struct<f1:int,s2:struct<f2:int,f3:string>>) AS data#x]
+            +- Project [id#x, name#x, data#x]
+               +- SubqueryAlias tbl_view
+                  +- LocalRelation [id#x, name#x, data#x]
+
+
+-- !query
+EXECUTE IMMEDIATE 'SELECT * from tbl_view where name = \'name1\''
+-- !query analysis
+Project [id#x, name#x, data#x]
++- Filter (name#x = name1)
+   +- SubqueryAlias tbl_view
+      +- View (`tbl_view`, [id#x,name#x,data#x])
+         +- Project [cast(id#x as int) AS id#x, cast(name#x as string) AS 
name#x, cast(data#x as struct<f1:int,s2:struct<f2:int,f3:string>>) AS data#x]
+            +- Project [id#x, name#x, data#x]
+               +- SubqueryAlias tbl_view
+                  +- LocalRelation [id#x, name#x, data#x]
+
+
+-- !query
+SET VAR sql_string = 'SELECT * from tbl_view where name = ? or name = ?'
+-- !query analysis
+SetVariable [variablereference(system.session.sql_string='SELECT * from 
tbl_view where name = \'name1\'')]
++- Project [SELECT * from tbl_view where name = ? or name = ? AS sql_string#x]
+   +- OneRowRelation
+
+
+-- !query
+DECLARE a STRING
+-- !query analysis
+CreateVariable defaultvalueexpression(null, null), false
++- ResolvedIdentifier 
org.apache.spark.sql.catalyst.analysis.FakeSystemCatalog$@xxxxxxxx, session.a
+
+
+-- !query
+SET VAR a = 'name1'
+-- !query analysis
+SetVariable [variablereference(system.session.a=CAST(NULL AS STRING))]
++- Project [name1 AS a#x]
+   +- OneRowRelation
+
+
+-- !query
+EXECUTE IMMEDIATE sql_string USING 'name1', 'name3'
+-- !query analysis
+Project [id#x, name#x, data#x]
++- Filter ((name#x = name1) OR (name#x = name3))
+   +- SubqueryAlias tbl_view
+      +- View (`tbl_view`, [id#x,name#x,data#x])
+         +- Project [cast(id#x as int) AS id#x, cast(name#x as string) AS 
name#x, cast(data#x as struct<f1:int,s2:struct<f2:int,f3:string>>) AS data#x]
+            +- Project [id#x, name#x, data#x]
+               +- SubqueryAlias tbl_view
+                  +- LocalRelation [id#x, name#x, data#x]
+
+
+-- !query
+EXECUTE IMMEDIATE sql_string USING a, 'name2'
+-- !query analysis
+Project [id#x, name#x, data#x]
++- Filter ((name#x = variablereference(system.session.a='name1')) OR (name#x = 
name2))
+   +- SubqueryAlias tbl_view
+      +- View (`tbl_view`, [id#x,name#x,data#x])
+         +- Project [cast(id#x as int) AS id#x, cast(name#x as string) AS 
name#x, cast(data#x as struct<f1:int,s2:struct<f2:int,f3:string>>) AS data#x]
+            +- Project [id#x, name#x, data#x]
+               +- SubqueryAlias tbl_view
+                  +- LocalRelation [id#x, name#x, data#x]
+
+
+-- !query
+EXECUTE IMMEDIATE 'SELECT * from tbl_view where name = ? or name = ?' USING 
'name1', 'name3'
+-- !query analysis
+Project [id#x, name#x, data#x]
++- Filter ((name#x = name1) OR (name#x = name3))
+   +- SubqueryAlias tbl_view
+      +- View (`tbl_view`, [id#x,name#x,data#x])
+         +- Project [cast(id#x as int) AS id#x, cast(name#x as string) AS 
name#x, cast(data#x as struct<f1:int,s2:struct<f2:int,f3:string>>) AS data#x]
+            +- Project [id#x, name#x, data#x]
+               +- SubqueryAlias tbl_view
+                  +- LocalRelation [id#x, name#x, data#x]
+
+
+-- !query
+EXECUTE IMMEDIATE 'SELECT * from tbl_view where name = ? or name = ?' USING a, 
'name2'
+-- !query analysis
+Project [id#x, name#x, data#x]
++- Filter ((name#x = variablereference(system.session.a='name1')) OR (name#x = 
name2))
+   +- SubqueryAlias tbl_view
+      +- View (`tbl_view`, [id#x,name#x,data#x])
+         +- Project [cast(id#x as int) AS id#x, cast(name#x as string) AS 
name#x, cast(data#x as struct<f1:int,s2:struct<f2:int,f3:string>>) AS data#x]
+            +- Project [id#x, name#x, data#x]
+               +- SubqueryAlias tbl_view
+                  +- LocalRelation [id#x, name#x, data#x]
+
+
+-- !query
+EXECUTE IMMEDIATE 'SELECT * from tbl_view where name = ? or name = ?' USING 
(a, 'name2')
+-- !query analysis
+Project [id#x, name#x, data#x]
++- Filter ((name#x = variablereference(system.session.a='name1')) OR (name#x = 
name2))
+   +- SubqueryAlias tbl_view
+      +- View (`tbl_view`, [id#x,name#x,data#x])
+         +- Project [cast(id#x as int) AS id#x, cast(name#x as string) AS 
name#x, cast(data#x as struct<f1:int,s2:struct<f2:int,f3:string>>) AS data#x]
+            +- Project [id#x, name#x, data#x]
+               +- SubqueryAlias tbl_view
+                  +- LocalRelation [id#x, name#x, data#x]
+
+
+-- !query
+EXECUTE IMMEDIATE 'INSERT INTO x VALUES(?)' USING 1
+-- !query analysis
+InsertIntoHadoopFsRelationCommand file:[not included in 
comparison]/{warehouse_dir}/x, false, CSV, [path=file:[not included in 
comparison]/{warehouse_dir}/x], Append, `spark_catalog`.`default`.`x`, 
org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included 
in comparison]/{warehouse_dir}/x), [id]
++- Project [cast(col1#x as int) AS id#x]
+   +- LocalRelation [col1#x]
+
+
+-- !query
+SELECT * from x
+-- !query analysis
+Project [id#x]
++- SubqueryAlias spark_catalog.default.x
+   +- Relation spark_catalog.default.x[id#x] csv
+
+
+-- !query
+SET VAR sql_string = 'SELECT * from tbl_view where name = :first or id = 
:second'
+-- !query analysis
+SetVariable [variablereference(system.session.sql_string='SELECT * from 
tbl_view where name = ? or name = ?')]
++- Project [SELECT * from tbl_view where name = :first or id = :second AS 
sql_string#x]
+   +- OneRowRelation
+
+
+-- !query
+DECLARE b INT
+-- !query analysis
+CreateVariable defaultvalueexpression(null, null), false
++- ResolvedIdentifier 
org.apache.spark.sql.catalyst.analysis.FakeSystemCatalog$@xxxxxxxx, session.b
+
+
+-- !query
+SET VAR b = 40
+-- !query analysis
+SetVariable [variablereference(system.session.b=CAST(NULL AS INT))]
++- Project [40 AS b#x]
+   +- OneRowRelation
+
+
+-- !query
+EXECUTE IMMEDIATE sql_string USING 40 as second, 'name7' as first
+-- !query analysis
+Project [id#x, name#x, data#x]
++- Filter ((name#x = name7) OR (id#x = 40))
+   +- SubqueryAlias tbl_view
+      +- View (`tbl_view`, [id#x,name#x,data#x])
+         +- Project [cast(id#x as int) AS id#x, cast(name#x as string) AS 
name#x, cast(data#x as struct<f1:int,s2:struct<f2:int,f3:string>>) AS data#x]
+            +- Project [id#x, name#x, data#x]
+               +- SubqueryAlias tbl_view
+                  +- LocalRelation [id#x, name#x, data#x]
+
+
+-- !query
+EXECUTE IMMEDIATE sql_string USING b as second, 'name7' as first
+-- !query analysis
+Project [id#x, name#x, data#x]
++- Filter ((name#x = name7) OR (id#x = variablereference(system.session.b=40)))
+   +- SubqueryAlias tbl_view
+      +- View (`tbl_view`, [id#x,name#x,data#x])
+         +- Project [cast(id#x as int) AS id#x, cast(name#x as string) AS 
name#x, cast(data#x as struct<f1:int,s2:struct<f2:int,f3:string>>) AS data#x]
+            +- Project [id#x, name#x, data#x]
+               +- SubqueryAlias tbl_view
+                  +- LocalRelation [id#x, name#x, data#x]
+
+
+-- !query
+EXECUTE IMMEDIATE 'SELECT * from tbl_view where name = :first or id = :second' 
USING 40 as second, 'name7' as first
+-- !query analysis
+Project [id#x, name#x, data#x]
++- Filter ((name#x = name7) OR (id#x = 40))
+   +- SubqueryAlias tbl_view
+      +- View (`tbl_view`, [id#x,name#x,data#x])
+         +- Project [cast(id#x as int) AS id#x, cast(name#x as string) AS 
name#x, cast(data#x as struct<f1:int,s2:struct<f2:int,f3:string>>) AS data#x]
+            +- Project [id#x, name#x, data#x]
+               +- SubqueryAlias tbl_view
+                  +- LocalRelation [id#x, name#x, data#x]
+
+
+-- !query
+EXECUTE IMMEDIATE 'SELECT * from tbl_view where name = :first or id = :second' 
USING 'name7' as first, b as second
+-- !query analysis
+Project [id#x, name#x, data#x]
++- Filter ((name#x = name7) OR (id#x = variablereference(system.session.b=40)))
+   +- SubqueryAlias tbl_view
+      +- View (`tbl_view`, [id#x,name#x,data#x])
+         +- Project [cast(id#x as int) AS id#x, cast(name#x as string) AS 
name#x, cast(data#x as struct<f1:int,s2:struct<f2:int,f3:string>>) AS data#x]
+            +- Project [id#x, name#x, data#x]
+               +- SubqueryAlias tbl_view
+                  +- LocalRelation [id#x, name#x, data#x]
+
+
+-- !query
+EXECUTE IMMEDIATE 'SELECT tbl_view.*, :first as p FROM tbl_view WHERE name = 
:first' USING 'name7' as first
+-- !query analysis
+Project [id#x, name#x, data#x, name7 AS p#x]
++- Filter (name#x = name7)
+   +- SubqueryAlias tbl_view
+      +- View (`tbl_view`, [id#x,name#x,data#x])
+         +- Project [cast(id#x as int) AS id#x, cast(name#x as string) AS 
name#x, cast(data#x as struct<f1:int,s2:struct<f2:int,f3:string>>) AS data#x]
+            +- Project [id#x, name#x, data#x]
+               +- SubqueryAlias tbl_view
+                  +- LocalRelation [id#x, name#x, data#x]
+
+
+-- !query
+EXECUTE IMMEDIATE 'SET VAR sql_string = ?' USING 'SELECT id from tbl_view 
where name = :first'
+-- !query analysis
+SetVariable [variablereference(system.session.sql_string='SELECT * from 
tbl_view where name = :first or id = :second')]
++- Project [SELECT id from tbl_view where name = :first AS sql_string#x]
+   +- OneRowRelation
+
+
+-- !query
+SELECT sql_string
+-- !query analysis
+Project [variablereference(system.session.sql_string='SELECT id from tbl_view 
where name = :first') AS sql_string#x]
++- OneRowRelation
+
+
+-- !query
+DECLARE res_id INT
+-- !query analysis
+CreateVariable defaultvalueexpression(null, null), false
++- ResolvedIdentifier 
org.apache.spark.sql.catalyst.analysis.FakeSystemCatalog$@xxxxxxxx, 
session.res_id
+
+
+-- !query
+EXECUTE IMMEDIATE sql_string INTO res_id USING 'name7' as first
+-- !query analysis
+SetVariable [variablereference(system.session.res_id=CAST(NULL AS INT))]
++- GlobalLimit 2
+   +- LocalLimit 2
+      +- Project [id#x]
+         +- Filter (name#x = name7)
+            +- SubqueryAlias tbl_view
+               +- View (`tbl_view`, [id#x,name#x,data#x])
+                  +- Project [cast(id#x as int) AS id#x, cast(name#x as 
string) AS name#x, cast(data#x as struct<f1:int,s2:struct<f2:int,f3:string>>) 
AS data#x]
+                     +- Project [id#x, name#x, data#x]
+                        +- SubqueryAlias tbl_view
+                           +- LocalRelation [id#x, name#x, data#x]
+
+
+-- !query
+SELECT res_id
+-- !query analysis
+Project [variablereference(system.session.res_id=70) AS res_id#x]
++- OneRowRelation
+
+
+-- !query
+EXECUTE IMMEDIATE sql_string INTO res_id USING a as first
+-- !query analysis
+SetVariable [variablereference(system.session.res_id=70)]
++- GlobalLimit 2
+   +- LocalLimit 2
+      +- Project [id#x]
+         +- Filter (name#x = variablereference(system.session.a='name1'))
+            +- SubqueryAlias tbl_view
+               +- View (`tbl_view`, [id#x,name#x,data#x])
+                  +- Project [cast(id#x as int) AS id#x, cast(name#x as 
string) AS name#x, cast(data#x as struct<f1:int,s2:struct<f2:int,f3:string>>) 
AS data#x]
+                     +- Project [id#x, name#x, data#x]
+                        +- SubqueryAlias tbl_view
+                           +- LocalRelation [id#x, name#x, data#x]
+
+
+-- !query
+SELECT res_id
+-- !query analysis
+Project [variablereference(system.session.res_id=10) AS res_id#x]
++- OneRowRelation
+
+
+-- !query
+SET VAR sql_string = 'SELECT * from tbl_view where name = :first or id = 
:second'
+-- !query analysis
+SetVariable [variablereference(system.session.sql_string='SELECT id from 
tbl_view where name = :first')]
++- Project [SELECT * from tbl_view where name = :first or id = :second AS 
sql_string#x]
+   +- OneRowRelation
+
+
+-- !query
+EXECUTE IMMEDIATE 'SELECT 42' INTO res_id
+-- !query analysis
+SetVariable [variablereference(system.session.res_id=10)]
++- Project [42 AS 42#x]
+   +- OneRowRelation
+
+
+-- !query
+SELECT res_id
+-- !query analysis
+Project [variablereference(system.session.res_id=42) AS res_id#x]
++- OneRowRelation
+
+
+-- !query
+EXECUTE IMMEDIATE 'SELECT id, name FROM tbl_view WHERE id = ?' INTO b, a USING 
10
+-- !query analysis
+SetVariable [variablereference(system.session.b=40), 
variablereference(system.session.a='name1')]
++- GlobalLimit 2
+   +- LocalLimit 2
+      +- Project [id#x, name#x]
+         +- Filter (id#x = 10)
+            +- SubqueryAlias tbl_view
+               +- View (`tbl_view`, [id#x,name#x,data#x])
+                  +- Project [cast(id#x as int) AS id#x, cast(name#x as 
string) AS name#x, cast(data#x as struct<f1:int,s2:struct<f2:int,f3:string>>) 
AS data#x]
+                     +- Project [id#x, name#x, data#x]
+                        +- SubqueryAlias tbl_view
+                           +- LocalRelation [id#x, name#x, data#x]
+
+
+-- !query
+SELECT b, a
+-- !query analysis
+Project [variablereference(system.session.b=10) AS b#x, 
variablereference(system.session.a='name1') AS a#x]
++- OneRowRelation
+
+
+-- !query
+EXECUTE IMMEDIATE 'SELECT * FROM tbl_view where id = ? AND name = ?' USING b 
as first, a
+-- !query analysis
+Project [id#x, name#x, data#x]
++- Filter ((id#x = variablereference(system.session.b=10)) AND (name#x = 
variablereference(system.session.a='name1')))
+   +- SubqueryAlias tbl_view
+      +- View (`tbl_view`, [id#x,name#x,data#x])
+         +- Project [cast(id#x as int) AS id#x, cast(name#x as string) AS 
name#x, cast(data#x as struct<f1:int,s2:struct<f2:int,f3:string>>) AS data#x]
+            +- Project [id#x, name#x, data#x]
+               +- SubqueryAlias tbl_view
+                  +- LocalRelation [id#x, name#x, data#x]
+
+
+-- !query
+EXECUTE IMMEDIATE 'SELECT 42 WHERE 2 = 1' INTO res_id
+-- !query analysis
+SetVariable [variablereference(system.session.res_id=42)]
++- Project [42 AS 42#x]
+   +- Filter (2 = 1)
+      +- OneRowRelation
+
+
+-- !query
+SELECT res_id
+-- !query analysis
+Project [variablereference(system.session.res_id=CAST(NULL AS INT)) AS 
res_id#x]
++- OneRowRelation
+
+
+-- !query
+EXECUTE IMMEDIATE 'SELECT \'1707\'' INTO res_id
+-- !query analysis
+SetVariable [variablereference(system.session.res_id=CAST(NULL AS INT))]
++- Project [cast(1707#x as int) AS res_id#x]
+   +- Project [1707 AS 1707#x]
+      +- OneRowRelation
+
+
+-- !query
+SELECT res_id
+-- !query analysis
+Project [variablereference(system.session.res_id=1707) AS res_id#x]
++- OneRowRelation
+
+
+-- !query
+EXECUTE IMMEDIATE 'SELECT \'invalid_cast_error_expected\'' INTO res_id
+-- !query analysis
+org.apache.spark.SparkNumberFormatException
+{
+  "errorClass" : "CAST_INVALID_INPUT",
+  "sqlState" : "22018",
+  "messageParameters" : {
+    "ansiConfig" : "\"spark.sql.ansi.enabled\"",
+    "expression" : "'invalid_cast_error_expected'",
+    "sourceType" : "\"STRING\"",
+    "targetType" : "\"INT\""
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 1,
+    "stopIndex" : 70,
+    "fragment" : "EXECUTE IMMEDIATE 'SELECT \\'invalid_cast_error_expected\\'' 
INTO res_id"
+  } ]
+}
+
+
+-- !query
+EXECUTE IMMEDIATE 'INSERT INTO x VALUES (?)' INTO res_id USING 1
+-- !query analysis
+org.apache.spark.sql.AnalysisException
+{
+  "errorClass" : "INVALID_STATEMENT_FOR_EXECUTE_INTO",
+  "sqlState" : "07501",
+  "messageParameters" : {
+    "sqlString" : "INSERT INTO X VALUES (?)"
+  }
+}
+
+
+-- !query
+EXECUTE IMMEDIATE 'SELECT * FROM tbl_view WHERE ? = id' USING id
+-- !query analysis
+org.apache.spark.sql.AnalysisException
+{
+  "errorClass" : "UNRESOLVED_VARIABLE",
+  "sqlState" : "42883",
+  "messageParameters" : {
+    "searchPath" : "`system`.`session`",
+    "variableName" : "`id`"
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 63,
+    "stopIndex" : 64,
+    "fragment" : "id"
+  } ]
+}
+
+
+-- !query
+EXECUTE IMMEDIATE 'SELECT * FROM tbl_view where ? = id and :first = name' 
USING 1, 'name2' as first
+-- !query analysis
+org.apache.spark.sql.AnalysisException
+{
+  "errorClass" : "INVALID_QUERY_MIXED_QUERY_PARAMETERS",
+  "sqlState" : "42613"
+}
+
+
+-- !query
+EXECUTE IMMEDIATE 'SELECT * FROM tbl_view where :first = name' USING 1, 
'name2' as first
+-- !query analysis
+org.apache.spark.sql.AnalysisException
+{
+  "errorClass" : "ALL_PARAMETERS_MUST_BE_NAMED",
+  "sqlState" : "07001",
+  "messageParameters" : {
+    "exprs" : "\"1\""
+  }
+}
+
+
+-- !query
+EXECUTE IMMEDIATE 'SELCT Fa'
+-- !query analysis
+org.apache.spark.sql.catalyst.parser.ParseException
+{
+  "errorClass" : "PARSE_SYNTAX_ERROR",
+  "sqlState" : "42601",
+  "messageParameters" : {
+    "error" : "'SELCT'",
+    "hint" : ""
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 1,
+    "stopIndex" : 28,
+    "fragment" : "EXECUTE IMMEDIATE 'SELCT Fa'"
+  } ]
+}
+
+
+-- !query
+EXECUTE IMMEDIATE 'SELCT Fa' INTO res_id
+-- !query analysis
+org.apache.spark.sql.catalyst.parser.ParseException
+{
+  "errorClass" : "PARSE_SYNTAX_ERROR",
+  "sqlState" : "42601",
+  "messageParameters" : {
+    "error" : "'SELCT'",
+    "hint" : ""
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 1,
+    "stopIndex" : 40,
+    "fragment" : "EXECUTE IMMEDIATE 'SELCT Fa' INTO res_id"
+  } ]
+}
+
+
+-- !query
+EXECUTE IMMEDIATE b
+-- !query analysis
+org.apache.spark.sql.AnalysisException
+{
+  "errorClass" : "INVALID_VARIABLE_TYPE_FOR_QUERY_EXECUTE_IMMEDIATE",
+  "sqlState" : "42K09",
+  "messageParameters" : {
+    "varType" : "\"INT\""
+  }
+}
+
+
+-- !query
+SET VAR sql_string = 'SELECT * from tbl_view where name = :first or id = 
:second'
+-- !query analysis
+SetVariable [variablereference(system.session.sql_string='SELECT * from 
tbl_view where name = :first or id = :second')]
++- Project [SELECT * from tbl_view where name = :first or id = :second AS 
sql_string#x]
+   +- OneRowRelation
+
+
+-- !query
+SET VAR a = 'na'
+-- !query analysis
+SetVariable [variablereference(system.session.a='name1')]
++- Project [na AS a#x]
+   +- OneRowRelation
+
+
+-- !query
+EXECUTE IMMEDIATE 'SELECT * from tbl_view where name = :first' USING CONCAT(a 
, "me1") as first
+-- !query analysis
+org.apache.spark.sql.AnalysisException
+{
+  "errorClass" : "UNSUPPORTED_EXPR_FOR_PARAMETER",
+  "sqlState" : "42K0E",
+  "messageParameters" : {
+    "invalidExprSql" : "\"CONCAT(a, me1)\""
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 70,
+    "stopIndex" : 86,
+    "fragment" : "CONCAT(a , \"me1\")"
+  } ]
+}
+
+
+-- !query
+EXECUTE IMMEDIATE 'SELECT * from tbl_view where name = :first' USING (SELECT 
42) as first, 'name2' as second
+-- !query analysis
+org.apache.spark.sql.AnalysisException
+{
+  "errorClass" : "UNSUPPORTED_EXPR_FOR_PARAMETER",
+  "sqlState" : "42K0E",
+  "messageParameters" : {
+    "invalidExprSql" : "\"scalarsubquery()\""
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 70,
+    "stopIndex" : 80,
+    "fragment" : "(SELECT 42)"
+  } ]
+}
+
+
+-- !query
+EXECUTE IMMEDIATE 'SELECT id, name FROM tbl_view WHERE id = ?' INTO a, b USING 
10
+-- !query analysis
+org.apache.spark.SparkNumberFormatException
+{
+  "errorClass" : "CAST_INVALID_INPUT",
+  "sqlState" : "22018",
+  "messageParameters" : {
+    "ansiConfig" : "\"spark.sql.ansi.enabled\"",
+    "expression" : "'name1'",
+    "sourceType" : "\"STRING\"",
+    "targetType" : "\"INT\""
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 1,
+    "stopIndex" : 81,
+    "fragment" : "EXECUTE IMMEDIATE 'SELECT id, name FROM tbl_view WHERE id = 
?' INTO a, b USING 10"
+  } ]
+}
+
+
+-- !query
+EXECUTE IMMEDIATE 'SELECT id, name FROM tbl_view WHERE id = ?' INTO (a, b) 
USING 10
+-- !query analysis
+org.apache.spark.sql.catalyst.parser.ParseException
+{
+  "errorClass" : "PARSE_SYNTAX_ERROR",
+  "sqlState" : "42601",
+  "messageParameters" : {
+    "error" : "'('",
+    "hint" : ""
+  }
+}
+
+
+-- !query
+EXECUTE IMMEDIATE 'SELECT id FROM tbl_view' INTO res_id
+-- !query analysis
+org.apache.spark.SparkException
+{
+  "errorClass" : "ROW_SUBQUERY_TOO_MANY_ROWS",
+  "sqlState" : "21000"
+}
+
+
+-- !query
+EXECUTE IMMEDIATE 'SELECT id, data.f1 FROM tbl_view' INTO res_id
+-- !query analysis
+org.apache.spark.sql.AnalysisException
+{
+  "errorClass" : "ASSIGNMENT_ARITY_MISMATCH",
+  "sqlState" : "42802",
+  "messageParameters" : {
+    "numExpr" : "2",
+    "numTarget" : "1"
+  }
+}
+
+
+-- !query
+EXECUTE IMMEDIATE 'SELECT id FROM tbl_view' INTO res_id, b
+-- !query analysis
+org.apache.spark.sql.AnalysisException
+{
+  "errorClass" : "ASSIGNMENT_ARITY_MISMATCH",
+  "sqlState" : "42802",
+  "messageParameters" : {
+    "numExpr" : "1",
+    "numTarget" : "2"
+  }
+}
+
+
+-- !query
+EXECUTE IMMEDIATE 'SELECT id FROM tbl_view WHERE id = :first' USING 10 as 
first, 20 as first
+-- !query analysis
+org.apache.spark.sql.catalyst.parser.ParseException
+{
+  "errorClass" : "EXEC_IMMEDIATE_DUPLICATE_ARGUMENT_ALIASES",
+  "sqlState" : "42701",
+  "messageParameters" : {
+    "aliases" : "`first`"
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 63,
+    "stopIndex" : 92,
+    "fragment" : "USING 10 as first, 20 as first"
+  } ]
+}
+
+
+-- !query
+EXECUTE IMMEDIATE 'SELECT id, data.f1 FROM tbl_view WHERE id = 10' INTO 
res_id, res_id
+-- !query analysis
+org.apache.spark.sql.AnalysisException
+{
+  "errorClass" : "DUPLICATE_ASSIGNMENTS",
+  "sqlState" : "42701",
+  "messageParameters" : {
+    "nameList" : "`res_id`"
+  }
+}
+
+
+-- !query
+EXECUTE IMMEDIATE 'EXECUTE IMMEDIATE \'SELECT id FROM tbl_view WHERE id = ? 
USING 10\''
+-- !query analysis
+org.apache.spark.sql.AnalysisException
+{
+  "errorClass" : "NESTED_EXECUTE_IMMEDIATE",
+  "sqlState" : "07501",
+  "messageParameters" : {
+    "sqlString" : "EXECUTE IMMEDIATE 'SELECT ID FROM TBL_VIEW WHERE ID = ? 
USING 10'"
+  }
+}
+
+
+-- !query
+DROP TABLE x
+-- !query analysis
+DropTable false, false
++- ResolvedIdentifier V2SessionCatalog(spark_catalog), default.x
diff --git a/sql/core/src/test/resources/sql-tests/inputs/execute-immediate.sql 
b/sql/core/src/test/resources/sql-tests/inputs/execute-immediate.sql
new file mode 100644
index 000000000000..421d7dafcaaf
--- /dev/null
+++ b/sql/core/src/test/resources/sql-tests/inputs/execute-immediate.sql
@@ -0,0 +1,138 @@
+-- Automatically generated by SQLQueryTestSuite
+-- !query
+CREATE TEMPORARY VIEW tbl_view AS SELECT * FROM VALUES
+  (10, 'name1', named_struct('f1', 1, 's2', named_struct('f2', 101, 'f3', 
'a'))),
+  (20, 'name2', named_struct('f1', 2, 's2', named_struct('f2', 202, 'f3', 
'b'))),
+  (30, 'name3', named_struct('f1', 3, 's2', named_struct('f2', 303, 'f3', 
'c'))),
+  (40, 'name4', named_struct('f1', 4, 's2', named_struct('f2', 404, 'f3', 
'd'))),
+  (50, 'name5', named_struct('f1', 5, 's2', named_struct('f2', 505, 'f3', 
'e'))),
+  (60, 'name6', named_struct('f1', 6, 's2', named_struct('f2', 606, 'f3', 
'f'))),
+  (70, 'name7', named_struct('f1', 7, 's2', named_struct('f2', 707, 'f3', 
'g')))
+AS tbl_view(id, name, data);
+CREATE TABLE x (id INT) USING csv;
+
+DECLARE sql_string STRING;
+SET VAR sql_string = 'SELECT * from tbl_view where name = \'name1\'';
+
+-- test commands
+EXECUTE IMMEDIATE 'SET spark.sql.ansi.enabled=true';
+EXECUTE IMMEDIATE 'CREATE TEMPORARY VIEW IDENTIFIER(:tblName) AS SELECT id, 
name FROM tbl_view' USING 'tbl_view_tmp' as tblName;
+EXECUTE IMMEDIATE 'SELECT * FROM tbl_view_tmp';
+
+EXECUTE IMMEDIATE 'REFRESH TABLE IDENTIFIER(:tblName)' USING 'x' as tblName;
+
+-- test execute immediate without parameters
+EXECUTE IMMEDIATE sql_string;
+EXECUTE IMMEDIATE 'SELECT * from tbl_view where name = \'name1\'';
+
+-- test positional parameters
+SET VAR sql_string = 'SELECT * from tbl_view where name = ? or name = ?';
+DECLARE a STRING;
+SET VAR a = 'name1';
+EXECUTE IMMEDIATE sql_string USING 'name1', 'name3';
+EXECUTE IMMEDIATE sql_string USING a, 'name2';
+EXECUTE IMMEDIATE 'SELECT * from tbl_view where name = ? or name = ?' USING 
'name1', 'name3';
+EXECUTE IMMEDIATE 'SELECT * from tbl_view where name = ? or name = ?' USING a, 
'name2';
+EXECUTE IMMEDIATE 'SELECT * from tbl_view where name = ? or name = ?' USING 
(a, 'name2');
+-- test positional command
+EXECUTE IMMEDIATE 'INSERT INTO x VALUES(?)' USING 1;
+SELECT * from x;
+
+-- test named parameters
+SET VAR sql_string = 'SELECT * from tbl_view where name = :first or id = 
:second';
+DECLARE b INT;
+SET VAR b = 40;
+EXECUTE IMMEDIATE sql_string USING 40 as second, 'name7' as first;
+EXECUTE IMMEDIATE sql_string USING b as second, 'name7' as first;
+EXECUTE IMMEDIATE 'SELECT * from tbl_view where name = :first or id = :second' 
USING 40 as second, 'name7' as first;
+EXECUTE IMMEDIATE 'SELECT * from tbl_view where name = :first or id = :second' 
USING 'name7' as first, b as second;
+-- named parameter used multiple times
+EXECUTE IMMEDIATE 'SELECT tbl_view.*, :first as p FROM tbl_view WHERE name = 
:first' USING 'name7' as first;
+
+-- test named command and setup for next test
+EXECUTE IMMEDIATE 'SET VAR sql_string = ?' USING 'SELECT id from tbl_view 
where name = :first';
+SELECT sql_string;
+
+-- test into
+DECLARE res_id INT;
+EXECUTE IMMEDIATE sql_string INTO res_id USING 'name7' as first;
+SELECT res_id;
+EXECUTE IMMEDIATE sql_string INTO res_id USING a as first;
+SELECT res_id;
+
+-- test into without using
+SET VAR sql_string = 'SELECT * from tbl_view where name = :first or id = 
:second';
+EXECUTE IMMEDIATE 'SELECT 42' INTO res_id;
+SELECT res_id;
+
+-- multiple INTOs
+EXECUTE IMMEDIATE 'SELECT id, name FROM tbl_view WHERE id = ?' INTO b, a USING 
10;
+SELECT b, a;
+
+-- use AS for using positional params
+EXECUTE IMMEDIATE 'SELECT * FROM tbl_view where id = ? AND name = ?' USING b 
as first, a;
+
+-- empty query
+EXECUTE IMMEDIATE 'SELECT 42 WHERE 2 = 1' INTO res_id;
+SELECT res_id;
+
+-- implicit casting
+EXECUTE IMMEDIATE 'SELECT \'1707\'' INTO res_id;
+SELECT res_id;
+
+-- test errors
+-- string to int error
+EXECUTE IMMEDIATE 'SELECT \'invalid_cast_error_expected\'' INTO res_id;
+
+-- require query when using INTO
+EXECUTE IMMEDIATE 'INSERT INTO x VALUES (?)' INTO res_id USING 1;
+
+-- use column in using - should fail as we expect variable here
+EXECUTE IMMEDIATE 'SELECT * FROM tbl_view WHERE ? = id' USING id;
+
+-- either positional or named parameters must be used
+EXECUTE IMMEDIATE 'SELECT * FROM tbl_view where ? = id and :first = name' 
USING 1, 'name2' as first;
+
+-- all parameters must be named
+EXECUTE IMMEDIATE 'SELECT * FROM tbl_view where :first = name' USING 1, 
'name2' as first;
+
+-- internal syntax error
+EXECUTE IMMEDIATE 'SELCT Fa';
+
+-- internal syntax error - test that both parseQuery and parsePlan fail
+EXECUTE IMMEDIATE 'SELCT Fa' INTO res_id;
+
+-- Parameter passed must be STRING
+EXECUTE IMMEDIATE b;
+
+-- test expressions should fail with parser error
+SET VAR sql_string = 'SELECT * from tbl_view where name = :first or id = 
:second';
+SET VAR a = 'na';
+
+-- expressions not supported - feature not supported
+EXECUTE IMMEDIATE 'SELECT * from tbl_view where name = :first' USING CONCAT(a 
, "me1") as first;
+EXECUTE IMMEDIATE 'SELECT * from tbl_view where name = :first' USING (SELECT 
42) as first, 'name2' as second;
+
+-- INTO variables not matching scalar types
+EXECUTE IMMEDIATE 'SELECT id, name FROM tbl_view WHERE id = ?' INTO a, b USING 
10;
+
+-- INTO does not support braces - parser error
+EXECUTE IMMEDIATE 'SELECT id, name FROM tbl_view WHERE id = ?' INTO (a, b) 
USING 10;
+
+-- Error too many rows
+EXECUTE IMMEDIATE 'SELECT id FROM tbl_view' INTO res_id;
+
+-- Error mismatch cardinality
+EXECUTE IMMEDIATE 'SELECT id, data.f1 FROM tbl_view' INTO res_id;
+EXECUTE IMMEDIATE 'SELECT id FROM tbl_view' INTO res_id, b;
+
+-- duplicate aliases
+EXECUTE IMMEDIATE 'SELECT id FROM tbl_view WHERE id = :first' USING 10 as 
first, 20 as first;
+
+-- duplicate into entry
+EXECUTE IMMEDIATE 'SELECT id, data.f1 FROM tbl_view WHERE id = 10' INTO 
res_id, res_id;
+
+-- nested execute immediate
+EXECUTE IMMEDIATE 'EXECUTE IMMEDIATE \'SELECT id FROM tbl_view WHERE id = ? 
USING 10\'';
+
+DROP TABLE x;
\ No newline at end of file
diff --git 
a/sql/core/src/test/resources/sql-tests/results/ansi/keywords.sql.out 
b/sql/core/src/test/resources/sql-tests/results/ansi/keywords.sql.out
index 10fcee146939..87f9d262ec70 100644
--- a/sql/core/src/test/resources/sql-tests/results/ansi/keywords.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ansi/keywords.sql.out
@@ -99,6 +99,7 @@ ESCAPED       false
 EXCEPT true
 EXCHANGE       false
 EXCLUDE        false
+EXECUTE        true
 EXISTS false
 EXPLAIN        false
 EXPORT false
@@ -133,6 +134,7 @@ IDENTIFIER  false
 IF     false
 IGNORE false
 ILIKE  false
+IMMEDIATE      false
 IMPORT false
 IN     true
 INCLUDE        false
@@ -366,6 +368,7 @@ ELSE
 END
 ESCAPE
 EXCEPT
+EXECUTE
 FALSE
 FETCH
 FILTER
diff --git 
a/sql/core/src/test/resources/sql-tests/results/execute-immediate.sql.out 
b/sql/core/src/test/resources/sql-tests/results/execute-immediate.sql.out
new file mode 100644
index 000000000000..8e4dcb5161d5
--- /dev/null
+++ b/sql/core/src/test/resources/sql-tests/results/execute-immediate.sql.out
@@ -0,0 +1,738 @@
+-- Automatically generated by SQLQueryTestSuite
+-- !query
+CREATE TEMPORARY VIEW tbl_view AS SELECT * FROM VALUES
+  (10, 'name1', named_struct('f1', 1, 's2', named_struct('f2', 101, 'f3', 
'a'))),
+  (20, 'name2', named_struct('f1', 2, 's2', named_struct('f2', 202, 'f3', 
'b'))),
+  (30, 'name3', named_struct('f1', 3, 's2', named_struct('f2', 303, 'f3', 
'c'))),
+  (40, 'name4', named_struct('f1', 4, 's2', named_struct('f2', 404, 'f3', 
'd'))),
+  (50, 'name5', named_struct('f1', 5, 's2', named_struct('f2', 505, 'f3', 
'e'))),
+  (60, 'name6', named_struct('f1', 6, 's2', named_struct('f2', 606, 'f3', 
'f'))),
+  (70, 'name7', named_struct('f1', 7, 's2', named_struct('f2', 707, 'f3', 
'g')))
+AS tbl_view(id, name, data)
+-- !query schema
+struct<>
+-- !query output
+
+
+
+-- !query
+CREATE TABLE x (id INT) USING csv
+-- !query schema
+struct<>
+-- !query output
+
+
+
+-- !query
+DECLARE sql_string STRING
+-- !query schema
+struct<>
+-- !query output
+
+
+
+-- !query
+SET VAR sql_string = 'SELECT * from tbl_view where name = \'name1\''
+-- !query schema
+struct<>
+-- !query output
+
+
+
+-- !query
+EXECUTE IMMEDIATE 'SET spark.sql.ansi.enabled=true'
+-- !query schema
+struct<key:string,value:string>
+-- !query output
+spark.sql.ansi.enabled true
+
+
+-- !query
+EXECUTE IMMEDIATE 'CREATE TEMPORARY VIEW IDENTIFIER(:tblName) AS SELECT id, 
name FROM tbl_view' USING 'tbl_view_tmp' as tblName
+-- !query schema
+struct<>
+-- !query output
+
+
+
+-- !query
+EXECUTE IMMEDIATE 'SELECT * FROM tbl_view_tmp'
+-- !query schema
+struct<id:int,name:string>
+-- !query output
+10     name1
+20     name2
+30     name3
+40     name4
+50     name5
+60     name6
+70     name7
+
+
+-- !query
+EXECUTE IMMEDIATE 'REFRESH TABLE IDENTIFIER(:tblName)' USING 'x' as tblName
+-- !query schema
+struct<>
+-- !query output
+
+
+
+-- !query
+EXECUTE IMMEDIATE sql_string
+-- !query schema
+struct<id:int,name:string,data:struct<f1:int,s2:struct<f2:int,f3:string>>>
+-- !query output
+10     name1   {"f1":1,"s2":{"f2":101,"f3":"a"}}
+
+
+-- !query
+EXECUTE IMMEDIATE 'SELECT * from tbl_view where name = \'name1\''
+-- !query schema
+struct<id:int,name:string,data:struct<f1:int,s2:struct<f2:int,f3:string>>>
+-- !query output
+10     name1   {"f1":1,"s2":{"f2":101,"f3":"a"}}
+
+
+-- !query
+SET VAR sql_string = 'SELECT * from tbl_view where name = ? or name = ?'
+-- !query schema
+struct<>
+-- !query output
+
+
+
+-- !query
+DECLARE a STRING
+-- !query schema
+struct<>
+-- !query output
+
+
+
+-- !query
+SET VAR a = 'name1'
+-- !query schema
+struct<>
+-- !query output
+
+
+
+-- !query
+EXECUTE IMMEDIATE sql_string USING 'name1', 'name3'
+-- !query schema
+struct<id:int,name:string,data:struct<f1:int,s2:struct<f2:int,f3:string>>>
+-- !query output
+10     name1   {"f1":1,"s2":{"f2":101,"f3":"a"}}
+30     name3   {"f1":3,"s2":{"f2":303,"f3":"c"}}
+
+
+-- !query
+EXECUTE IMMEDIATE sql_string USING a, 'name2'
+-- !query schema
+struct<id:int,name:string,data:struct<f1:int,s2:struct<f2:int,f3:string>>>
+-- !query output
+10     name1   {"f1":1,"s2":{"f2":101,"f3":"a"}}
+20     name2   {"f1":2,"s2":{"f2":202,"f3":"b"}}
+
+
+-- !query
+EXECUTE IMMEDIATE 'SELECT * from tbl_view where name = ? or name = ?' USING 
'name1', 'name3'
+-- !query schema
+struct<id:int,name:string,data:struct<f1:int,s2:struct<f2:int,f3:string>>>
+-- !query output
+10     name1   {"f1":1,"s2":{"f2":101,"f3":"a"}}
+30     name3   {"f1":3,"s2":{"f2":303,"f3":"c"}}
+
+
+-- !query
+EXECUTE IMMEDIATE 'SELECT * from tbl_view where name = ? or name = ?' USING a, 
'name2'
+-- !query schema
+struct<id:int,name:string,data:struct<f1:int,s2:struct<f2:int,f3:string>>>
+-- !query output
+10     name1   {"f1":1,"s2":{"f2":101,"f3":"a"}}
+20     name2   {"f1":2,"s2":{"f2":202,"f3":"b"}}
+
+
+-- !query
+EXECUTE IMMEDIATE 'SELECT * from tbl_view where name = ? or name = ?' USING 
(a, 'name2')
+-- !query schema
+struct<id:int,name:string,data:struct<f1:int,s2:struct<f2:int,f3:string>>>
+-- !query output
+10     name1   {"f1":1,"s2":{"f2":101,"f3":"a"}}
+20     name2   {"f1":2,"s2":{"f2":202,"f3":"b"}}
+
+
+-- !query
+EXECUTE IMMEDIATE 'INSERT INTO x VALUES(?)' USING 1
+-- !query schema
+struct<>
+-- !query output
+
+
+
+-- !query
+SELECT * from x
+-- !query schema
+struct<id:int>
+-- !query output
+1
+
+
+-- !query
+SET VAR sql_string = 'SELECT * from tbl_view where name = :first or id = 
:second'
+-- !query schema
+struct<>
+-- !query output
+
+
+
+-- !query
+DECLARE b INT
+-- !query schema
+struct<>
+-- !query output
+
+
+
+-- !query
+SET VAR b = 40
+-- !query schema
+struct<>
+-- !query output
+
+
+
+-- !query
+EXECUTE IMMEDIATE sql_string USING 40 as second, 'name7' as first
+-- !query schema
+struct<id:int,name:string,data:struct<f1:int,s2:struct<f2:int,f3:string>>>
+-- !query output
+40     name4   {"f1":4,"s2":{"f2":404,"f3":"d"}}
+70     name7   {"f1":7,"s2":{"f2":707,"f3":"g"}}
+
+
+-- !query
+EXECUTE IMMEDIATE sql_string USING b as second, 'name7' as first
+-- !query schema
+struct<id:int,name:string,data:struct<f1:int,s2:struct<f2:int,f3:string>>>
+-- !query output
+40     name4   {"f1":4,"s2":{"f2":404,"f3":"d"}}
+70     name7   {"f1":7,"s2":{"f2":707,"f3":"g"}}
+
+
+-- !query
+EXECUTE IMMEDIATE 'SELECT * from tbl_view where name = :first or id = :second' 
USING 40 as second, 'name7' as first
+-- !query schema
+struct<id:int,name:string,data:struct<f1:int,s2:struct<f2:int,f3:string>>>
+-- !query output
+40     name4   {"f1":4,"s2":{"f2":404,"f3":"d"}}
+70     name7   {"f1":7,"s2":{"f2":707,"f3":"g"}}
+
+
+-- !query
+EXECUTE IMMEDIATE 'SELECT * from tbl_view where name = :first or id = :second' 
USING 'name7' as first, b as second
+-- !query schema
+struct<id:int,name:string,data:struct<f1:int,s2:struct<f2:int,f3:string>>>
+-- !query output
+40     name4   {"f1":4,"s2":{"f2":404,"f3":"d"}}
+70     name7   {"f1":7,"s2":{"f2":707,"f3":"g"}}
+
+
+-- !query
+EXECUTE IMMEDIATE 'SELECT tbl_view.*, :first as p FROM tbl_view WHERE name = 
:first' USING 'name7' as first
+-- !query schema
+struct<id:int,name:string,data:struct<f1:int,s2:struct<f2:int,f3:string>>,p:string>
+-- !query output
+70     name7   {"f1":7,"s2":{"f2":707,"f3":"g"}}       name7
+
+
+-- !query
+EXECUTE IMMEDIATE 'SET VAR sql_string = ?' USING 'SELECT id from tbl_view 
where name = :first'
+-- !query schema
+struct<>
+-- !query output
+
+
+
+-- !query
+SELECT sql_string
+-- !query schema
+struct<sql_string:string>
+-- !query output
+SELECT id from tbl_view where name = :first
+
+
+-- !query
+DECLARE res_id INT
+-- !query schema
+struct<>
+-- !query output
+
+
+
+-- !query
+EXECUTE IMMEDIATE sql_string INTO res_id USING 'name7' as first
+-- !query schema
+struct<>
+-- !query output
+
+
+
+-- !query
+SELECT res_id
+-- !query schema
+struct<res_id:int>
+-- !query output
+70
+
+
+-- !query
+EXECUTE IMMEDIATE sql_string INTO res_id USING a as first
+-- !query schema
+struct<>
+-- !query output
+
+
+
+-- !query
+SELECT res_id
+-- !query schema
+struct<res_id:int>
+-- !query output
+10
+
+
+-- !query
+SET VAR sql_string = 'SELECT * from tbl_view where name = :first or id = 
:second'
+-- !query schema
+struct<>
+-- !query output
+
+
+
+-- !query
+EXECUTE IMMEDIATE 'SELECT 42' INTO res_id
+-- !query schema
+struct<>
+-- !query output
+
+
+
+-- !query
+SELECT res_id
+-- !query schema
+struct<res_id:int>
+-- !query output
+42
+
+
+-- !query
+EXECUTE IMMEDIATE 'SELECT id, name FROM tbl_view WHERE id = ?' INTO b, a USING 
10
+-- !query schema
+struct<>
+-- !query output
+
+
+
+-- !query
+SELECT b, a
+-- !query schema
+struct<b:int,a:string>
+-- !query output
+10     name1
+
+
+-- !query
+EXECUTE IMMEDIATE 'SELECT * FROM tbl_view where id = ? AND name = ?' USING b 
as first, a
+-- !query schema
+struct<id:int,name:string,data:struct<f1:int,s2:struct<f2:int,f3:string>>>
+-- !query output
+10     name1   {"f1":1,"s2":{"f2":101,"f3":"a"}}
+
+
+-- !query
+EXECUTE IMMEDIATE 'SELECT 42 WHERE 2 = 1' INTO res_id
+-- !query schema
+struct<>
+-- !query output
+
+
+
+-- !query
+SELECT res_id
+-- !query schema
+struct<res_id:int>
+-- !query output
+NULL
+
+
+-- !query
+EXECUTE IMMEDIATE 'SELECT \'1707\'' INTO res_id
+-- !query schema
+struct<>
+-- !query output
+
+
+
+-- !query
+SELECT res_id
+-- !query schema
+struct<res_id:int>
+-- !query output
+1707
+
+
+-- !query
+EXECUTE IMMEDIATE 'SELECT \'invalid_cast_error_expected\'' INTO res_id
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.SparkNumberFormatException
+{
+  "errorClass" : "CAST_INVALID_INPUT",
+  "sqlState" : "22018",
+  "messageParameters" : {
+    "ansiConfig" : "\"spark.sql.ansi.enabled\"",
+    "expression" : "'invalid_cast_error_expected'",
+    "sourceType" : "\"STRING\"",
+    "targetType" : "\"INT\""
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 1,
+    "stopIndex" : 70,
+    "fragment" : "EXECUTE IMMEDIATE 'SELECT \\'invalid_cast_error_expected\\'' 
INTO res_id"
+  } ]
+}
+
+
+-- !query
+EXECUTE IMMEDIATE 'INSERT INTO x VALUES (?)' INTO res_id USING 1
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.AnalysisException
+{
+  "errorClass" : "INVALID_STATEMENT_FOR_EXECUTE_INTO",
+  "sqlState" : "07501",
+  "messageParameters" : {
+    "sqlString" : "INSERT INTO X VALUES (?)"
+  }
+}
+
+
+-- !query
+EXECUTE IMMEDIATE 'SELECT * FROM tbl_view WHERE ? = id' USING id
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.AnalysisException
+{
+  "errorClass" : "UNRESOLVED_VARIABLE",
+  "sqlState" : "42883",
+  "messageParameters" : {
+    "searchPath" : "`system`.`session`",
+    "variableName" : "`id`"
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 63,
+    "stopIndex" : 64,
+    "fragment" : "id"
+  } ]
+}
+
+
+-- !query
+EXECUTE IMMEDIATE 'SELECT * FROM tbl_view where ? = id and :first = name' 
USING 1, 'name2' as first
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.AnalysisException
+{
+  "errorClass" : "INVALID_QUERY_MIXED_QUERY_PARAMETERS",
+  "sqlState" : "42613"
+}
+
+
+-- !query
+EXECUTE IMMEDIATE 'SELECT * FROM tbl_view where :first = name' USING 1, 
'name2' as first
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.AnalysisException
+{
+  "errorClass" : "ALL_PARAMETERS_MUST_BE_NAMED",
+  "sqlState" : "07001",
+  "messageParameters" : {
+    "exprs" : "\"1\""
+  }
+}
+
+
+-- !query
+EXECUTE IMMEDIATE 'SELCT Fa'
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.catalyst.parser.ParseException
+{
+  "errorClass" : "PARSE_SYNTAX_ERROR",
+  "sqlState" : "42601",
+  "messageParameters" : {
+    "error" : "'SELCT'",
+    "hint" : ""
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 1,
+    "stopIndex" : 28,
+    "fragment" : "EXECUTE IMMEDIATE 'SELCT Fa'"
+  } ]
+}
+
+
+-- !query
+EXECUTE IMMEDIATE 'SELCT Fa' INTO res_id
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.catalyst.parser.ParseException
+{
+  "errorClass" : "PARSE_SYNTAX_ERROR",
+  "sqlState" : "42601",
+  "messageParameters" : {
+    "error" : "'SELCT'",
+    "hint" : ""
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 1,
+    "stopIndex" : 40,
+    "fragment" : "EXECUTE IMMEDIATE 'SELCT Fa' INTO res_id"
+  } ]
+}
+
+
+-- !query
+EXECUTE IMMEDIATE b
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.AnalysisException
+{
+  "errorClass" : "INVALID_VARIABLE_TYPE_FOR_QUERY_EXECUTE_IMMEDIATE",
+  "sqlState" : "42K09",
+  "messageParameters" : {
+    "varType" : "\"INT\""
+  }
+}
+
+
+-- !query
+SET VAR sql_string = 'SELECT * from tbl_view where name = :first or id = 
:second'
+-- !query schema
+struct<>
+-- !query output
+
+
+
+-- !query
+SET VAR a = 'na'
+-- !query schema
+struct<>
+-- !query output
+
+
+
+-- !query
+EXECUTE IMMEDIATE 'SELECT * from tbl_view where name = :first' USING CONCAT(a 
, "me1") as first
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.AnalysisException
+{
+  "errorClass" : "UNSUPPORTED_EXPR_FOR_PARAMETER",
+  "sqlState" : "42K0E",
+  "messageParameters" : {
+    "invalidExprSql" : "\"CONCAT(a, me1)\""
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 70,
+    "stopIndex" : 86,
+    "fragment" : "CONCAT(a , \"me1\")"
+  } ]
+}
+
+
+-- !query
+EXECUTE IMMEDIATE 'SELECT * from tbl_view where name = :first' USING (SELECT 
42) as first, 'name2' as second
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.AnalysisException
+{
+  "errorClass" : "UNSUPPORTED_EXPR_FOR_PARAMETER",
+  "sqlState" : "42K0E",
+  "messageParameters" : {
+    "invalidExprSql" : "\"scalarsubquery()\""
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 70,
+    "stopIndex" : 80,
+    "fragment" : "(SELECT 42)"
+  } ]
+}
+
+
+-- !query
+EXECUTE IMMEDIATE 'SELECT id, name FROM tbl_view WHERE id = ?' INTO a, b USING 
10
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.SparkNumberFormatException
+{
+  "errorClass" : "CAST_INVALID_INPUT",
+  "sqlState" : "22018",
+  "messageParameters" : {
+    "ansiConfig" : "\"spark.sql.ansi.enabled\"",
+    "expression" : "'name1'",
+    "sourceType" : "\"STRING\"",
+    "targetType" : "\"INT\""
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 1,
+    "stopIndex" : 81,
+    "fragment" : "EXECUTE IMMEDIATE 'SELECT id, name FROM tbl_view WHERE id = 
?' INTO a, b USING 10"
+  } ]
+}
+
+
+-- !query
+EXECUTE IMMEDIATE 'SELECT id, name FROM tbl_view WHERE id = ?' INTO (a, b) 
USING 10
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.catalyst.parser.ParseException
+{
+  "errorClass" : "PARSE_SYNTAX_ERROR",
+  "sqlState" : "42601",
+  "messageParameters" : {
+    "error" : "'('",
+    "hint" : ""
+  }
+}
+
+
+-- !query
+EXECUTE IMMEDIATE 'SELECT id FROM tbl_view' INTO res_id
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.SparkException
+{
+  "errorClass" : "ROW_SUBQUERY_TOO_MANY_ROWS",
+  "sqlState" : "21000"
+}
+
+
+-- !query
+EXECUTE IMMEDIATE 'SELECT id, data.f1 FROM tbl_view' INTO res_id
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.AnalysisException
+{
+  "errorClass" : "ASSIGNMENT_ARITY_MISMATCH",
+  "sqlState" : "42802",
+  "messageParameters" : {
+    "numExpr" : "2",
+    "numTarget" : "1"
+  }
+}
+
+
+-- !query
+EXECUTE IMMEDIATE 'SELECT id FROM tbl_view' INTO res_id, b
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.AnalysisException
+{
+  "errorClass" : "ASSIGNMENT_ARITY_MISMATCH",
+  "sqlState" : "42802",
+  "messageParameters" : {
+    "numExpr" : "1",
+    "numTarget" : "2"
+  }
+}
+
+
+-- !query
+EXECUTE IMMEDIATE 'SELECT id FROM tbl_view WHERE id = :first' USING 10 as 
first, 20 as first
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.catalyst.parser.ParseException
+{
+  "errorClass" : "EXEC_IMMEDIATE_DUPLICATE_ARGUMENT_ALIASES",
+  "sqlState" : "42701",
+  "messageParameters" : {
+    "aliases" : "`first`"
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 63,
+    "stopIndex" : 92,
+    "fragment" : "USING 10 as first, 20 as first"
+  } ]
+}
+
+
+-- !query
+EXECUTE IMMEDIATE 'SELECT id, data.f1 FROM tbl_view WHERE id = 10' INTO 
res_id, res_id
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.AnalysisException
+{
+  "errorClass" : "DUPLICATE_ASSIGNMENTS",
+  "sqlState" : "42701",
+  "messageParameters" : {
+    "nameList" : "`res_id`"
+  }
+}
+
+
+-- !query
+EXECUTE IMMEDIATE 'EXECUTE IMMEDIATE \'SELECT id FROM tbl_view WHERE id = ? 
USING 10\''
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.AnalysisException
+{
+  "errorClass" : "NESTED_EXECUTE_IMMEDIATE",
+  "sqlState" : "07501",
+  "messageParameters" : {
+    "sqlString" : "EXECUTE IMMEDIATE 'SELECT ID FROM TBL_VIEW WHERE ID = ? 
USING 10'"
+  }
+}
+
+
+-- !query
+DROP TABLE x
+-- !query schema
+struct<>
+-- !query output
+
diff --git a/sql/core/src/test/resources/sql-tests/results/keywords.sql.out 
b/sql/core/src/test/resources/sql-tests/results/keywords.sql.out
index be2303a716da..fda2d6ead598 100644
--- a/sql/core/src/test/resources/sql-tests/results/keywords.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/keywords.sql.out
@@ -99,6 +99,7 @@ ESCAPED       false
 EXCEPT false
 EXCHANGE       false
 EXCLUDE        false
+EXECUTE        false
 EXISTS false
 EXPLAIN        false
 EXPORT false
@@ -133,6 +134,7 @@ IDENTIFIER  false
 IF     false
 IGNORE false
 ILIKE  false
+IMMEDIATE      false
 IMPORT false
 IN     false
 INCLUDE        false
diff --git 
a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryParsingErrorsSuite.scala
 
b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryParsingErrorsSuite.scala
index 97d66095f643..d381dae6ea29 100644
--- 
a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryParsingErrorsSuite.scala
+++ 
b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryParsingErrorsSuite.scala
@@ -32,6 +32,33 @@ class QueryParsingErrorsSuite extends QueryTest with 
SharedSparkSession with SQL
     intercept[ParseException](sql(sqlText).collect())
   }
 
+  test("EXEC_IMMEDIATE_DUPLICATE_ARGUMENT_ALIASES: duplicate aliases provided 
in using statement") {
+    val query = "EXECUTE IMMEDIATE 'SELECT 1707 WHERE ? = 1' USING 1 as first" 
+
+      ", 2 as first, 3 as second, 4 as second, 5 as third"
+    checkError(
+      exception = parseException(query),
+      errorClass = "EXEC_IMMEDIATE_DUPLICATE_ARGUMENT_ALIASES",
+      parameters = Map("aliases" -> "`second`, `first`"),
+      context = ExpectedContext(
+        "USING 1 as first, 2 as first, 3 as second, 4 as second, 5 as third",
+        start = 44,
+        stop = 109)
+    )
+  }
+
+  test("PARSE_SYNTAX_ERROR: Execute immediate syntax error with INTO 
specified") {
+    val query = "EXECUTE IMMEDIATE 'SELCT 1707 WHERE ? = 1' INTO a USING 1"
+    checkError(
+      exception = parseException(query),
+      errorClass = "PARSE_SYNTAX_ERROR",
+      parameters = Map("error" -> "'SELCT'", "hint" -> ""),
+      context = ExpectedContext(
+        start = 0,
+        stop = 56,
+        fragment = query)
+    )
+  }
+
   test("NAMED_PARAMETER_SUPPORT_DISABLED: named arguments not turned on") {
     withSQLConf("spark.sql.allowNamedFunctionArguments" -> "false") {
       checkError(
diff --git 
a/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/ThriftServerWithSparkContextSuite.scala
 
b/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/ThriftServerWithSparkContextSuite.scala
index 9bb35bb8719e..800ac69fcfab 100644
--- 
a/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/ThriftServerWithSparkContextSuite.scala
+++ 
b/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/ThriftServerWithSparkContextSuite.scala
@@ -214,7 +214,7 @@ trait ThriftServerWithSparkContextSuite extends 
SharedThriftServer {
       val sessionHandle = client.openSession(user, "")
       val infoValue = client.getInfo(sessionHandle, 
GetInfoType.CLI_ODBC_KEYWORDS)
       // scalastyle:off line.size.limit
-      assert(infoValue.getStringValue == 
"ADD,AFTER,ALL,ALTER,ALWAYS,ANALYZE,AND,ANTI,ANY,ANY_VALUE,ARCHIVE,ARRAY,AS,ASC,AT,AUTHORIZATION,BETWEEN,BIGINT,BINARY,BOOLEAN,BOTH,BUCKET,BUCKETS,BY,BYTE,CACHE,CASCADE,CASE,CAST,CATALOG,CATALOGS,CHANGE,CHAR,CHARACTER,CHECK,CLEAR,CLUSTER,CLUSTERED,CODEGEN,COLLATE,COLLECTION,COLUMN,COLUMNS,COMMENT,COMMIT,COMPACT,COMPACTIONS,COMPUTE,CONCATENATE,CONSTRAINT,COST,CREATE,CROSS,CUBE,CURRENT,CURRENT_DATE,CURRENT_TIME,CURRENT_TIMESTAMP,CURRENT_USER,DATA,DA
 [...]
+      assert(infoValue.getStringValue == 
"ADD,AFTER,ALL,ALTER,ALWAYS,ANALYZE,AND,ANTI,ANY,ANY_VALUE,ARCHIVE,ARRAY,AS,ASC,AT,AUTHORIZATION,BETWEEN,BIGINT,BINARY,BOOLEAN,BOTH,BUCKET,BUCKETS,BY,BYTE,CACHE,CASCADE,CASE,CAST,CATALOG,CATALOGS,CHANGE,CHAR,CHARACTER,CHECK,CLEAR,CLUSTER,CLUSTERED,CODEGEN,COLLATE,COLLECTION,COLUMN,COLUMNS,COMMENT,COMMIT,COMPACT,COMPACTIONS,COMPUTE,CONCATENATE,CONSTRAINT,COST,CREATE,CROSS,CUBE,CURRENT,CURRENT_DATE,CURRENT_TIME,CURRENT_TIMESTAMP,CURRENT_USER,DATA,DA
 [...]
       // scalastyle:on line.size.limit
     }
   }


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org

Reply via email to