This is an automated email from the ASF dual-hosted git repository.
gurwls223 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push:
new 3eaf5b5f98e [SPARK-41720][SQL] Rename UnresolvedFunc to UnresolvedFunctionName
3eaf5b5f98e is described below
commit 3eaf5b5f98ef222780967faff524e907644bcbdd
Author: Wenchen Fan <[email protected]>
AuthorDate: Tue Dec 27 08:10:27 2022 +0900
[SPARK-41720][SQL] Rename UnresolvedFunc to UnresolvedFunctionName
### What changes were proposed in this pull request?
It's a bit confusing to have both `UnresolvedFunc` and
`UnresolvedFunction`. This PR renames `UnresolvedFunc` to
`UnresolvedFunctionName`, and also improves the classdoc a bit.
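As a rough illustration (not part of this patch) of the distinction the new name makes explicit, based on the current constructor signatures; the example function name and arguments are made up:
```scala
import org.apache.spark.sql.catalyst.analysis.{UnresolvedFunction, UnresolvedFunctionName}
import org.apache.spark.sql.catalyst.expressions.Literal

// A function *invocation* in an expression tree, e.g. `upper('a')` in a query.
// The analyzer resolves the arguments, looks the function up, and replaces this
// node with a concrete Expression.
val invocation = UnresolvedFunction(Seq("upper"), Seq(Literal("a")), isDistinct = false)

// Just the *name* of a function, as used by function-related commands such as
// DESCRIBE/DROP/REFRESH FUNCTION. The analyzer resolves it to
// ResolvedPersistentFunc or ResolvedNonPersistentFunc.
val functionName = UnresolvedFunctionName(Seq("upper"), "DESCRIBE FUNCTION", false, None)
```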
### Why are the changes needed?
To avoid confusingly similar class names.
### Does this PR introduce _any_ user-facing change?
no
### How was this patch tested?
existing tests
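For reference, the renamed node shows up in parsed plans roughly like this (condensed from the DESCRIBE FUNCTION case in DDLParserSuite below; `parsePlan` and `comparePlans` are the usual AnalysisTest helpers):
```scala
import org.apache.spark.sql.catalyst.analysis.UnresolvedFunctionName
import org.apache.spark.sql.catalyst.plans.logical.DescribeFunction

// Inside DDLParserSuite: `DESC FUNCTION a` parses to a DescribeFunction command
// whose child is the still-unresolved function name.
comparePlans(
  parsePlan("DESC FUNCTION a"),
  DescribeFunction(
    UnresolvedFunctionName(Seq("a"), "DESCRIBE FUNCTION", false, None),
    false))
```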
Closes #39222 from cloud-fan/name.
Authored-by: Wenchen Fan <[email protected]>
Signed-off-by: Hyukjin Kwon <[email protected]>
---
.../scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala | 4 ++--
.../org/apache/spark/sql/catalyst/analysis/CheckAnalysis.scala | 2 +-
.../spark/sql/catalyst/analysis/ResolveCommandsWithIfExists.scala | 2 +-
.../scala/org/apache/spark/sql/catalyst/analysis/unresolved.scala | 5 +++++
.../apache/spark/sql/catalyst/analysis/v2ResolutionPlans.scala | 5 +++--
.../scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala | 4 ++--
.../org/apache/spark/sql/catalyst/parser/DDLParserSuite.scala | 8 ++++----
.../scala/org/apache/spark/sql/execution/SparkSqlParser.scala | 4 ++--
.../main/scala/org/apache/spark/sql/internal/CatalogImpl.scala | 2 +-
.../org/apache/spark/sql/execution/command/DDLParserSuite.scala | 6 +++---
10 files changed, 24 insertions(+), 18 deletions(-)
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
index 7eb911b559b..9dd66492786 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
@@ -2353,7 +2353,7 @@ class Analyzer(override val catalogManager: CatalogManager)
}
/**
- * Replaces [[UnresolvedFunc]]s with concrete [[LogicalPlan]]s.
+ * Replaces [[UnresolvedFunctionName]]s with concrete [[LogicalPlan]]s.
* Replaces [[UnresolvedFunction]]s with concrete [[Expression]]s.
* Replaces [[UnresolvedGenerator]]s with concrete [[Expression]]s.
* Replaces [[UnresolvedTableValuedFunction]]s with concrete [[LogicalPlan]]s.
@@ -2365,7 +2365,7 @@ class Analyzer(override val catalogManager: CatalogManager)
_.containsAnyPattern(UNRESOLVED_FUNC, UNRESOLVED_FUNCTION, GENERATOR,
UNRESOLVED_TABLE_VALUED_FUNCTION), ruleId) {
// Resolve functions with concrete relations from v2 catalog.
- case u @ UnresolvedFunc(nameParts, cmd, requirePersistentFunc, mismatchHint, _) =>
+ case u @ UnresolvedFunctionName(nameParts, cmd, requirePersistentFunc, mismatchHint, _) =>
lookupBuiltinOrTempFunction(nameParts)
.orElse(lookupBuiltinOrTempTableFunction(nameParts)).map { info =>
if (requirePersistentFunc) {
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/CheckAnalysis.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/CheckAnalysis.scala
index c5663d72ca4..8309186d566 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/CheckAnalysis.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/CheckAnalysis.scala
@@ -169,7 +169,7 @@ trait CheckAnalysis extends PredicateHelper with LookupCatalog with QueryErrorsB
case u: UnresolvedRelation =>
u.tableNotFound(u.multipartIdentifier)
- case u: UnresolvedFunc =>
+ case u: UnresolvedFunctionName =>
val catalogPath = (currentCatalog.name +: catalogManager.currentNamespace).mkString(".")
throw QueryCompilationErrors.unresolvedRoutineError(
u.multipartIdentifier,
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveCommandsWithIfExists.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveCommandsWithIfExists.scala
index 7a2bd1ccc15..65c23c3d3b2 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveCommandsWithIfExists.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveCommandsWithIfExists.scala
@@ -31,7 +31,7 @@ object ResolveCommandsWithIfExists extends Rule[LogicalPlan] {
_.containsPattern(COMMAND)) {
case UncacheTable(u: UnresolvedRelation, ifExists, _) if ifExists =>
NoopCommand("UNCACHE TABLE", u.multipartIdentifier)
- case DropFunction(u: UnresolvedFunc, ifExists) if ifExists =>
+ case DropFunction(u: UnresolvedFunctionName, ifExists) if ifExists =>
NoopCommand("DROP FUNCTION", u.multipartIdentifier)
}
}
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/unresolved.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/unresolved.scala
index 4b24c0002ff..687bf4f775e 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/unresolved.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/unresolved.scala
@@ -269,6 +269,11 @@ case class UnresolvedGenerator(name: FunctionIdentifier, children: Seq[Expressio
newChildren: IndexedSeq[Expression]): UnresolvedGenerator = copy(children = newChildren)
}
+/**
+ * Represents an unresolved function that is being invoked. The analyzer will resolve the function
+ * arguments first, then look up the function by name and arguments, and return an expression that
+ * can be evaluated to get the result of this function invocation.
+ */
case class UnresolvedFunction(
nameParts: Seq[String],
arguments: Seq[Expression],
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/v2ResolutionPlans.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/v2ResolutionPlans.scala
index 321eecf42b0..e6be5c23955 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/v2ResolutionPlans.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/v2ResolutionPlans.scala
@@ -119,9 +119,10 @@ case class UnresolvedFieldPosition(position: ColumnPosition) extends FieldPositi
/**
* Holds the name of a function that has yet to be looked up. It will be resolved to
- * [[ResolvedPersistentFunc]] or [[ResolvedNonPersistentFunc]] during analysis.
+ * [[ResolvedPersistentFunc]] or [[ResolvedNonPersistentFunc]] during analysis of function-related
+ * commands such as `DESCRIBE FUNCTION name`.
*/
-case class UnresolvedFunc(
+case class UnresolvedFunctionName(
multipartIdentifier: Seq[String],
commandName: String,
requirePersistent: Boolean,
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
index ea752a420d5..6ea3972c961 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
@@ -4709,7 +4709,7 @@ class AstBuilder extends SqlBaseParserBaseVisitor[AnyRef] with SQLConfHelper wit
Seq(describeFuncName.getText)
}
DescribeFunction(
- UnresolvedFunc(
+ UnresolvedFunctionName(
functionName,
"DESCRIBE FUNCTION",
requirePersistent = false,
@@ -4747,7 +4747,7 @@ class AstBuilder extends SqlBaseParserBaseVisitor[AnyRef] with SQLConfHelper wit
override def visitRefreshFunction(ctx: RefreshFunctionContext): LogicalPlan = withOrigin(ctx) {
val functionIdentifier = visitMultipartIdentifier(ctx.multipartIdentifier)
- RefreshFunction(UnresolvedFunc(
+ RefreshFunction(UnresolvedFunctionName(
functionIdentifier,
"REFRESH FUNCTION",
requirePersistent = true,
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DDLParserSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DDLParserSuite.scala
index 3e63cc24be2..0efbd75ad93 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DDLParserSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DDLParserSuite.scala
@@ -2360,8 +2360,8 @@ class DDLParserSuite extends AnalysisTest {
}
test("DESCRIBE FUNCTION") {
- def createFuncPlan(name: Seq[String]): UnresolvedFunc = {
- UnresolvedFunc(name, "DESCRIBE FUNCTION", false, None)
+ def createFuncPlan(name: Seq[String]): UnresolvedFunctionName = {
+ UnresolvedFunctionName(name, "DESCRIBE FUNCTION", false, None)
}
comparePlans(
parsePlan("DESC FUNCTION a"),
@@ -2378,8 +2378,8 @@ class DDLParserSuite extends AnalysisTest {
}
test("REFRESH FUNCTION") {
- def createFuncPlan(name: Seq[String]): UnresolvedFunc = {
- UnresolvedFunc(name, "REFRESH FUNCTION", true, None)
+ def createFuncPlan(name: Seq[String]): UnresolvedFunctionName = {
+ UnresolvedFunctionName(name, "REFRESH FUNCTION", true, None)
}
parseCompare("REFRESH FUNCTION c",
RefreshFunction(createFuncPlan(Seq("c"))))
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
index f551aa9efbf..ad0599775de 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
@@ -27,7 +27,7 @@ import org.antlr.v4.runtime.{ParserRuleContext, Token}
import org.antlr.v4.runtime.tree.TerminalNode
import org.apache.spark.sql.catalyst.{FunctionIdentifier, TableIdentifier}
-import org.apache.spark.sql.catalyst.analysis.{GlobalTempView, LocalTempView, PersistedView, UnresolvedFunc, UnresolvedIdentifier}
+import org.apache.spark.sql.catalyst.analysis.{GlobalTempView, LocalTempView, PersistedView, UnresolvedFunctionName, UnresolvedIdentifier}
import org.apache.spark.sql.catalyst.catalog._
import org.apache.spark.sql.catalyst.expressions.Expression
import org.apache.spark.sql.catalyst.parser._
@@ -604,7 +604,7 @@ class SparkSqlAstBuilder extends AstBuilder {
} else {
val hintStr = "Please use fully qualified identifier to drop the persistent function."
DropFunction(
- UnresolvedFunc(
+ UnresolvedFunctionName(
functionName,
"DROP FUNCTION",
requirePersistent = true,
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/internal/CatalogImpl.scala b/sql/core/src/main/scala/org/apache/spark/sql/internal/CatalogImpl.scala
index 81f7f8a8e73..79b0084da23 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/internal/CatalogImpl.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/internal/CatalogImpl.scala
@@ -246,7 +246,7 @@ class CatalogImpl(sparkSession: SparkSession) extends Catalog {
}
private def makeFunction(ident: Seq[String]): Function = {
- val plan = UnresolvedFunc(ident, "Catalog.makeFunction", false, None)
+ val plan = UnresolvedFunctionName(ident, "Catalog.makeFunction", false, None)
sparkSession.sessionState.executePlan(plan).analyzed match {
case f: ResolvedPersistentFunc =>
val className = f.func match {
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLParserSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLParserSuite.scala
index 786ad75303f..c291cc85aae 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLParserSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLParserSuite.scala
@@ -18,7 +18,7 @@
package org.apache.spark.sql.execution.command
import org.apache.spark.SparkThrowable
-import org.apache.spark.sql.catalyst.analysis.{AnalysisTest, GlobalTempView, LocalTempView, UnresolvedAttribute, UnresolvedFunc, UnresolvedIdentifier}
+import org.apache.spark.sql.catalyst.analysis.{AnalysisTest, GlobalTempView, LocalTempView, UnresolvedAttribute, UnresolvedFunctionName, UnresolvedIdentifier}
import org.apache.spark.sql.catalyst.catalog.{ArchiveResource, FileResource, FunctionResource, JarResource}
import org.apache.spark.sql.catalyst.dsl.expressions._
import org.apache.spark.sql.catalyst.dsl.plans
@@ -695,8 +695,8 @@ class DDLParserSuite extends AnalysisTest with SharedSparkSession {
}
test("DROP FUNCTION") {
- def createFuncPlan(name: Seq[String]): UnresolvedFunc = {
- UnresolvedFunc(name, "DROP FUNCTION", true,
+ def createFuncPlan(name: Seq[String]): UnresolvedFunctionName = {
+ UnresolvedFunctionName(name, "DROP FUNCTION", true,
Some("Please use fully qualified identifier to drop the persistent
function."))
}
comparePlans(
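Not part of the patch: a rough end-to-end sketch of how the renamed node is resolved, mirroring the Catalog.makeFunction change above. `sessionState`/`executePlan` are internal, unstable APIs, and the function name here is arbitrary.
```scala
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.catalyst.analysis.{ResolvedNonPersistentFunc, ResolvedPersistentFunc, UnresolvedFunctionName}

val spark = SparkSession.builder().appName("example").master("local[1]").getOrCreate()

// Build a name-only node and let the analyzer resolve it, as Catalog.makeFunction does.
val plan = UnresolvedFunctionName(Seq("upper"), "Catalog.makeFunction", false, None)
spark.sessionState.executePlan(plan).analyzed match {
  case _: ResolvedPersistentFunc    => println("resolved to a persistent (catalog) function")
  case _: ResolvedNonPersistentFunc => println("resolved to a built-in or temporary function")
  case other                        => println(s"unexpected plan: $other")
}
```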