imback82 commented on a change in pull request #28840:
URL: https://github.com/apache/spark/pull/28840#discussion_r444602830
##########
File path:
sql/core/src/main/scala/org/apache/spark/sql/execution/command/functions.scala
##########
@@ -236,6 +236,59 @@ case class ShowFunctionsCommand(
}
}
+
+/**
+ * A command for users to refresh the persistent function.
+ * The syntax of using this command in SQL is:
+ * {{{
+ * REFRESH FUNCTION functionName
+ * }}}
+ */
+case class RefreshFunctionCommand(
+ databaseName: Option[String],
+ functionName: String)
+ extends RunnableCommand {
+
+ override def run(sparkSession: SparkSession): Seq[Row] = {
+ val catalog = sparkSession.sessionState.catalog
+ if
(FunctionRegistry.builtin.functionExists(FunctionIdentifier(functionName))) {
+ throw new AnalysisException(s"Cannot refresh builtin function
$functionName")
+ }
+ if (catalog.isTemporaryFunction(FunctionIdentifier(functionName,
databaseName))) {
+ throw new AnalysisException(s"Cannot refresh temporary function
$functionName")
+ }
+
+ // we only refresh the permanent function.
+ // there are 4 cases:
+ // 1. registry exists externalCatalog exists
+ // 2. registry exists externalCatalog not exists
+ // 3. registry not exists externalCatalog exists
+ // 4. registry not exists externalCatalog not exists
+ val identifier = FunctionIdentifier(
+ functionName, Some(databaseName.getOrElse(catalog.getCurrentDatabase)))
+ val isRegisteredFunction = catalog.isRegisteredFunction(identifier)
+ val isPersistentFunction = catalog.isPersistentFunction(identifier)
+ if (isRegisteredFunction && isPersistentFunction) {
+ // re-register function
+ catalog.unregisterFunction(identifier)
+ val func = catalog.getFunctionMetadata(identifier)
+ catalog.registerFunction(func, true)
+ } else if (isRegisteredFunction && !isPersistentFunction) {
+ // unregister function and throw NoSuchFunctionException
+ catalog.unregisterFunction(identifier)
+ throw new NoSuchFunctionException(identifier.database.get, functionName)
Review comment:
It's a bit weird to unregister the function and then throw an exception. Maybe we
should throw the exception without unregistering, and say that the function is not
in a valid state?
##########
File path:
sql/core/src/main/scala/org/apache/spark/sql/execution/command/functions.scala
##########
@@ -236,6 +236,59 @@ case class ShowFunctionsCommand(
}
}
+
+/**
+ * A command for users to refresh the persistent function.
+ * The syntax of using this command in SQL is:
+ * {{{
+ * REFRESH FUNCTION functionName
+ * }}}
+ */
+case class RefreshFunctionCommand(
+ databaseName: Option[String],
+ functionName: String)
+ extends RunnableCommand {
+
+ override def run(sparkSession: SparkSession): Seq[Row] = {
+ val catalog = sparkSession.sessionState.catalog
+ if
(FunctionRegistry.builtin.functionExists(FunctionIdentifier(functionName))) {
+ throw new AnalysisException(s"Cannot refresh builtin function
$functionName")
+ }
+ if (catalog.isTemporaryFunction(FunctionIdentifier(functionName,
databaseName))) {
+ throw new AnalysisException(s"Cannot refresh temporary function
$functionName")
+ }
+
+ // we only refresh the permanent function.
+ // there are 4 cases:
+ // 1. registry exists externalCatalog exists
+ // 2. registry exists externalCatalog not exists
+ // 3. registry not exists externalCatalog exists
+ // 4. registry not exists externalCatalog not exists
+ val identifier = FunctionIdentifier(
+ functionName, Some(databaseName.getOrElse(catalog.getCurrentDatabase)))
+ val isRegisteredFunction = catalog.isRegisteredFunction(identifier)
+ val isPersistentFunction = catalog.isPersistentFunction(identifier)
+ if (isRegisteredFunction && isPersistentFunction) {
+ // re-register function
+ catalog.unregisterFunction(identifier)
+ val func = catalog.getFunctionMetadata(identifier)
+ catalog.registerFunction(func, true)
+ } else if (isRegisteredFunction && !isPersistentFunction) {
+ // unregister function and throw NoSuchFunctionException
+ catalog.unregisterFunction(identifier)
+ throw new NoSuchFunctionException(identifier.database.get, functionName)
+ } else if (!isRegisteredFunction && isPersistentFunction) {
+ // register function
+ val func = catalog.getFunctionMetadata(identifier)
+ catalog.registerFunction(func, true)
Review comment:
Should this condition be a no-op, or do we want to proactively register the
function?
##########
File path:
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
##########
@@ -1885,11 +1885,17 @@ class Analyzer(
}
/**
+ * Replaces [[UnresolvedFunc]]s with concrete [[LogicalPlan]]s.
* Replaces [[UnresolvedFunction]]s with concrete [[Expression]]s.
*/
object ResolveFunctions extends Rule[LogicalPlan] {
val trimWarningEnabled = new AtomicBoolean(true)
def apply(plan: LogicalPlan): LogicalPlan = plan.resolveOperatorsUp {
+ case UnresolvedFunc(multipartIdent) =>
+ val funcIdent = parseSessionCatalogFunctionIdentifier(multipartIdent,
s"${plan.nodeName}")
Review comment:
`s"${plan.nodeName}"` -> `plan.nodeName`
----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
For queries about this service, please contact Infrastructure at:
[email protected]
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]