amaliujia commented on code in PR #37287:
URL: https://github.com/apache/spark/pull/37287#discussion_r930279421
##########
sql/core/src/main/scala/org/apache/spark/sql/internal/CatalogImpl.scala:
##########
@@ -404,82 +354,69 @@ class CatalogImpl(sparkSession: SparkSession) extends
Catalog {
* table/view. This throws an `AnalysisException` when no `Table` can be
found.
*/
override def getTable(tableName: String): Table = {
- // calling `sqlParser.parseTableIdentifier` to parse tableName. If it
contains only table name
- // and optionally contains a database name(thus a TableIdentifier), then
we look up the table in
- // sessionCatalog. Otherwise we try `sqlParser.parseMultipartIdentifier`
to have a sequence of
- // string as the qualified identifier and resolve the table through SQL
analyzer.
- try {
- val ident =
sparkSession.sessionState.sqlParser.parseTableIdentifier(tableName)
- if (tableExists(ident.database.orNull, ident.table)) {
- makeTable(ident)
- } else {
- getTable3LNamespace(tableName)
- }
- } catch {
- case e: org.apache.spark.sql.catalyst.parser.ParseException =>
- getTable3LNamespace(tableName)
+ val parsed =
sparkSession.sessionState.sqlParser.parseMultipartIdentifier(tableName)
+ // For backward compatibility (Spark 3.3 and prior), we should check if
the table exists in
+ // the Hive Metastore first.
+ val nameParts = if (parsed.length <= 2 &&
!sessionCatalog.isTempView(parsed) &&
+ sessionCatalog.tableExists(parsed.asTableIdentifier)) {
+ CatalogManager.SESSION_CATALOG_NAME +: parsed
+ } else {
+ parsed
}
- }
-
- private def getTable3LNamespace(tableName: String): Table = {
- val ident =
sparkSession.sessionState.sqlParser.parseMultipartIdentifier(tableName)
- makeTable(ident)
+ makeTable(nameParts)
}
/**
* Gets the table or view with the specified name in the specified database.
This throws an
* `AnalysisException` when no `Table` can be found.
*/
override def getTable(dbName: String, tableName: String): Table = {
- if (tableExists(dbName, tableName)) {
- makeTable(TableIdentifier(tableName, Option(dbName)))
- } else {
- throw
QueryCompilationErrors.tableOrViewNotFoundInDatabaseError(tableName, dbName)
- }
+ // For backward compatibility (Spark 3.3 and prior), here we always look
up the table from the
+ // Hive Metastore.
+ makeTable(Seq(CatalogManager.SESSION_CATALOG_NAME, dbName, tableName))
}
/**
* Gets the function with the specified name. This function can be a
temporary function or a
* function. This throws an `AnalysisException` when no `Function` can be
found.
*/
override def getFunction(functionName: String): Function = {
- // calling `sqlParser.parseFunctionIdentifier` to parse functionName. If
it contains only
- // function name and optionally contains a database name(thus a
FunctionIdentifier), then
- // we look up the function in sessionCatalog.
- // Otherwise we try `sqlParser.parseMultipartIdentifier` to have a
sequence of string as
- // the qualified identifier and resolve the function through SQL analyzer.
- try {
- val ident =
sparkSession.sessionState.sqlParser.parseFunctionIdentifier(functionName)
- getFunction(ident.database.orNull, ident.funcName)
- } catch {
- case e: org.apache.spark.sql.catalyst.parser.ParseException =>
- val ident =
sparkSession.sessionState.sqlParser.parseMultipartIdentifier(functionName)
- makeFunction(ident)
+ val parsed =
sparkSession.sessionState.sqlParser.parseMultipartIdentifier(functionName)
+ // For backward compatibility (Spark 3.3 and prior), we should check if
the function exists in
+ // the Hive Metastore first.
+ val nameParts = if (parsed.length <= 2 &&
+ !sessionCatalog.isTemporaryFunction(parsed.asFunctionIdentifier) &&
+ sessionCatalog.functionExists(parsed.asFunctionIdentifier)) {
+ CatalogManager.SESSION_CATALOG_NAME +: parsed
+ } else {
+ parsed
}
+ makeFunction(nameParts)
}
/**
* Gets the function with the specified name. This returns `None` when no
`Function` can be
* found.
*/
override def getFunction(dbName: String, functionName: String): Function = {
- makeFunction(FunctionIdentifier(functionName, Option(dbName)))
+ // For backward compatibility (Spark 3.3 and prior), here we always look
up the function from
+ // the Hive Metastore.
+ makeFunction(Seq(CatalogManager.SESSION_CATALOG_NAME, dbName,
functionName))
Review Comment:
This really depends on whether the SQL analyzer respects the current catalog when
resolving UnresolvedFunc.
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]