Repository: spark
Updated Branches:
  refs/heads/branch-1.5 51fc152b7 -> cf36cdba2
[SPARK-11191][SQL][FOLLOW-UP] Cleans up unnecessary anonymous HiveFunctionRegistry

According to discussion in PR #9664, the anonymous `HiveFunctionRegistry` in
`HiveContext` can be removed now.

Author: Cheng Lian <[email protected]>

Closes #9737 from liancheng/spark-11191.follow-up.

(cherry picked from commit fa13301ae440c4c9594280f236bcca11b62fdd29)
Signed-off-by: Cheng Lian <[email protected]>


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/cf36cdba
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/cf36cdba
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/cf36cdba

Branch: refs/heads/branch-1.5
Commit: cf36cdba2079d1768086d999180c4eb9eb09b7a9
Parents: 51fc152
Author: Cheng Lian <[email protected]>
Authored: Tue Nov 17 18:11:08 2015 +0800
Committer: Cheng Lian <[email protected]>
Committed: Tue Nov 17 18:16:31 2015 +0800

----------------------------------------------------------------------
 .../main/scala/org/apache/spark/sql/hive/HiveContext.scala  | 2 +-
 .../src/main/scala/org/apache/spark/sql/hive/hiveUDFs.scala | 7 +++++--
 2 files changed, 6 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/cf36cdba/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala
index b3ba444..e611152 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala
@@ -412,7 +412,7 @@ class HiveContext(sc: SparkContext) extends SQLContext(sc) with Logging {
   // Note that HiveUDFs will be overridden by functions registered in this context.
   @transient
   override protected[sql] lazy val functionRegistry: FunctionRegistry =
-    new HiveFunctionRegistry(FunctionRegistry.builtin, this)
+    new HiveFunctionRegistry(FunctionRegistry.builtin, this.executionHive)

   /* An analyzer that uses the Hive metastore. */
   @transient


http://git-wip-us.apache.org/repos/asf/spark/blob/cf36cdba/sql/hive/src/main/scala/org/apache/spark/sql/hive/hiveUDFs.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/hiveUDFs.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/hiveUDFs.scala
index a510df6..5fe3076 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/hiveUDFs.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/hiveUDFs.scala
@@ -41,16 +41,19 @@ import org.apache.spark.sql.catalyst.expressions.codegen.CodegenFallback
 import org.apache.spark.sql.catalyst.plans.logical._
 import org.apache.spark.sql.catalyst.rules.Rule
 import org.apache.spark.sql.hive.HiveShim._
+import org.apache.spark.sql.hive.client.ClientWrapper
 import org.apache.spark.sql.types._

 private[hive] class HiveFunctionRegistry(
     underlying: analysis.FunctionRegistry,
-    hiveContext: HiveContext)
+    executionHive: ClientWrapper)
   extends analysis.FunctionRegistry with HiveInspectors {

   def getFunctionInfo(name: String): FunctionInfo = {
-    hiveContext.executionHive.withHiveState {
+    // Hive Registry need current database to lookup function
+    // TODO: the current database of executionHive should be consistent with metadataHive
+    executionHive.withHiveState {
       FunctionRegistry.getFunctionInfo(name)
     }
   }
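
For readers skimming the diff, a minimal self-contained Scala sketch of the same
pattern may help. The names below (ExecutionHiveClient, BuiltinRegistry,
SketchHiveFunctionRegistry) are simplified stand-ins, not the real Spark/Hive
classes touched by this commit. The sketch mirrors the two pieces of the change:
the registry now depends only on the execution Hive client it actually needs
(so no anonymous subclass capturing the whole HiveContext is required), and the
lookup still runs inside that client's Hive state so the current database is set.

// Minimal sketch only; these classes are simplified stand-ins for illustration.

// Stand-in for the slice of ClientWrapper the registry actually needs: running
// a block with the client's Hive session state (current database, etc.) set up.
class ExecutionHiveClient(val currentDatabase: String) {
  def withHiveState[A](body: => A): A = {
    // The real client would swap in its SessionState here; the sketch just runs the block.
    body
  }
}

// Stand-in for the underlying analysis.FunctionRegistry being wrapped.
class BuiltinRegistry {
  private val builtins = Map("upper" -> "builtin function: upper(str)")
  def lookup(name: String): Option[String] = builtins.get(name)
}

// The point of the change: the registry takes only the execution Hive client,
// so it can be constructed directly rather than via an anonymous subclass that
// closed over the whole context.
class SketchHiveFunctionRegistry(
    underlying: BuiltinRegistry,
    executionHive: ExecutionHiveClient) {

  def getFunctionInfo(name: String): Option[String] =
    // Hive resolves functions relative to the current database, so the lookup
    // runs inside the client's Hive state.
    executionHive.withHiveState {
      underlying.lookup(name)
        .orElse(Some(s"hive function: $name (db=${executionHive.currentDatabase})"))
    }
}

object SketchHiveFunctionRegistry {
  def main(args: Array[String]): Unit = {
    val registry = new SketchHiveFunctionRegistry(
      new BuiltinRegistry,
      new ExecutionHiveClient(currentDatabase = "default"))

    println(registry.getFunctionInfo("upper"))     // Some(builtin function: upper(str))
    println(registry.getFunctionInfo("histogram")) // Some(hive function: histogram (db=default))
  }
}

The sketch leaves out error handling and the open question noted in the diff's
TODO, namely keeping the current database of executionHive consistent with
metadataHive.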
