Github user yaooqinn commented on a diff in the pull request:
https://github.com/apache/spark/pull/19068#discussion_r138557772
--- Diff: sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveUtils.scala ---
@@ -231,6 +231,42 @@ private[spark] object HiveUtils extends Logging {
     }.toMap
   }

+  private[hive] def newHiveConfigurations(
+      sparkConf: SparkConf = new SparkConf(loadDefaults = true),
+      classLoader: ClassLoader = null)(
+      hadoopConf: Configuration = SparkHadoopUtil.get.newConfiguration(sparkConf))(
+      extraConfig: Map[String, String] = hiveClientConfigurations(hadoopConf)): HiveConf = {
--- End diff ---
This doesn't change the original logic; see https://github.com/apache/spark/blob/master/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveUtils.scala#L283.
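
For readers unfamiliar with the curried signature in the diff above: Scala only allows a default argument to refer to parameters bound in an *earlier* parameter list, which is why `sparkConf`/`classLoader`, `hadoopConf`, and `extraConfig` sit in separate lists. Below is a minimal, self-contained sketch of that pattern; it is not the Spark code under review, and names such as `CurriedDefaultsExample` and `FakeConf` are hypothetical.

```scala
// Sketch of curried parameter lists whose defaults are derived from earlier lists,
// mirroring how hadoopConf defaults to a value built from sparkConf, and
// extraConfig defaults to a value built from hadoopConf.
object CurriedDefaultsExample {

  final case class FakeConf(settings: Map[String, String])

  def newConf(base: Map[String, String] = Map("spark.app.name" -> "demo"))(
      derived: Map[String, String] = base.map { case (k, v) => k -> (v + "-derived") })(
      extra: Map[String, String] = Map("derived.count" -> derived.size.toString)): FakeConf =
    FakeConf(base ++ derived ++ extra)

  def main(args: Array[String]): Unit = {
    println(newConf()()())                // rely on all chained defaults
    println(newConf(Map("k" -> "v"))()()) // override only the first stage
  }
}
```

Callers that want the old behavior just supply empty argument lists, while tests can inject any stage explicitly.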
---