Github user yhuai commented on a diff in the pull request:
https://github.com/apache/spark/pull/14897#discussion_r82493491
--- Diff: sql/core/src/main/scala/org/apache/spark/sql/internal/SharedState.scala ---
@@ -37,39 +37,14 @@ import org.apache.spark.util.{MutableURLClassLoader, Utils}
  */
 private[sql] class SharedState(val sparkContext: SparkContext) extends Logging {
 
-  /**
-   * Class for caching query results reused in future executions.
-   */
-  val cacheManager: CacheManager = new CacheManager
-
-  /**
-   * A listener for SQL-specific [[org.apache.spark.scheduler.SparkListenerEvent]]s.
-   */
-  val listener: SQLListener = createListenerAndUI(sparkContext)
-
+  // Load hive-site.xml into hadoopConf and determine the warehouse path we want to use, based on
+  // the config from both hive and Spark SQL. Finally set the warehouse config value to sparkConf.
   {
     val configFile = Utils.getContextOrSparkClassLoader.getResource("hive-site.xml")
     if (configFile != null) {
       sparkContext.hadoopConfiguration.addResource(configFile)
     }
-  }
-
-  /**
-   * A catalog that interacts with external systems.
-   */
-  lazy val externalCatalog: ExternalCatalog =
-    SharedState.reflect[ExternalCatalog, SparkConf, Configuration](
-      SharedState.externalCatalogClassName(sparkContext.conf),
-      sparkContext.conf,
-      sparkContext.hadoopConfiguration)
-
-  /**
-   * A classloader used to load all user-added jar.
-   */
-  val jarClassLoader = new NonClosableMutableURLClassLoader(
-    org.apache.spark.util.Utils.getContextOrSparkClassLoader)
-
-  {
     // Set the Hive metastore warehouse path to the one we use
     val tempConf = new SQLConf
--- End diff --
Can you double-check?
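
For readers following the thread: the new header comment in the hunk describes a three-step flow (load hive-site.xml into hadoopConf, reconcile the Hive and Spark SQL warehouse settings, write the winner back to sparkConf). Below is a minimal sketch of that precedence logic, not the PR's exact code; the key names follow the usual Hive/Spark conventions ("hive.metastore.warehouse.dir", "spark.sql.warehouse.dir"), and the exact tie-breaking rule is an assumption about the intent the comment states.

    import org.apache.hadoop.conf.Configuration
    import org.apache.spark.SparkConf

    // Sketch only: the precedence (an explicit hive-site.xml value wins unless
    // Spark SQL was configured explicitly) is assumed, not taken from the PR.
    def resolveWarehousePath(sparkConf: SparkConf, hadoopConf: Configuration): String = {
      val hiveWarehouse = hadoopConf.get("hive.metastore.warehouse.dir")
      val resolved =
        if (hiveWarehouse != null && !sparkConf.contains("spark.sql.warehouse.dir")) {
          // hive-site.xml set a path and Spark SQL did not: keep Hive's value.
          hiveWarehouse
        } else {
          // Otherwise Spark SQL's setting (or its default) wins.
          sparkConf.get("spark.sql.warehouse.dir", "spark-warehouse")
        }
      // "Finally set the warehouse config value to sparkConf" so later
      // consumers all see one agreed-upon path.
      sparkConf.set("spark.sql.warehouse.dir", resolved)
      resolved
    }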
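
Separately, the removed externalCatalog lines instantiate the catalog implementation via constructor reflection. A self-contained sketch of that pattern, using plain java.lang.reflect rather than Spark's internal SharedState.reflect helper; the InMemoryCatalog name in the usage comment is only an example of a class exposing a (SparkConf, Configuration) constructor.

    import org.apache.hadoop.conf.Configuration
    import org.apache.spark.SparkConf

    // Load a class by name and invoke its (SparkConf, Configuration) constructor,
    // mirroring what the removed lazy val does through SharedState.reflect.
    def instantiateCatalog[T](className: String, conf: SparkConf, hadoopConf: Configuration): T = {
      val ctor = Class.forName(className)
        .getDeclaredConstructor(classOf[SparkConf], classOf[Configuration])
      ctor.newInstance(conf, hadoopConf).asInstanceOf[T]
    }

    // Example (class name for illustration; the real name comes from
    // SharedState.externalCatalogClassName(sparkContext.conf)):
    // val catalog = instantiateCatalog[ExternalCatalog](
    //   "org.apache.spark.sql.catalyst.catalog.InMemoryCatalog", conf, hadoopConf)

Reflection is used here so the Hive catalog class is only loaded when the Hive classes are actually on the classpath.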