Repository: hive
Updated Branches:
  refs/heads/master 09b6f9a36 -> 305b8ce40
HIVE-12538: After set spark related config, SparkSession never get reused (Nemon Lou via Xuefu)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/305b8ce4
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/305b8ce4
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/305b8ce4

Branch: refs/heads/master
Commit: 305b8ce4097a692a2ee718b1df384d98d1e6fc1a
Parents: 09b6f9a
Author: Xuefu Zhang <[email protected]>
Authored: Wed Dec 16 08:31:27 2015 -0800
Committer: Xuefu Zhang <[email protected]>
Committed: Wed Dec 16 08:31:27 2015 -0800

----------------------------------------------------------------------
 .../java/org/apache/hadoop/hive/conf/HiveConf.java  |  4 +++-
 .../org/apache/hadoop/hive/conf/TestHiveConf.java   | 14 ++++++++++++++
 .../hadoop/hive/ql/exec/spark/SparkUtilities.java   |  6 +++++-
 3 files changed, 22 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/305b8ce4/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
----------------------------------------------------------------------
diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
index 243f281..b5aee00 100644
--- a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
+++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
@@ -2836,7 +2836,9 @@ public class HiveConf extends Configuration {
       // When either name or value is null, the set method below will fail,
       // and throw IllegalArgumentException
       set(name, value);
-      isSparkConfigUpdated = isSparkRelatedConfig(name);
+      if (isSparkRelatedConfig(name)) {
+        isSparkConfigUpdated = true;
+      }
     }
   }


http://git-wip-us.apache.org/repos/asf/hive/blob/305b8ce4/common/src/test/org/apache/hadoop/hive/conf/TestHiveConf.java
----------------------------------------------------------------------
diff --git a/common/src/test/org/apache/hadoop/hive/conf/TestHiveConf.java b/common/src/test/org/apache/hadoop/hive/conf/TestHiveConf.java
index 3b7a525..cd472c7 100644
--- a/common/src/test/org/apache/hadoop/hive/conf/TestHiveConf.java
+++ b/common/src/test/org/apache/hadoop/hive/conf/TestHiveConf.java
@@ -142,4 +142,18 @@ public class TestHiveConf {
     Assert.assertEquals("", conf2.get(HiveConf.ConfVars.METASTOREPWD.varname));
     Assert.assertEquals("", conf2.get(HiveConf.ConfVars.HIVE_SERVER2_SSL_KEYSTORE_PASSWORD.varname));
   }
+
+  @Test
+  public void testSparkConfigUpdate(){
+    HiveConf conf = new HiveConf();
+    Assert.assertFalse(conf.getSparkConfigUpdated());
+
+    conf.verifyAndSet("spark.master", "yarn-cluster");
+    Assert.assertTrue(conf.getSparkConfigUpdated());
+    conf.verifyAndSet("hive.execution.engine", "spark");
+    Assert.assertTrue("Expected spark config updated.", conf.getSparkConfigUpdated());
+
+    conf.setSparkConfigUpdated(false);
+    Assert.assertFalse(conf.getSparkConfigUpdated());
+  }
 }


http://git-wip-us.apache.org/repos/asf/hive/blob/305b8ce4/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkUtilities.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkUtilities.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkUtilities.java
index 0268469..a61cdc5 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkUtilities.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkUtilities.java
@@ -121,12 +121,16 @@ public class SparkUtilities {
   public static SparkSession getSparkSession(HiveConf conf,
       SparkSessionManager sparkSessionManager) throws HiveException {
     SparkSession sparkSession = SessionState.get().getSparkSession();
+    HiveConf sessionConf = SessionState.get().getConf();

     // Spark configurations are updated close the existing session
-    if (conf.getSparkConfigUpdated()) {
+    // In case of async queries or confOverlay is not empty,
+    // sessionConf and conf are different objects
+    if (sessionConf.getSparkConfigUpdated() || conf.getSparkConfigUpdated()) {
       sparkSessionManager.closeSession(sparkSession);
       sparkSession = null;
       conf.setSparkConfigUpdated(false);
+      sessionConf.setSparkConfigUpdated(false);
     }
     sparkSession = sparkSessionManager.getSession(sparkSession, conf, true);
     SessionState.get().setSparkSession(sparkSession);
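For readers skimming the diff, here is a minimal, self-contained sketch of the HiveConf.verifyAndSet change. The SparkConfigFlagSketch class and its simplified isSparkRelatedConfig stub are illustrative stand-ins, not Hive code: the point is only that the flag is now raised when a Spark-related property is set, instead of being recomputed on every set, so setting an unrelated property can no longer clear a pending Spark config update.

----------------------------------------------------------------------
// Illustrative sketch only; class and property names are hypothetical, and
// isSparkRelatedConfig is a simplified stand-in for the real HiveConf method.
public class SparkConfigFlagSketch {

  private boolean isSparkConfigUpdated = false;

  // Simplified stand-in: the real HiveConf.isSparkRelatedConfig(String)
  // recognizes more property names than this.
  private boolean isSparkRelatedConfig(String name) {
    return name.startsWith("spark.");
  }

  // Patched semantics: the flag is only ever raised here and is cleared
  // explicitly via setSparkConfigUpdated(false), never by an unrelated set.
  public void verifyAndSet(String name, String value) {
    // set(name, value) omitted in this sketch
    if (isSparkRelatedConfig(name)) {
      isSparkConfigUpdated = true;
    }
    // Pre-patch, this was: isSparkConfigUpdated = isSparkRelatedConfig(name);
    // so the next non-Spark property reset the flag to false.
  }

  public void setSparkConfigUpdated(boolean updated) {
    isSparkConfigUpdated = updated;
  }

  public boolean getSparkConfigUpdated() {
    return isSparkConfigUpdated;
  }

  public static void main(String[] args) {
    SparkConfigFlagSketch conf = new SparkConfigFlagSketch();
    conf.verifyAndSet("spark.executor.memory", "4g");  // Spark-related: flag goes up
    conf.verifyAndSet("hive.exec.parallel", "true");   // unrelated property
    // Prints true with the patched logic; the pre-patch assignment would
    // have reset the flag here and the earlier Spark update would be lost.
    System.out.println(conf.getSparkConfigUpdated());
  }
}
----------------------------------------------------------------------

On the SparkUtilities side, getSparkSession now checks and clears the flag on both the session-level conf and the per-query conf, since with async queries or a non-empty confOverlay they are different HiveConf objects.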
