Repository: spark
Updated Branches:
  refs/heads/master f953ca56e -> c79c10eba
[TEST] Different behaviors of SparkContext Conf when building SparkSession

## What changes were proposed in this pull request?

If an already-created, ACTIVE SparkContext is NOT explicitly passed through the Builder's API `sparkContext()`, the conf of that SparkContext will also pick up the options set through the API `config()`; otherwise, the conf of that SparkContext will NOT contain the options set through the API `config()`. This patch adds a test case covering the latter behavior (a standalone sketch of both behaviors follows the diff below).

## How was this patch tested?

N/A

Author: gatorsmile <[email protected]>

Closes #18517 from gatorsmile/fixTestCase2.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/c79c10eb
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/c79c10eb
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/c79c10eb

Branch: refs/heads/master
Commit: c79c10ebaf3d63b697b8d6d1a7e55aa2d406af69
Parents: f953ca5
Author: gatorsmile <[email protected]>
Authored: Mon Jul 3 16:18:54 2017 -0700
Committer: gatorsmile <[email protected]>
Committed: Mon Jul 3 16:18:54 2017 -0700

----------------------------------------------------------------------
 .../spark/sql/SparkSessionBuilderSuite.scala | 19 +++++++++++++++++++
 1 file changed, 19 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/c79c10eb/sql/core/src/test/scala/org/apache/spark/sql/SparkSessionBuilderSuite.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SparkSessionBuilderSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/SparkSessionBuilderSuite.scala
index 386d13d..4f6d5f7 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/SparkSessionBuilderSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/SparkSessionBuilderSuite.scala
@@ -98,12 +98,31 @@ class SparkSessionBuilderSuite extends SparkFunSuite {
     val session = SparkSession.builder().config("key2", "value2").getOrCreate()
     assert(session.conf.get("key1") == "value1")
     assert(session.conf.get("key2") == "value2")
+    assert(session.sparkContext == sparkContext2)
     assert(session.sparkContext.conf.get("key1") == "value1")
+    // If the created sparkContext is not passed through the Builder's API sparkContext,
+    // the conf of this sparkContext will also contain the conf set through the API config.
     assert(session.sparkContext.conf.get("key2") == "value2")
     assert(session.sparkContext.conf.get("spark.app.name") == "test")
     session.stop()
   }
 
+  test("create SparkContext first then pass context to SparkSession") {
+    sparkContext.stop()
+    val conf = new SparkConf().setAppName("test").setMaster("local").set("key1", "value1")
+    val newSC = new SparkContext(conf)
+    val session = SparkSession.builder().sparkContext(newSC).config("key2", "value2").getOrCreate()
+    assert(session.conf.get("key1") == "value1")
+    assert(session.conf.get("key2") == "value2")
+    assert(session.sparkContext == newSC)
+    assert(session.sparkContext.conf.get("key1") == "value1")
+    // If the created sparkContext is passed through the Builder's API sparkContext,
+    // the conf of this sparkContext will not contain the conf set through the API config.
+    assert(!session.sparkContext.conf.contains("key2"))
+    assert(session.sparkContext.conf.get("spark.app.name") == "test")
+    session.stop()
+  }
+
   test("SPARK-15887: hive-site.xml should be loaded") {
     val session = SparkSession.builder().master("local").getOrCreate()
     assert(session.sessionState.newHadoopConf().get("hive.in.test") == "true")
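
For reference, here is a minimal standalone sketch of the two behaviors described above. It is an illustration only, assuming a Spark 2.x build matching this commit: `Builder.sparkContext()` is package-private to `org.apache.spark`, so the sketch is placed under `org.apache.spark.sql` (as the test suite is), and the object name `BuilderConfBehaviorSketch` is purely hypothetical.

package org.apache.spark.sql

import org.apache.spark.{SparkConf, SparkContext}

// Hypothetical sketch, not part of the commit.
object BuilderConfBehaviorSketch {
  def main(args: Array[String]): Unit = {
    // Case 1: no SparkContext is handed to the builder. getOrCreate() builds the
    // context itself, so options set via config() are also copied into its conf.
    val implicitSession = SparkSession.builder()
      .master("local")
      .appName("test")
      .config("key2", "value2")
      .getOrCreate()
    assert(implicitSession.sparkContext.conf.get("key2") == "value2")
    implicitSession.stop()

    // Case 2: an existing SparkContext is passed explicitly via sparkContext().
    // Options set via config() show up on the session's conf, but the context's
    // own SparkConf is left untouched.
    val conf = new SparkConf().setMaster("local").setAppName("test").set("key1", "value1")
    val explicitContext = new SparkContext(conf)
    val explicitSession = SparkSession.builder()
      .sparkContext(explicitContext)
      .config("key2", "value2")
      .getOrCreate()
    assert(explicitSession.conf.get("key2") == "value2")
    assert(!explicitSession.sparkContext.conf.contains("key2"))
    explicitSession.stop()
  }
}

The asymmetry mirrors what the new test asserts: when the builder does not own the SparkContext, config() options stay on the SparkSession and are not written back into the caller-supplied SparkConf.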
