Github user andrewor14 commented on a diff in the pull request:

    https://github.com/apache/spark/pull/12890#discussion_r62084188
  
    --- Diff: repl/scala-2.11/src/main/scala/org/apache/spark/repl/Main.scala ---
    @@ -71,35 +71,32 @@ object Main extends Logging {
         }
       }
     
    -  def createSparkContext(): SparkContext = {
    +  def createSparkSession(): SparkSession = {
         val execUri = System.getenv("SPARK_EXECUTOR_URI")
         conf.setIfMissing("spark.app.name", "Spark shell")
    -      // SparkContext will detect this configuration and register it with the RpcEnv's
    -      // file server, setting spark.repl.class.uri to the actual URI for executors to
    -      // use. This is sort of ugly but since executors are started as part of SparkContext
    -      // initialization in certain cases, there's an initialization order issue that prevents
    -      // this from being set after SparkContext is instantiated.
    -      .set("spark.repl.class.outputDir", outputDir.getAbsolutePath())
    +    // SparkContext will detect this configuration and register it with the RpcEnv's
    +    // file server, setting spark.repl.class.uri to the actual URI for executors to
    +    // use. This is sort of ugly but since executors are started as part of SparkContext
    +    // initialization in certain cases, there's an initialization order issue that prevents
    +    // this from being set after SparkContext is instantiated.
    +    conf.set("spark.repl.class.outputDir", outputDir.getAbsolutePath())
         if (execUri != null) {
           conf.set("spark.executor.uri", execUri)
         }
         if (System.getenv("SPARK_HOME") != null) {
           conf.setSparkHome(System.getenv("SPARK_HOME"))
         }
    -    sparkContext = new SparkContext(conf)
    -    logInfo("Created spark context..")
    -    Signaling.cancelOnInterrupt(sparkContext)
    -    sparkContext
    -  }
     
    -  def createSparkSession(): SparkSession = {
    +    val builder = SparkSession.builder.config(conf)
         if (SparkSession.hiveClassesArePresent) {
    -      sparkSession = SparkSession.builder.enableHiveSupport().getOrCreate()
    +      sparkSession = builder.enableHiveSupport().getOrCreate()
           logInfo("Created Spark session with Hive support")
         } else {
    -      sparkSession = SparkSession.builder.getOrCreate()
    +      sparkSession = builder.getOrCreate()
    --- End diff --
    
    hm I think it's better to keep that flag contained rather than duplicating it everywhere
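    
    For what it's worth, a rough sketch of what "contained" means here (it mirrors this patch; `SessionSketch` and its fields are just stand-ins for `Main` and its existing `conf`/`sparkSession` vars, not code in the PR):
    
    ```scala
    package org.apache.spark.repl
    
    import org.apache.spark.SparkConf
    import org.apache.spark.sql.SparkSession
    
    object SessionSketch {
      // stand-ins for Main's existing `conf` and `sparkSession` fields
      private val conf = new SparkConf()
      private var sparkSession: SparkSession = _
    
      // The hiveClassesArePresent check lives in exactly this one place, so callers
      // just invoke createSparkSession() and never repeat the Hive branch.
      def createSparkSession(): SparkSession = {
        val builder = SparkSession.builder.config(conf)
        sparkSession =
          if (SparkSession.hiveClassesArePresent) {
            builder.enableHiveSupport().getOrCreate()
          } else {
            builder.getOrCreate()
          }
        sparkSession
      }
    }
    ```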

