Github user HyukjinKwon commented on a diff in the pull request:

    https://github.com/apache/spark/pull/21368#discussion_r189760439
  
    --- Diff: repl/src/main/scala/org/apache/spark/repl/Main.scala ---
    @@ -79,44 +81,50 @@ object Main extends Logging {
       }
     
       def createSparkSession(): SparkSession = {
    -    val execUri = System.getenv("SPARK_EXECUTOR_URI")
    -    conf.setIfMissing("spark.app.name", "Spark shell")
    -    // SparkContext will detect this configuration and register it with the RpcEnv's
    -    // file server, setting spark.repl.class.uri to the actual URI for executors to
    -    // use. This is sort of ugly but since executors are started as part of SparkContext
    -    // initialization in certain cases, there's an initialization order issue that prevents
    -    // this from being set after SparkContext is instantiated.
    -    conf.set("spark.repl.class.outputDir", outputDir.getAbsolutePath())
    -    if (execUri != null) {
    -      conf.set("spark.executor.uri", execUri)
    -    }
    -    if (System.getenv("SPARK_HOME") != null) {
    -      conf.setSparkHome(System.getenv("SPARK_HOME"))
    -    }
    +    try {
    +      val execUri = System.getenv("SPARK_EXECUTOR_URI")
    +      conf.setIfMissing("spark.app.name", "Spark shell")
    +      // SparkContext will detect this configuration and register it with the RpcEnv's
    +      // file server, setting spark.repl.class.uri to the actual URI for executors to
    +      // use. This is sort of ugly but since executors are started as part of SparkContext
    +      // initialization in certain cases, there's an initialization order issue that prevents
    +      // this from being set after SparkContext is instantiated.
    +      conf.set("spark.repl.class.outputDir", outputDir.getAbsolutePath())
    +      if (execUri != null) {
    +        conf.set("spark.executor.uri", execUri)
    +      }
    +      if (System.getenv("SPARK_HOME") != null) {
    +        conf.setSparkHome(System.getenv("SPARK_HOME"))
    +      }
     
    -    val builder = SparkSession.builder.config(conf)
    -    if (conf.get(CATALOG_IMPLEMENTATION.key, "hive").toLowerCase(Locale.ROOT) == "hive") {
    -      if (SparkSession.hiveClassesArePresent) {
    -        // In the case that the property is not set at all, builder's config
    -        // does not have this value set to 'hive' yet. The original default
    -        // behavior is that when there are hive classes, we use hive catalog.
    -        sparkSession = builder.enableHiveSupport().getOrCreate()
    -        logInfo("Created Spark session with Hive support")
    +      val builder = SparkSession.builder.config(conf)
    +      if (conf.get(CATALOG_IMPLEMENTATION.key, "hive").toLowerCase(Locale.ROOT) == "hive") {
    +        if (SparkSession.hiveClassesArePresent) {
    +          // In the case that the property is not set at all, builder's config
    +          // does not have this value set to 'hive' yet. The original default
    +          // behavior is that when there are hive classes, we use hive catalog.
    +          sparkSession = builder.enableHiveSupport().getOrCreate()
    +          logInfo("Created Spark session with Hive support")
    +        } else {
    +          // Need to change it back to 'in-memory' if no hive classes are found
    +          // in the case that the property is set to hive in spark-defaults.conf
    +          builder.config(CATALOG_IMPLEMENTATION.key, "in-memory")
    +          sparkSession = builder.getOrCreate()
    +          logInfo("Created Spark session")
    +        }
           } else {
    -        // Need to change it back to 'in-memory' if no hive classes are found
    -        // in the case that the property is set to hive in spark-defaults.conf
    -        builder.config(CATALOG_IMPLEMENTATION.key, "in-memory")
    +        // In the case that the property is set but not to 'hive', the internal
    +        // default is 'in-memory'. So the sparkSession will use in-memory catalog.
             sparkSession = builder.getOrCreate()
             logInfo("Created Spark session")
           }
    -    } else {
    -      // In the case that the property is set but not to 'hive', the internal
    -      // default is 'in-memory'. So the sparkSession will use in-memory catalog.
    -      sparkSession = builder.getOrCreate()
    -      logInfo("Created Spark session")
    +      sparkContext = sparkSession.sparkContext
    +      sparkSession
    +    } catch {
    +      case e: Exception if isShellSession =>
    +        logError("Failed to initialize Spark session.", e)
    --- End diff ---
    
    @vanzin, seems `e.printStackTrace()` is missing here .. ?
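    
    Just to illustrate, a rough sketch of the catch block with the call added (`isShellSession` and `logError` come from this diff; the trailing `sys.exit(1)` is only my guess at the fallback, not something in this PR):
    
    ```scala
    } catch {
      case e: Exception if isShellSession =>
        logError("Failed to initialize Spark session.", e)
        // Also print the stack trace to the console so shell users see
        // the failure directly instead of only in the logs.
        e.printStackTrace()
        // Assumption: the shell cannot recover without a session.
        sys.exit(1)
    }
    ```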

