Repository: spark
Updated Branches:
  refs/heads/branch-1.2 668643b8d -> 6f70e0295


[SPARK-4472][Shell] Print "Spark context available as sc." only when 
SparkContext is created...

... successfully

It's weird to print "Spark context available as sc." when the
SparkContext is not created successfully.

Author: zsxwing <[email protected]>

Closes #3341 from zsxwing/SPARK-4472 and squashes the following commits:

4850093 [zsxwing] Print "Spark context available as sc." only when SparkContext 
is created successfully

(cherry picked from commit f1069b84b82b932751604bc20d5c2e451d57c455)
Signed-off-by: Reynold Xin <[email protected]>


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/6f70e029
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/6f70e029
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/6f70e029

Branch: refs/heads/branch-1.2
Commit: 6f70e0295572e3037660004797040e026e440dbd
Parents: 668643b
Author: zsxwing <[email protected]>
Authored: Fri Nov 21 00:42:43 2014 -0800
Committer: Reynold Xin <[email protected]>
Committed: Fri Nov 21 00:43:10 2014 -0800

----------------------------------------------------------------------
 .../src/main/scala/org/apache/spark/repl/SparkILoopInit.scala | 7 +++++--
 .../src/main/scala/org/apache/spark/repl/SparkILoop.scala     | 7 +++++--
 2 files changed, 10 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/6f70e029/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoopInit.scala
----------------------------------------------------------------------
diff --git 
a/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoopInit.scala 
b/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoopInit.scala
index 7667a9c..da4286c 100644
--- a/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoopInit.scala
+++ b/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoopInit.scala
@@ -121,11 +121,14 @@ trait SparkILoopInit {
   def initializeSpark() {
     intp.beQuietDuring {
       command("""
-         @transient val sc = 
org.apache.spark.repl.Main.interp.createSparkContext();
+         @transient val sc = {
+           val _sc = org.apache.spark.repl.Main.interp.createSparkContext()
+           println("Spark context available as sc.")
+           _sc
+         }
         """)
       command("import org.apache.spark.SparkContext._")
     }
-    echo("Spark context available as sc.")
   }
 
   // code to be executed only after the interpreter is initialized

http://git-wip-us.apache.org/repos/asf/spark/blob/6f70e029/repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkILoop.scala
----------------------------------------------------------------------
diff --git 
a/repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkILoop.scala 
b/repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkILoop.scala
index a591e9f..2507273 100644
--- a/repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkILoop.scala
+++ b/repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkILoop.scala
@@ -61,11 +61,14 @@ class SparkILoop(in0: Option[BufferedReader], protected val 
out: JPrintWriter)
   def initializeSpark() {
     intp.beQuietDuring {
       command( """
-         @transient val sc = org.apache.spark.repl.Main.createSparkContext();
+         @transient val sc = {
+           val _sc = org.apache.spark.repl.Main.createSparkContext()
+           println("Spark context available as sc.")
+           _sc
+         }
                """)
       command("import org.apache.spark.SparkContext._")
     }
-    echo("Spark context available as sc.")
   }
 
   /** Print a welcome message */


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to