spark git commit: [SPARK-5586][Spark Shell][SQL] Make `sqlContext` available in spark shell

2015-02-06 Thread marmbrus
Repository: spark
Updated Branches:
  refs/heads/master 4793c8402 -> 3d3ecd774


[SPARK-5586][Spark Shell][SQL] Make `sqlContext` available in spark shell

Result is like this
```
15/02/05 13:41:22 INFO SparkILoop: Created spark context..
Spark context available as sc.
15/02/05 13:41:22 INFO SparkILoop: Created sql context..
SQLContext available as sqlContext.

scala> sq
sql          sqlContext   sqlParser    sqrt
```

Author: OopsOutOfMemory victorshen...@126.com

Closes #4387 from OopsOutOfMemory/sqlContextInShell and squashes the following 
commits:

c7f5203 [OopsOutOfMemory] auto-import sql() function
e160697 [OopsOutOfMemory] Merge branch 'sqlContextInShell' of 
https://github.com/OopsOutOfMemory/spark into sqlContextInShell
37c0a16 [OopsOutOfMemory] auto detect hive support
a9c59d9 [OopsOutOfMemory] rename and reduce range of imports
6b9e309 [OopsOutOfMemory] Merge branch 'master' into sqlContextInShell
cae652f [OopsOutOfMemory] make sqlContext available in spark shell


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/3d3ecd77
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/3d3ecd77
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/3d3ecd77

Branch: refs/heads/master
Commit: 3d3ecd77411bfdd1c72be51616c46a6caf839be2
Parents: 4793c84
Author: OopsOutOfMemory victorshen...@126.com
Authored: Fri Feb 6 13:20:10 2015 -0800
Committer: Michael Armbrust mich...@databricks.com
Committed: Fri Feb 6 13:20:10 2015 -0800

--
 .../scala/org/apache/spark/repl/SparkILoop.scala | 19 +++
 .../org/apache/spark/repl/SparkILoopInit.scala   | 10 ++
 .../main/scala/org/apache/spark/repl/Main.scala  | 18 ++
 .../scala/org/apache/spark/repl/SparkILoop.scala | 12 +++-
 4 files changed, 58 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/spark/blob/3d3ecd77/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoop.scala
--
diff --git 
a/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoop.scala 
b/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoop.scala
index 72c1a98..b4db3df 100644
--- a/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoop.scala
+++ b/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoop.scala
@@ -45,6 +45,7 @@ import scala.reflect.api.{Mirror, TypeCreator, Universe => 
ApiUniverse}
 import org.apache.spark.Logging
 import org.apache.spark.SparkConf
 import org.apache.spark.SparkContext
+import org.apache.spark.sql.SQLContext
 import org.apache.spark.util.Utils
 
 /** The Scala interactive shell.  It provides a read-eval-print loop
@@ -130,6 +131,7 @@ class SparkILoop(
   // NOTE: Must be public for visibility
   @DeveloperApi
   var sparkContext: SparkContext = _
+  var sqlContext: SQLContext = _
 
   override def echoCommandMessage(msg: String) {
 intp.reporter printMessage msg
@@ -1016,6 +1018,23 @@ class SparkILoop(
 sparkContext
   }
 
+  @DeveloperApi
+  def createSQLContext(): SQLContext = {
+val name = "org.apache.spark.sql.hive.HiveContext"
+val loader = Utils.getContextOrSparkClassLoader
+try {
+  sqlContext = loader.loadClass(name).getConstructor(classOf[SparkContext])
+.newInstance(sparkContext).asInstanceOf[SQLContext] 
+  logInfo("Created sql context (with Hive support)..")
+}
+catch {
+  case cnf: java.lang.ClassNotFoundException =>
+sqlContext = new SQLContext(sparkContext)
+logInfo("Created sql context..")
+}
+sqlContext
+  }
+
   private def getMaster(): String = {
 val master = this.master match {
  case Some(m) => m

http://git-wip-us.apache.org/repos/asf/spark/blob/3d3ecd77/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoopInit.scala
--
diff --git 
a/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoopInit.scala 
b/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoopInit.scala
index 99bd777..0cf2de6 100644
--- a/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoopInit.scala
+++ b/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoopInit.scala
@@ -127,7 +127,17 @@ private[repl] trait SparkILoopInit {
_sc
  }
 )
+  command("""
+ @transient val sqlContext = {
+   val _sqlContext = 
org.apache.spark.repl.Main.interp.createSQLContext()
+   println("SQL context available as sqlContext.")
+   _sqlContext
+ }
+""")
   command("import org.apache.spark.SparkContext._")
+  command("import sqlContext.implicits._")
+  command("import sqlContext.sql")
+  command("import org.apache.spark.sql.Dsl._")
 }
   }
 


spark git commit: [SPARK-5586][Spark Shell][SQL] Make `sqlContext` available in spark shell

2015-02-06 Thread marmbrus
Repository: spark
Updated Branches:
  refs/heads/branch-1.3 1b148adfc -> 2ef9853e7


[SPARK-5586][Spark Shell][SQL] Make `sqlContext` available in spark shell

Result is like this
```
15/02/05 13:41:22 INFO SparkILoop: Created spark context..
Spark context available as sc.
15/02/05 13:41:22 INFO SparkILoop: Created sql context..
SQLContext available as sqlContext.

scala> sq
sql          sqlContext   sqlParser    sqrt
```

Author: OopsOutOfMemory victorshen...@126.com

Closes #4387 from OopsOutOfMemory/sqlContextInShell and squashes the following 
commits:

c7f5203 [OopsOutOfMemory] auto-import sql() function
e160697 [OopsOutOfMemory] Merge branch 'sqlContextInShell' of 
https://github.com/OopsOutOfMemory/spark into sqlContextInShell
37c0a16 [OopsOutOfMemory] auto detect hive support
a9c59d9 [OopsOutOfMemory] rename and reduce range of imports
6b9e309 [OopsOutOfMemory] Merge branch 'master' into sqlContextInShell
cae652f [OopsOutOfMemory] make sqlContext available in spark shell

(cherry picked from commit 3d3ecd77411bfdd1c72be51616c46a6caf839be2)
Signed-off-by: Michael Armbrust mich...@databricks.com


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/2ef9853e
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/2ef9853e
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/2ef9853e

Branch: refs/heads/branch-1.3
Commit: 2ef9853e7ad42f551010697c389074d94fb69b54
Parents: 1b148ad
Author: OopsOutOfMemory victorshen...@126.com
Authored: Fri Feb 6 13:20:10 2015 -0800
Committer: Michael Armbrust mich...@databricks.com
Committed: Fri Feb 6 13:20:18 2015 -0800

--
 .../scala/org/apache/spark/repl/SparkILoop.scala | 19 +++
 .../org/apache/spark/repl/SparkILoopInit.scala   | 10 ++
 .../main/scala/org/apache/spark/repl/Main.scala  | 18 ++
 .../scala/org/apache/spark/repl/SparkILoop.scala | 12 +++-
 4 files changed, 58 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/spark/blob/2ef9853e/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoop.scala
--
diff --git 
a/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoop.scala 
b/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoop.scala
index 72c1a98..b4db3df 100644
--- a/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoop.scala
+++ b/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoop.scala
@@ -45,6 +45,7 @@ import scala.reflect.api.{Mirror, TypeCreator, Universe => 
ApiUniverse}
 import org.apache.spark.Logging
 import org.apache.spark.SparkConf
 import org.apache.spark.SparkContext
+import org.apache.spark.sql.SQLContext
 import org.apache.spark.util.Utils
 
 /** The Scala interactive shell.  It provides a read-eval-print loop
@@ -130,6 +131,7 @@ class SparkILoop(
   // NOTE: Must be public for visibility
   @DeveloperApi
   var sparkContext: SparkContext = _
+  var sqlContext: SQLContext = _
 
   override def echoCommandMessage(msg: String) {
 intp.reporter printMessage msg
@@ -1016,6 +1018,23 @@ class SparkILoop(
 sparkContext
   }
 
+  @DeveloperApi
+  def createSQLContext(): SQLContext = {
+val name = "org.apache.spark.sql.hive.HiveContext"
+val loader = Utils.getContextOrSparkClassLoader
+try {
+  sqlContext = loader.loadClass(name).getConstructor(classOf[SparkContext])
+.newInstance(sparkContext).asInstanceOf[SQLContext] 
+  logInfo("Created sql context (with Hive support)..")
+}
+catch {
+  case cnf: java.lang.ClassNotFoundException =>
+sqlContext = new SQLContext(sparkContext)
+logInfo("Created sql context..")
+}
+sqlContext
+  }
+
   private def getMaster(): String = {
 val master = this.master match {
  case Some(m) => m

http://git-wip-us.apache.org/repos/asf/spark/blob/2ef9853e/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoopInit.scala
--
diff --git 
a/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoopInit.scala 
b/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoopInit.scala
index 99bd777..0cf2de6 100644
--- a/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoopInit.scala
+++ b/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoopInit.scala
@@ -127,7 +127,17 @@ private[repl] trait SparkILoopInit {
_sc
  }
 )
+  command("""
+ @transient val sqlContext = {
+   val _sqlContext = 
org.apache.spark.repl.Main.interp.createSQLContext()
+   println("SQL context available as sqlContext.")
+   _sqlContext
+ }
+""")
   command("import org.apache.spark.SparkContext._")
+  command(import