Repository: spark
Updated Branches:
  refs/heads/master 753b04dea -> 44233865c


[SQL] Make it possible to create Java/Python SQLContexts from an existing Scala 
SQLContext.

Author: Michael Armbrust <mich...@databricks.com>

Closes #761 from marmbrus/existingContext and squashes the following commits:

4651051 [Michael Armbrust] Make it possible to create Java/Python SQLContexts 
from an existing Scala SQLContext.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/44233865
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/44233865
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/44233865

Branch: refs/heads/master
Commit: 44233865cf8020741d862d33cc660c88e9315dea
Parents: 753b04d
Author: Michael Armbrust <mich...@databricks.com>
Authored: Tue May 13 21:23:51 2014 -0700
Committer: Reynold Xin <r...@apache.org>
Committed: Tue May 13 21:23:51 2014 -0700

----------------------------------------------------------------------
 python/pyspark/sql.py                                         | 7 +++++--
 .../scala/org/apache/spark/sql/api/java/JavaSQLContext.scala  | 4 ++--
 2 files changed, 7 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/44233865/python/pyspark/sql.py
----------------------------------------------------------------------
diff --git a/python/pyspark/sql.py b/python/pyspark/sql.py
index 6789d70..bbe69e7 100644
--- a/python/pyspark/sql.py
+++ b/python/pyspark/sql.py
@@ -28,7 +28,7 @@ class SQLContext:
     register L{SchemaRDD}s as tables, execute sql over tables, cache tables, and read parquet files.
     """
 
-    def __init__(self, sparkContext):
+    def __init__(self, sparkContext, sqlContext = None):
         """
         Create a new SQLContext.
 
@@ -58,10 +58,13 @@ class SQLContext:
         self._jvm = self._sc._jvm
         self._pythonToJavaMap = self._jvm.PythonRDD.pythonToJavaMap
 
+        if sqlContext:
+            self._scala_SQLContext = sqlContext
+
     @property
     def _ssql_ctx(self):
         """
-        Accessor for the JVM SparkSQL context.  Subclasses can overrite this property to provide
+        Accessor for the JVM SparkSQL context.  Subclasses can override this property to provide
         their own JVM Contexts.
         """
         if not hasattr(self, '_scala_SQLContext'):

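With this change the Python SQLContext can wrap an existing JVM SQLContext
instead of always constructing its own. A minimal sketch of the new usage
(the py4j constructor call mirrors the existing _ssql_ctx accessor; the
local master and app name are just for illustration):

    from pyspark import SparkContext
    from pyspark.sql import SQLContext

    sc = SparkContext("local", "example")

    # Build the Scala SQLContext through py4j, then hand the same JVM
    # instance to the Python wrapper so both sides share one set of tables.
    scala_sql_ctx = sc._jvm.SQLContext(sc._jsc.sc())
    sqlCtx = SQLContext(sc, scala_sql_ctx)
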
http://git-wip-us.apache.org/repos/asf/spark/blob/44233865/sql/core/src/main/scala/org/apache/spark/sql/api/java/JavaSQLContext.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/api/java/JavaSQLContext.scala b/sql/core/src/main/scala/org/apache/spark/sql/api/java/JavaSQLContext.scala
index 57facbe..6f7d431 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/api/java/JavaSQLContext.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/api/java/JavaSQLContext.scala
@@ -33,9 +33,9 @@ import org.apache.spark.util.Utils
 /**
  * The entry point for executing Spark SQL queries from a Java program.
  */
-class JavaSQLContext(sparkContext: JavaSparkContext) {
+class JavaSQLContext(val sqlContext: SQLContext) {
 
-  val sqlContext = new SQLContext(sparkContext.sc)
+  def this(sparkContext: JavaSparkContext) = this(new SQLContext(sparkContext.sc))
 
   /**
    * Executes a query expressed in SQL, returning the result as a JavaSchemaRDD

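On the Scala side, the primary constructor now takes the SQLContext directly,
and the old JavaSparkContext signature survives as an auxiliary constructor.
A short sketch of sharing one context between the Scala and Java APIs (the
local master is illustrative):

    import org.apache.spark.SparkContext
    import org.apache.spark.sql.SQLContext
    import org.apache.spark.sql.api.java.JavaSQLContext

    val sc = new SparkContext("local", "example")
    val sqlContext = new SQLContext(sc)

    // Wrap the existing Scala context rather than allocating a second one,
    // so state such as registered tables is shared across both APIs.
    val javaSqlContext = new JavaSQLContext(sqlContext)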