GEODE-37 changed package name in spark-connector

Project: http://git-wip-us.apache.org/repos/asf/incubator-geode/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-geode/commit/c32fccba
Tree: http://git-wip-us.apache.org/repos/asf/incubator-geode/tree/c32fccba
Diff: http://git-wip-us.apache.org/repos/asf/incubator-geode/diff/c32fccba

Branch: refs/heads/develop
Commit: c32fccba55db48bbdca7567bb62470895fcb5b05
Parents: 1fc4e6e
Author: Hitesh Khamesra <hkhame...@pivotal.io>
Authored: Tue Sep 20 16:00:29 2016 -0700
Committer: Hitesh Khamesra <hkhame...@pivotal.io>
Committed: Tue Sep 20 16:01:03 2016 -0700

----------------------------------------------------------------------
 geode-spark-connector/doc/2_quick.md            | 10 ++++----
 geode-spark-connector/doc/3_connecting.md       |  2 +-
 geode-spark-connector/doc/4_loading.md          |  4 ++--
 geode-spark-connector/doc/6_save_rdd.md         |  2 +-
 geode-spark-connector/doc/7_save_dstream.md     |  2 +-
 geode-spark-connector/doc/8_oql.md              |  4 ++--
 geode-spark-connector/doc/9_java_api.md         |  2 +-
 .../connector/internal/RegionMetadata.java      |  2 +-
 .../internal/geodefunctions/QueryFunction.java  |  2 +-
 .../geodefunctions/RetrieveRegionFunction.java  |  2 +-
 .../RetrieveRegionMetadataFunction.java         |  4 ++--
 .../StructStreamingResultSender.java            |  2 +-
 .../apache/geode/spark/connector/Employee.java  |  2 +-
 .../spark/connector/JavaApiIntegrationTest.java | 24 ++++++++++----------
 .../apache/geode/spark/connector/Portfolio.java |  2 +-
 .../apache/geode/spark/connector/Position.java  |  2 +-
 .../spark/connector/BasicIntegrationTest.scala  | 18 +++++++--------
 .../RDDJoinRegionIntegrationTest.scala          | 12 +++++-----
 .../RetrieveRegionIntegrationTest.scala         | 12 +++++-----
 .../apache/geode/spark/connector/package.scala  |  2 +-
 .../spark/connector/testkit/GeodeCluster.scala  |  2 +-
 .../spark/connector/testkit/GeodeRunner.scala   |  4 ++--
 .../geode/spark/connector/testkit/IOUtils.scala |  2 +-
 .../javaapi/GeodeJavaDStreamFunctions.java      | 10 ++++----
 .../javaapi/GeodeJavaPairDStreamFunctions.java  | 10 ++++----
 .../javaapi/GeodeJavaPairRDDFunctions.java      | 14 ++++++------
 .../javaapi/GeodeJavaRDDFunctions.java          | 14 ++++++------
 .../javaapi/GeodeJavaSQLContextFunctions.java   |  8 +++----
 .../javaapi/GeodeJavaSparkContextFunctions.java | 12 +++++-----
 .../spark/connector/javaapi/GeodeJavaUtil.java  |  4 ++--
 .../geode/spark/connector/GeodeConnection.scala |  6 ++---
 .../spark/connector/GeodeConnectionConf.scala   |  4 ++--
 .../connector/GeodeConnectionManager.scala      |  2 +-
 .../spark/connector/GeodeFunctionDeployer.scala |  2 +-
 .../spark/connector/GeodeKryoRegistrator.scala  |  4 ++--
 .../spark/connector/GeodePairRDDFunctions.scala |  6 ++---
 .../spark/connector/GeodeRDDFunctions.scala     |  6 ++---
 .../connector/GeodeSQLContextFunctions.scala    |  4 ++--
 .../connector/GeodeSparkContextFunctions.scala  |  4 ++--
 .../internal/DefaultGeodeConnection.scala       | 12 +++++-----
 .../DefaultGeodeConnectionManager.scala         |  4 ++--
 .../connector/internal/LocatorHelper.scala      |  2 +-
 .../StructStreamingResultCollector.scala        |  4 ++--
 .../connector/internal/oql/QueryParser.scala    |  2 +-
 .../spark/connector/internal/oql/QueryRDD.scala |  6 ++---
 .../internal/oql/QueryResultCollector.scala     |  2 +-
 .../connector/internal/oql/RDDConverter.scala   |  2 +-
 .../connector/internal/oql/RowBuilder.scala     |  2 +-
 .../connector/internal/oql/SchemaBuilder.scala  |  2 +-
 .../internal/oql/UndefinedSerializer.scala      |  2 +-
 .../connector/internal/rdd/GeodeJoinRDD.scala   |  4 ++--
 .../internal/rdd/GeodeOuterJoinRDD.scala        |  4 ++--
 .../internal/rdd/GeodeRDDPartition.scala        |  2 +-
 .../internal/rdd/GeodeRDDPartitioner.scala      |  6 ++---
 .../internal/rdd/GeodeRDDPartitionerImpl.scala  |  8 +++----
 .../connector/internal/rdd/GeodeRDDWriter.scala |  4 ++--
 .../connector/internal/rdd/GeodeRegionRDD.scala |  6 ++---
 .../connector/javaapi/GeodeJavaRegionRDD.scala  |  4 ++--
 .../spark/connector/javaapi/JavaAPIHelper.scala |  2 +-
 .../apache/geode/spark/connector/package.scala  |  4 ++--
 .../streaming/GeodeDStreamFunctions.scala       | 10 ++++----
 .../spark/connector/streaming/package.scala     |  2 +-
 .../geode/spark/connector/JavaAPITest.java      |  6 ++---
 .../connector/GeodeFunctionDeployerTest.scala   |  2 +-
 .../DefaultGeodeConnectionManagerTest.scala     |  4 ++--
 ...tStreamingResultSenderAndCollectorTest.scala |  2 +-
 .../internal/oql/QueryParserTest.scala          |  6 ++---
 .../connector/ConnectorImplicitsTest.scala      |  4 ++--
 .../connector/GeodeConnectionConfTest.scala     |  4 ++--
 .../connector/GeodeDStreamFunctionsTest.scala   | 12 +++++-----
 .../spark/connector/GeodeRDDFunctionsTest.scala | 14 ++++++------
 .../spark/connector/LocatorHelperTest.scala     |  4 ++--
 .../connector/rdd/GeodeRDDPartitionerTest.scala | 14 ++++++------
 .../connector/rdd/GeodeRegionRDDTest.scala      | 10 ++++----
 .../src/main/java/demo/OQLJavaDemo.java         |  2 +-
 .../src/main/java/demo/PairRDDSaveJavaDemo.java |  4 ++--
 .../src/main/java/demo/RDDSaveJavaDemo.java     |  4 ++--
 .../src/main/java/demo/RegionToRDDJavaDemo.java |  2 +-
 .../src/main/scala/demo/NetworkWordCount.scala  |  4 ++--
 geode-spark-connector/project/Settings.scala    |  2 +-
 80 files changed, 213 insertions(+), 213 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c32fccba/geode-spark-connector/doc/2_quick.md
----------------------------------------------------------------------
diff --git a/geode-spark-connector/doc/2_quick.md b/geode-spark-connector/doc/2_quick.md
index 01f3c06..ed79f39 100644
--- a/geode-spark-connector/doc/2_quick.md
+++ b/geode-spark-connector/doc/2_quick.md
@@ -79,10 +79,10 @@ res0: String = localhost[55221]
 ```
 
 In order to enable Geode specific functions, you need to import 
-`io.pivotal.geode.spark.connector._`
+`org.apache.geode.spark.connector._`
 ```
-scala> import io.pivotal.geode.spark.connector._
-import io.pivotal.geode.spark.connector._
+scala> import org.apache.geode.spark.connector._
+import org.apache.geode.spark.connector._
 ```
 
 ### Save Pair RDD to Geode
@@ -154,7 +154,7 @@ NEXT_STEP_NAME : END
 The same API is used to expose both replicated and partitioned region as RDDs. 
 ```
 scala> val rdd = sc.geodeRegion[String, String]("str_str_region")
-rdd: io.pivotal.geode.spark.connector.rdd.GemFireRDD[String,String] = GemFireRDD[2] at RDD at GemFireRDD.scala:19
+rdd: org.apache.geode.spark.connector.rdd.GemFireRDD[String,String] = GemFireRDD[2] at RDD at GemFireRDD.scala:19
 
 scala> rdd.foreach(println)
 (1,one)
@@ -162,7 +162,7 @@ scala> rdd.foreach(println)
 (2,two)
 
 scala> val rdd2 = sc.geodeRegion[Int, String]("int_str_region")
-rdd2: io.pivotal.geode.spark.connector.rdd.GemFireRDD[Int,String] = GemFireRDD[3] at RDD at GemFireRDD.scala:19
+rdd2: org.apache.geode.spark.connector.rdd.GemFireRDD[Int,String] = GemFireRDD[3] at RDD at GemFireRDD.scala:19
 
 scala> rdd2.foreach(println)
 (2,ab)

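For reference, a minimal sketch of the renamed import in use from the Spark shell, mirroring the quick-start session above (the region names come from the doc; `sc` is the shell's SparkContext and the connector jar plus locator setting are assumed to be in place):
```
import org.apache.geode.spark.connector._

// expose the two pre-created quick-start regions as RDDs
val rdd = sc.geodeRegion[String, String]("str_str_region")
rdd.foreach(println)                        // e.g. (1,one), (3,three), (2,two)

val rdd2 = sc.geodeRegion[Int, String]("int_str_region")
rdd2.foreach(println)                       // e.g. (2,ab) ...
```
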
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c32fccba/geode-spark-connector/doc/3_connecting.md
----------------------------------------------------------------------
diff --git a/geode-spark-connector/doc/3_connecting.md b/geode-spark-connector/doc/3_connecting.md
index 1a4dadd..c0f4628 100644
--- a/geode-spark-connector/doc/3_connecting.md
+++ b/geode-spark-connector/doc/3_connecting.md
@@ -21,7 +21,7 @@ spark.geode.security-password=tiger
  
 Or in the Spark application code:
 ```
-import io.pivotal.geode.spark.connector._
+import org.apache.geode.spark.connector._
 val sparkConf = new SparkConf()
   .set(GeodeLocatorPropKey, "192.168.1.47[10334]")
   .set("spark.geode.security-client-auth-init", 
"com.gemstone.geode.security.templates.UserPasswordAuthInit.create")

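As a sketch of the same configuration wired into an application (the app name and master are placeholders; the security properties apply only when the cluster requires authentication):
```
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.geode.spark.connector._

val sparkConf = new SparkConf()
  .setAppName("geode-connector-app")        // placeholder app name
  .setMaster("local[*]")                    // placeholder master
  .set(GeodeLocatorPropKey, "192.168.1.47[10334]")
  .set("spark.geode.security-client-auth-init",
       "com.gemstone.geode.security.templates.UserPasswordAuthInit.create")

val sc = new SparkContext(sparkConf)
```
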
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c32fccba/geode-spark-connector/doc/4_loading.md
----------------------------------------------------------------------
diff --git a/geode-spark-connector/doc/4_loading.md b/geode-spark-connector/doc/4_loading.md
index d6789dd..fffe6a3 100644
--- a/geode-spark-connector/doc/4_loading.md
+++ b/geode-spark-connector/doc/4_loading.md
@@ -32,7 +32,7 @@ on each Geode server into two RDD partitions by default.
 The number of splits is configurable, the following shows how to set three partitions per Geode server:
 ```
-import io.pivotal.geode.spark.connector._
+import org.apache.geode.spark.connector._
 
 val opConf = Map(PreferredPartitionerPropKey -> ServerSplitsPartitionerName,
                  NumberPartitionsPerServerPropKey -> "3")
@@ -73,7 +73,7 @@ only contains `Emp.class`.
 Now in Spark shell, generate some random `Emp` records, and save them to region `emps` (remember to add `emp.jar` to
 Spark shell classpath before starting Spark shell):
 ```
-import io.pivotal.geode.spark.connector._
+import org.apache.geode.spark.connector._
 import scala.util.Random
 import demo.Emp
 

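A sketch of loading a region with three partitions per Geode server, using the option keys introduced above; passing the option map as a second argument to `geodeRegion` is an assumption about the API, and the region name and key/value types are placeholders:
```
import org.apache.geode.spark.connector._

val opConf = Map(PreferredPartitionerPropKey -> ServerSplitsPartitionerName,
                 NumberPartitionsPerServerPropKey -> "3")

// assumed overload: geodeRegion(regionPath, options)
val rdd = sc.geodeRegion[String, String]("str_str_region", opConf)
println(rdd.partitions.length)   // roughly 3 partitions per Geode server
```
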
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c32fccba/geode-spark-connector/doc/6_save_rdd.md
----------------------------------------------------------------------
diff --git a/geode-spark-connector/doc/6_save_rdd.md b/geode-spark-connector/doc/6_save_rdd.md
index 73fd8b0..5adc028 100644
--- a/geode-spark-connector/doc/6_save_rdd.md
+++ b/geode-spark-connector/doc/6_save_rdd.md
@@ -7,7 +7,7 @@ It is possible to save any RDD to a Geode region. The requirements are:
  - the target region exists.
 
 To save an RDD to an existing Geode region, import 
-`io.pivotal.geode.spark.connector._` and call the `saveToGeode` 
+`org.apache.geode.spark.connector._` and call the `saveToGeode` 
 method on RDD.
 
 ### Save RDD[(K, V)] to Geode

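A minimal save sketch to go with the description above (the target region `str_int_region` is assumed to exist already, as the doc requires):
```
import org.apache.geode.spark.connector._

val counts = sc.parallelize(Seq(("a", 1), ("b", 2), ("c", 3)))
counts.saveToGeode("str_int_region")        // write each (key, value) pair into the region
```
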
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c32fccba/geode-spark-connector/doc/7_save_dstream.md
----------------------------------------------------------------------
diff --git a/geode-spark-connector/doc/7_save_dstream.md b/geode-spark-connector/doc/7_save_dstream.md
index a0019c6..6cd2c66 100644
--- a/geode-spark-connector/doc/7_save_dstream.md
+++ b/geode-spark-connector/doc/7_save_dstream.md
@@ -51,7 +51,7 @@ Now let's save the running word count to Geode region `str_int_region`, which
 simply replace print() with saveToGeode():
 
 ```
-import io.pivotal.geode.spark.connector.streaming._
+import org.apache.geode.spark.connector.streaming._
 runningCounts.saveToGeode("str_int_region")
 ```
 

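A condensed sketch of the streaming example, using per-batch counts rather than the doc's running counts (the socket source, port, and batch interval are assumptions; the region must already exist):
```
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.geode.spark.connector.streaming._

val ssc = new StreamingContext(sc, Seconds(1))
val counts = ssc.socketTextStream("localhost", 9999)
  .flatMap(_.split(" "))
  .map(word => (word, 1))
  .reduceByKey(_ + _)

counts.saveToGeode("str_int_region")        // in place of print()
ssc.start()
ssc.awaitTermination()
```
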
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c32fccba/geode-spark-connector/doc/8_oql.md
----------------------------------------------------------------------
diff --git a/geode-spark-connector/doc/8_oql.md b/geode-spark-connector/doc/8_oql.md
index ef32ef6..88456e5 100644
--- a/geode-spark-connector/doc/8_oql.md
+++ b/geode-spark-connector/doc/8_oql.md
@@ -35,7 +35,7 @@ val conf = new SparkConf()
   .setMaster("local[*]")
   .set(GeodeLocatorPropKey, "localhost[55221]")
   .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
-  .set("spark.kryo.registrator", 
"io.pivotal.geode.spark.connector.GeodeKryoRegistrator")
+  .set("spark.kryo.registrator", 
"org.apache.geode.spark.connector.GeodeKryoRegistrator")
 ```
 
 and register the classes (optional)
@@ -46,7 +46,7 @@ conf.registerKryoClasses(Array(classOf[MyClass1], classOf[MyClass2]))
 Use the following options to start Spark shell:
 ```
  --conf spark.serializer=org.apache.spark.serializer.KryoSerializer
- --conf spark.kryo.registrator=io.pivotal.geode.spark.connector.GeodeKryoRegistrator
+ --conf spark.kryo.registrator=org.apache.geode.spark.connector.GeodeKryoRegistrator
 ```
 
 ## References

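Putting the two Kryo settings together in application code (the app name and master are placeholders; the `registerKryoClasses` call is the optional step mentioned above, shown with stand-in class names):
```
import org.apache.spark.SparkConf
import org.apache.geode.spark.connector._

val conf = new SparkConf()
  .setAppName("geode-oql-app")              // placeholder
  .setMaster("local[*]")
  .set(GeodeLocatorPropKey, "localhost[55221]")
  .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
  .set("spark.kryo.registrator", "org.apache.geode.spark.connector.GeodeKryoRegistrator")

// optional: register application classes with Kryo
// conf.registerKryoClasses(Array(classOf[MyClass1], classOf[MyClass2]))
```
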
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c32fccba/geode-spark-connector/doc/9_java_api.md
----------------------------------------------------------------------
diff --git a/geode-spark-connector/doc/9_java_api.md b/geode-spark-connector/doc/9_java_api.md
index 6fbc636..21d0605 100644
--- a/geode-spark-connector/doc/9_java_api.md
+++ b/geode-spark-connector/doc/9_java_api.md
@@ -9,7 +9,7 @@ The best way to use the Spark Geode Connector Java API is to statically
 import all of the methods in `GeodeJavaUtil`. This utility class is
 the main entry point for Spark Geode Connector Java API.
 ```
-import static io.pivotal.geode.spark.connector.javaapi.GeodeJavaUtil.*;
+import static org.apache.geode.spark.connector.javaapi.GeodeJavaUtil.*;
 ```
 
 Create JavaSparkContext (don't forget about the static import):

http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c32fccba/geode-spark-connector/geode-functions/src/main/java/org/apache/geode/spark/connector/internal/RegionMetadata.java
----------------------------------------------------------------------
diff --git 
a/geode-spark-connector/geode-functions/src/main/java/org/apache/geode/spark/connector/internal/RegionMetadata.java
 
b/geode-spark-connector/geode-functions/src/main/java/org/apache/geode/spark/connector/internal/RegionMetadata.java
index 4fee0e0..d9d49e6 100644
--- 
a/geode-spark-connector/geode-functions/src/main/java/org/apache/geode/spark/connector/internal/RegionMetadata.java
+++ 
b/geode-spark-connector/geode-functions/src/main/java/org/apache/geode/spark/connector/internal/RegionMetadata.java
@@ -14,7 +14,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package io.pivotal.geode.spark.connector.internal;
+package org.apache.geode.spark.connector.internal;
 
 import org.apache.geode.distributed.internal.ServerLocation;
 

http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c32fccba/geode-spark-connector/geode-functions/src/main/java/org/apache/geode/spark/connector/internal/geodefunctions/QueryFunction.java
----------------------------------------------------------------------
diff --git 
a/geode-spark-connector/geode-functions/src/main/java/org/apache/geode/spark/connector/internal/geodefunctions/QueryFunction.java
 
b/geode-spark-connector/geode-functions/src/main/java/org/apache/geode/spark/connector/internal/geodefunctions/QueryFunction.java
index 6e6e295..a6a0910 100644
--- 
a/geode-spark-connector/geode-functions/src/main/java/org/apache/geode/spark/connector/internal/geodefunctions/QueryFunction.java
+++ 
b/geode-spark-connector/geode-functions/src/main/java/org/apache/geode/spark/connector/internal/geodefunctions/QueryFunction.java
@@ -14,7 +14,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package io.pivotal.geode.spark.connector.internal.geodefunctions;
+package org.apache.geode.spark.connector.internal.geodefunctions;
 
 import org.apache.geode.DataSerializer;
 import org.apache.geode.cache.CacheFactory;

http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c32fccba/geode-spark-connector/geode-functions/src/main/java/org/apache/geode/spark/connector/internal/geodefunctions/RetrieveRegionFunction.java
----------------------------------------------------------------------
diff --git 
a/geode-spark-connector/geode-functions/src/main/java/org/apache/geode/spark/connector/internal/geodefunctions/RetrieveRegionFunction.java
 
b/geode-spark-connector/geode-functions/src/main/java/org/apache/geode/spark/connector/internal/geodefunctions/RetrieveRegionFunction.java
index d3a2572..096e4d5 100644
--- 
a/geode-spark-connector/geode-functions/src/main/java/org/apache/geode/spark/connector/internal/geodefunctions/RetrieveRegionFunction.java
+++ 
b/geode-spark-connector/geode-functions/src/main/java/org/apache/geode/spark/connector/internal/geodefunctions/RetrieveRegionFunction.java
@@ -14,7 +14,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package io.pivotal.geode.spark.connector.internal.geodefunctions;
+package org.apache.geode.spark.connector.internal.geodefunctions;
 
 import java.util.Iterator;
 import org.apache.logging.log4j.Logger;

http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c32fccba/geode-spark-connector/geode-functions/src/main/java/org/apache/geode/spark/connector/internal/geodefunctions/RetrieveRegionMetadataFunction.java
----------------------------------------------------------------------
diff --git 
a/geode-spark-connector/geode-functions/src/main/java/org/apache/geode/spark/connector/internal/geodefunctions/RetrieveRegionMetadataFunction.java
 
b/geode-spark-connector/geode-functions/src/main/java/org/apache/geode/spark/connector/internal/geodefunctions/RetrieveRegionMetadataFunction.java
index 6041b70..646bc3e 100644
--- 
a/geode-spark-connector/geode-functions/src/main/java/org/apache/geode/spark/connector/internal/geodefunctions/RetrieveRegionMetadataFunction.java
+++ 
b/geode-spark-connector/geode-functions/src/main/java/org/apache/geode/spark/connector/internal/geodefunctions/RetrieveRegionMetadataFunction.java
@@ -14,7 +14,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package io.pivotal.geode.spark.connector.internal.geodefunctions;
+package org.apache.geode.spark.connector.internal.geodefunctions;
 
 import org.apache.geode.cache.execute.Function;
 import org.apache.geode.cache.execute.FunctionContext;
@@ -24,7 +24,7 @@ import org.apache.geode.internal.cache.BucketServerLocation66;
 import org.apache.geode.internal.cache.LocalRegion;
 import org.apache.geode.internal.cache.PartitionedRegion;
 import org.apache.geode.internal.cache.execute.InternalRegionFunctionContext;
-import io.pivotal.geode.spark.connector.internal.RegionMetadata;
+import org.apache.geode.spark.connector.internal.RegionMetadata;
 
 import java.util.HashMap;
 import java.util.HashSet;

http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c32fccba/geode-spark-connector/geode-functions/src/main/java/org/apache/geode/spark/connector/internal/geodefunctions/StructStreamingResultSender.java
----------------------------------------------------------------------
diff --git 
a/geode-spark-connector/geode-functions/src/main/java/org/apache/geode/spark/connector/internal/geodefunctions/StructStreamingResultSender.java
 
b/geode-spark-connector/geode-functions/src/main/java/org/apache/geode/spark/connector/internal/geodefunctions/StructStreamingResultSender.java
index 9a7dc9d..cd086e0 100644
--- 
a/geode-spark-connector/geode-functions/src/main/java/org/apache/geode/spark/connector/internal/geodefunctions/StructStreamingResultSender.java
+++ 
b/geode-spark-connector/geode-functions/src/main/java/org/apache/geode/spark/connector/internal/geodefunctions/StructStreamingResultSender.java
@@ -14,7 +14,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package io.pivotal.geode.spark.connector.internal.geodefunctions;
+package org.apache.geode.spark.connector.internal.geodefunctions;
 
 import org.apache.geode.DataSerializer;
 import org.apache.geode.cache.execute.ResultSender;

http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c32fccba/geode-spark-connector/geode-spark-connector/src/it/java/ittest/org/apache/geode/spark/connector/Employee.java
----------------------------------------------------------------------
diff --git 
a/geode-spark-connector/geode-spark-connector/src/it/java/ittest/org/apache/geode/spark/connector/Employee.java
 
b/geode-spark-connector/geode-spark-connector/src/it/java/ittest/org/apache/geode/spark/connector/Employee.java
index 9fba9e1..180e632 100644
--- 
a/geode-spark-connector/geode-spark-connector/src/it/java/ittest/org/apache/geode/spark/connector/Employee.java
+++ 
b/geode-spark-connector/geode-spark-connector/src/it/java/ittest/org/apache/geode/spark/connector/Employee.java
@@ -14,7 +14,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package ittest.io.pivotal.geode.spark.connector;
+package ittest.org.apache.geode.spark.connector;
 
 import java.io.Serializable;
 

http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c32fccba/geode-spark-connector/geode-spark-connector/src/it/java/ittest/org/apache/geode/spark/connector/JavaApiIntegrationTest.java
----------------------------------------------------------------------
diff --git 
a/geode-spark-connector/geode-spark-connector/src/it/java/ittest/org/apache/geode/spark/connector/JavaApiIntegrationTest.java
 
b/geode-spark-connector/geode-spark-connector/src/it/java/ittest/org/apache/geode/spark/connector/JavaApiIntegrationTest.java
index f1577f3..281236f 100644
--- 
a/geode-spark-connector/geode-spark-connector/src/it/java/ittest/org/apache/geode/spark/connector/JavaApiIntegrationTest.java
+++ 
b/geode-spark-connector/geode-spark-connector/src/it/java/ittest/org/apache/geode/spark/connector/JavaApiIntegrationTest.java
@@ -14,17 +14,17 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package ittest.io.pivotal.geode.spark.connector;
+package ittest.org.apache.geode.spark.connector;
 
 import org.apache.geode.cache.Region;
 import org.apache.geode.distributed.ConfigurationProperties;
-import io.pivotal.geode.spark.connector.GeodeConnection;
-import io.pivotal.geode.spark.connector.GeodeConnectionConf;
-import io.pivotal.geode.spark.connector.GeodeConnectionConf$;
-import 
io.pivotal.geode.spark.connector.internal.DefaultGeodeConnectionManager$;
-import io.pivotal.geode.spark.connector.javaapi.GeodeJavaRegionRDD;
-import ittest.io.pivotal.geode.spark.connector.testkit.GeodeCluster$;
-import ittest.io.pivotal.geode.spark.connector.testkit.IOUtils;
+import org.apache.geode.spark.connector.GeodeConnection;
+import org.apache.geode.spark.connector.GeodeConnectionConf;
+import org.apache.geode.spark.connector.GeodeConnectionConf$;
+import 
org.apache.geode.spark.connector.internal.DefaultGeodeConnectionManager$;
+import org.apache.geode.spark.connector.javaapi.GeodeJavaRegionRDD;
+import ittest.org.apache.geode.spark.connector.testkit.GeodeCluster$;
+import ittest.org.apache.geode.spark.connector.testkit.IOUtils;
 import org.apache.spark.SparkConf;
 import org.apache.spark.api.java.JavaPairRDD;
 import org.apache.spark.api.java.JavaRDD;
@@ -35,14 +35,14 @@ import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
 import org.scalatest.junit.JUnitSuite;
-import io.pivotal.geode.spark.connector.package$;
+import org.apache.geode.spark.connector.package$;
 import scala.Tuple2;
 import scala.Option;
 import scala.Some;
 import java.util.*;
 
-import static 
io.pivotal.geode.spark.connector.javaapi.GeodeJavaUtil.RDDSaveBatchSizePropKey;
-import static 
io.pivotal.geode.spark.connector.javaapi.GeodeJavaUtil.javaFunctions;
+import static 
org.apache.geode.spark.connector.javaapi.GeodeJavaUtil.RDDSaveBatchSizePropKey;
+import static 
org.apache.geode.spark.connector.javaapi.GeodeJavaUtil.javaFunctions;
 import static org.junit.Assert.*;
 
 public class JavaApiIntegrationTest extends JUnitSuite {
@@ -65,7 +65,7 @@ public class JavaApiIntegrationTest extends JUnitSuite {
     // start spark context in local mode
     Properties props = new Properties();
     props.put("log4j.logger.org.apache.spark", "INFO");
-    props.put("log4j.logger.io.pivotal.geode.spark.connector","DEBUG");
+    props.put("log4j.logger.org.apache.geode.spark.connector","DEBUG");
     IOUtils.configTestLog4j("ERROR", props);
     SparkConf conf = new SparkConf()
             .setAppName("RetrieveRegionIntegrationTest")

http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c32fccba/geode-spark-connector/geode-spark-connector/src/it/java/ittest/org/apache/geode/spark/connector/Portfolio.java
----------------------------------------------------------------------
diff --git 
a/geode-spark-connector/geode-spark-connector/src/it/java/ittest/org/apache/geode/spark/connector/Portfolio.java
 
b/geode-spark-connector/geode-spark-connector/src/it/java/ittest/org/apache/geode/spark/connector/Portfolio.java
index 63477eb..dccebc9 100644
--- 
a/geode-spark-connector/geode-spark-connector/src/it/java/ittest/org/apache/geode/spark/connector/Portfolio.java
+++ 
b/geode-spark-connector/geode-spark-connector/src/it/java/ittest/org/apache/geode/spark/connector/Portfolio.java
@@ -14,7 +14,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package ittest.io.pivotal.geode.spark.connector;
+package ittest.org.apache.geode.spark.connector;
 
 import java.io.Serializable;
 import java.util.LinkedHashMap;

http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c32fccba/geode-spark-connector/geode-spark-connector/src/it/java/ittest/org/apache/geode/spark/connector/Position.java
----------------------------------------------------------------------
diff --git 
a/geode-spark-connector/geode-spark-connector/src/it/java/ittest/org/apache/geode/spark/connector/Position.java
 
b/geode-spark-connector/geode-spark-connector/src/it/java/ittest/org/apache/geode/spark/connector/Position.java
index 7c99ef7..0f24cdb 100644
--- 
a/geode-spark-connector/geode-spark-connector/src/it/java/ittest/org/apache/geode/spark/connector/Position.java
+++ 
b/geode-spark-connector/geode-spark-connector/src/it/java/ittest/org/apache/geode/spark/connector/Position.java
@@ -14,7 +14,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package ittest.io.pivotal.geode.spark.connector;
+package ittest.org.apache.geode.spark.connector;
 
 import java.io.Serializable;
 import java.util.Properties;

http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c32fccba/geode-spark-connector/geode-spark-connector/src/it/scala/ittest/org/apache/geode/spark/connector/BasicIntegrationTest.scala
----------------------------------------------------------------------
diff --git 
a/geode-spark-connector/geode-spark-connector/src/it/scala/ittest/org/apache/geode/spark/connector/BasicIntegrationTest.scala
 
b/geode-spark-connector/geode-spark-connector/src/it/scala/ittest/org/apache/geode/spark/connector/BasicIntegrationTest.scala
index cb1b329..c057e1d 100644
--- 
a/geode-spark-connector/geode-spark-connector/src/it/scala/ittest/org/apache/geode/spark/connector/BasicIntegrationTest.scala
+++ 
b/geode-spark-connector/geode-spark-connector/src/it/scala/ittest/org/apache/geode/spark/connector/BasicIntegrationTest.scala
@@ -14,17 +14,17 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package ittest.io.pivotal.geode.spark.connector
+package ittest.org.apache.geode.spark.connector
 
 import java.util.Properties
 import org.apache.geode.cache.query.QueryService
 import org.apache.geode.cache.query.internal.StructImpl
-import io.pivotal.geode.spark.connector._
+import org.apache.geode.spark.connector._
 import org.apache.geode.cache.Region
-import io.pivotal.geode.spark.connector.internal.{RegionMetadata, 
DefaultGeodeConnectionManager}
-import io.pivotal.geode.spark.connector.internal.oql.{RDDConverter, QueryRDD}
-import ittest.io.pivotal.geode.spark.connector.testkit.GeodeCluster
-import ittest.io.pivotal.geode.spark.connector.testkit.IOUtils
+import org.apache.geode.spark.connector.internal.{RegionMetadata, 
DefaultGeodeConnectionManager}
+import org.apache.geode.spark.connector.internal.oql.{RDDConverter, QueryRDD}
+import ittest.org.apache.geode.spark.connector.testkit.GeodeCluster
+import ittest.org.apache.geode.spark.connector.testkit.IOUtils
 import org.apache.spark.streaming.{Seconds, StreamingContext, TestInputDStream}
 import org.apache.spark.{SparkContext, SparkConf}
 import org.scalatest.{BeforeAndAfterAll, FunSuite, Matchers}
@@ -46,14 +46,14 @@ class BasicIntegrationTest extends FunSuite with Matchers 
with BeforeAndAfterAll
 
     // start spark context in local mode
     IOUtils.configTestLog4j("ERROR", "log4j.logger.org.apache.spark" -> "INFO",
-                            "log4j.logger.io.pivotal.geode.spark.connector" -> 
"DEBUG")
+                            "log4j.logger.org.apache.geode.spark.connector" -> 
"DEBUG")
     val conf = new SparkConf()
       .setAppName("BasicIntegrationTest")
       .setMaster("local[2]")
       .set("spark.streaming.clock", 
"org.apache.spark.streaming.util.ManualClock")
       .set(GeodeLocatorPropKey, s"localhost[$locatorPort]")
       .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
-      .set("spark.kryo.registrator", 
"io.pivotal.geode.spark.connector.GeodeKryoRegistrator")
+      .set("spark.kryo.registrator", 
"org.apache.geode.spark.connector.GeodeKryoRegistrator")
 
     sc = new SparkContext(conf)
   }
@@ -561,7 +561,7 @@ class BasicIntegrationTest extends FunSuite with Matchers 
with BeforeAndAfterAll
 
   test("Basic DStream test") {
     import 
org.apache.spark.streaming.scheduler.{StreamingListenerBatchCompleted, 
StreamingListener}
-    import io.pivotal.geode.spark.connector.streaming._
+    import org.apache.geode.spark.connector.streaming._
     import org.apache.spark.streaming.ManualClockHelper
 
     class TestStreamListener extends StreamingListener {

http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c32fccba/geode-spark-connector/geode-spark-connector/src/it/scala/ittest/org/apache/geode/spark/connector/RDDJoinRegionIntegrationTest.scala
----------------------------------------------------------------------
diff --git 
a/geode-spark-connector/geode-spark-connector/src/it/scala/ittest/org/apache/geode/spark/connector/RDDJoinRegionIntegrationTest.scala
 
b/geode-spark-connector/geode-spark-connector/src/it/scala/ittest/org/apache/geode/spark/connector/RDDJoinRegionIntegrationTest.scala
index 04d4198..1688345 100644
--- 
a/geode-spark-connector/geode-spark-connector/src/it/scala/ittest/org/apache/geode/spark/connector/RDDJoinRegionIntegrationTest.scala
+++ 
b/geode-spark-connector/geode-spark-connector/src/it/scala/ittest/org/apache/geode/spark/connector/RDDJoinRegionIntegrationTest.scala
@@ -14,15 +14,15 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package ittest.io.pivotal.geode.spark.connector
+package ittest.org.apache.geode.spark.connector
 
 import java.util.Properties
 
-import io.pivotal.geode.spark.connector._
+import org.apache.geode.spark.connector._
 import org.apache.geode.cache.Region
-import io.pivotal.geode.spark.connector.internal.DefaultGeodeConnectionManager
-import ittest.io.pivotal.geode.spark.connector.testkit.GeodeCluster
-import ittest.io.pivotal.geode.spark.connector.testkit.IOUtils
+import org.apache.geode.spark.connector.internal.DefaultGeodeConnectionManager
+import ittest.org.apache.geode.spark.connector.testkit.GeodeCluster
+import ittest.org.apache.geode.spark.connector.testkit.IOUtils
 import org.apache.spark.{SparkContext, SparkConf}
 import org.scalatest.{BeforeAndAfterAll, FunSuite, Matchers}
 import java.util.{HashMap => JHashMap}
@@ -42,7 +42,7 @@ class RDDJoinRegionIntegrationTest extends FunSuite with 
Matchers with BeforeAnd
 
     // start spark context in local mode
     IOUtils.configTestLog4j("ERROR", "log4j.logger.org.apache.spark" -> "INFO",
-      "log4j.logger.io.pivotal.geode.spark.connector" -> "DEBUG")
+      "log4j.logger.org.apache.geode.spark.connector" -> "DEBUG")
     val conf = new SparkConf()
       .setAppName("RDDJoinRegionIntegrationTest")
       .setMaster("local[2]")

http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c32fccba/geode-spark-connector/geode-spark-connector/src/it/scala/ittest/org/apache/geode/spark/connector/RetrieveRegionIntegrationTest.scala
----------------------------------------------------------------------
diff --git 
a/geode-spark-connector/geode-spark-connector/src/it/scala/ittest/org/apache/geode/spark/connector/RetrieveRegionIntegrationTest.scala
 
b/geode-spark-connector/geode-spark-connector/src/it/scala/ittest/org/apache/geode/spark/connector/RetrieveRegionIntegrationTest.scala
index 93e7cbf..7c441a3 100644
--- 
a/geode-spark-connector/geode-spark-connector/src/it/scala/ittest/org/apache/geode/spark/connector/RetrieveRegionIntegrationTest.scala
+++ 
b/geode-spark-connector/geode-spark-connector/src/it/scala/ittest/org/apache/geode/spark/connector/RetrieveRegionIntegrationTest.scala
@@ -14,15 +14,15 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package ittest.io.pivotal.geode.spark.connector
+package ittest.org.apache.geode.spark.connector
 
 import java.util.Properties
 
-import io.pivotal.geode.spark.connector._
+import org.apache.geode.spark.connector._
 import org.apache.geode.cache.Region
-import io.pivotal.geode.spark.connector.internal.DefaultGeodeConnectionManager
-import ittest.io.pivotal.geode.spark.connector.testkit.GeodeCluster
-import ittest.io.pivotal.geode.spark.connector.testkit.IOUtils
+import org.apache.geode.spark.connector.internal.DefaultGeodeConnectionManager
+import ittest.org.apache.geode.spark.connector.testkit.GeodeCluster
+import ittest.org.apache.geode.spark.connector.testkit.IOUtils
 import org.apache.spark.{SparkContext, SparkConf}
 import org.scalatest.{Tag, BeforeAndAfterAll, FunSuite, Matchers}
 import java.util.{HashMap => JHashMap}
@@ -43,7 +43,7 @@ class RetrieveRegionIntegrationTest extends FunSuite with 
Matchers with BeforeAn
 
     // start spark context in local mode
     IOUtils.configTestLog4j("ERROR", "log4j.logger.org.apache.spark" -> "INFO",
-                            "log4j.logger.io.pivotal.geode.spark.connector" -> 
"DEBUG")
+                            "log4j.logger.org.apache.geode.spark.connector" -> 
"DEBUG")
     val conf = new SparkConf()
       .setAppName("RetrieveRegionIntegrationTest")
       .setMaster("local[2]")

http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c32fccba/geode-spark-connector/geode-spark-connector/src/it/scala/ittest/org/apache/geode/spark/connector/package.scala
----------------------------------------------------------------------
diff --git 
a/geode-spark-connector/geode-spark-connector/src/it/scala/ittest/org/apache/geode/spark/connector/package.scala
 
b/geode-spark-connector/geode-spark-connector/src/it/scala/ittest/org/apache/geode/spark/connector/package.scala
index b8571d8..fb379b4 100644
--- 
a/geode-spark-connector/geode-spark-connector/src/it/scala/ittest/org/apache/geode/spark/connector/package.scala
+++ 
b/geode-spark-connector/geode-spark-connector/src/it/scala/ittest/org/apache/geode/spark/connector/package.scala
@@ -14,7 +14,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package ittest.io.pivotal.geode.spark
+package ittest.org.apache.geode.spark
 
 import org.scalatest.Tag
 

http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c32fccba/geode-spark-connector/geode-spark-connector/src/it/scala/ittest/org/apache/geode/spark/connector/testkit/GeodeCluster.scala
----------------------------------------------------------------------
diff --git 
a/geode-spark-connector/geode-spark-connector/src/it/scala/ittest/org/apache/geode/spark/connector/testkit/GeodeCluster.scala
 
b/geode-spark-connector/geode-spark-connector/src/it/scala/ittest/org/apache/geode/spark/connector/testkit/GeodeCluster.scala
index 18b2fd7..dd31bfe 100644
--- 
a/geode-spark-connector/geode-spark-connector/src/it/scala/ittest/org/apache/geode/spark/connector/testkit/GeodeCluster.scala
+++ 
b/geode-spark-connector/geode-spark-connector/src/it/scala/ittest/org/apache/geode/spark/connector/testkit/GeodeCluster.scala
@@ -14,7 +14,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package ittest.io.pivotal.geode.spark.connector.testkit
+package ittest.org.apache.geode.spark.connector.testkit
 
 import java.util.Properties
 

http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c32fccba/geode-spark-connector/geode-spark-connector/src/it/scala/ittest/org/apache/geode/spark/connector/testkit/GeodeRunner.scala
----------------------------------------------------------------------
diff --git 
a/geode-spark-connector/geode-spark-connector/src/it/scala/ittest/org/apache/geode/spark/connector/testkit/GeodeRunner.scala
 
b/geode-spark-connector/geode-spark-connector/src/it/scala/ittest/org/apache/geode/spark/connector/testkit/GeodeRunner.scala
index 725a012..f2f5d06 100644
--- 
a/geode-spark-connector/geode-spark-connector/src/it/scala/ittest/org/apache/geode/spark/connector/testkit/GeodeRunner.scala
+++ 
b/geode-spark-connector/geode-spark-connector/src/it/scala/ittest/org/apache/geode/spark/connector/testkit/GeodeRunner.scala
@@ -14,7 +14,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package ittest.io.pivotal.geode.spark.connector.testkit
+package ittest.org.apache.geode.spark.connector.testkit
 
 import java.io.{IOException, File}
 import java.net.InetAddress
@@ -112,7 +112,7 @@ class GeodeRunner(settings: Properties) {
   
   private def registerFunction(jmxHttpPort:Int, jar:File) {
     println("Deploying:" + jar.getName)
-    import io.pivotal.geode.spark.connector.GeodeFunctionDeployer
+    import org.apache.geode.spark.connector.GeodeFunctionDeployer
     val deployer = new GeodeFunctionDeployer(new HttpClient())
     deployer.deploy("localhost", jmxHttpPort, jar)
   }

http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c32fccba/geode-spark-connector/geode-spark-connector/src/it/scala/ittest/org/apache/geode/spark/connector/testkit/IOUtils.scala
----------------------------------------------------------------------
diff --git 
a/geode-spark-connector/geode-spark-connector/src/it/scala/ittest/org/apache/geode/spark/connector/testkit/IOUtils.scala
 
b/geode-spark-connector/geode-spark-connector/src/it/scala/ittest/org/apache/geode/spark/connector/testkit/IOUtils.scala
index 6d667e9..2ac4257 100644
--- 
a/geode-spark-connector/geode-spark-connector/src/it/scala/ittest/org/apache/geode/spark/connector/testkit/IOUtils.scala
+++ 
b/geode-spark-connector/geode-spark-connector/src/it/scala/ittest/org/apache/geode/spark/connector/testkit/IOUtils.scala
@@ -14,7 +14,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package ittest.io.pivotal.geode.spark.connector.testkit
+package ittest.org.apache.geode.spark.connector.testkit
 
 import java.io.{File, IOException}
 import java.net.{InetAddress, Socket}

http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c32fccba/geode-spark-connector/geode-spark-connector/src/main/java/org/apache/geode/spark/connector/javaapi/GeodeJavaDStreamFunctions.java
----------------------------------------------------------------------
diff --git 
a/geode-spark-connector/geode-spark-connector/src/main/java/org/apache/geode/spark/connector/javaapi/GeodeJavaDStreamFunctions.java
 
b/geode-spark-connector/geode-spark-connector/src/main/java/org/apache/geode/spark/connector/javaapi/GeodeJavaDStreamFunctions.java
index e7c7cf9..fcf91c2 100644
--- 
a/geode-spark-connector/geode-spark-connector/src/main/java/org/apache/geode/spark/connector/javaapi/GeodeJavaDStreamFunctions.java
+++ 
b/geode-spark-connector/geode-spark-connector/src/main/java/org/apache/geode/spark/connector/javaapi/GeodeJavaDStreamFunctions.java
@@ -14,22 +14,22 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package io.pivotal.geode.spark.connector.javaapi;
+package org.apache.geode.spark.connector.javaapi;
 
-import io.pivotal.geode.spark.connector.GeodeConnectionConf;
-import io.pivotal.geode.spark.connector.streaming.GeodeDStreamFunctions;
+import org.apache.geode.spark.connector.GeodeConnectionConf;
+import org.apache.geode.spark.connector.streaming.GeodeDStreamFunctions;
 import org.apache.spark.api.java.function.PairFunction;
 import org.apache.spark.streaming.api.java.JavaDStream;
 import java.util.Properties;
 
-import static io.pivotal.geode.spark.connector.javaapi.JavaAPIHelper.*;
+import static org.apache.geode.spark.connector.javaapi.JavaAPIHelper.*;
 
 /**
  * A Java API wrapper over {@link 
org.apache.spark.streaming.api.java.JavaDStream}
  * to provide Geode Spark Connector functionality.
  *
  * <p>To obtain an instance of this wrapper, use one of the factory methods in 
{@link
- * io.pivotal.geode.spark.connector.javaapi.GeodeJavaUtil} class.</p>
+ * org.apache.geode.spark.connector.javaapi.GeodeJavaUtil} class.</p>
  */ 
 public class GeodeJavaDStreamFunctions<T> {
   

http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c32fccba/geode-spark-connector/geode-spark-connector/src/main/java/org/apache/geode/spark/connector/javaapi/GeodeJavaPairDStreamFunctions.java
----------------------------------------------------------------------
diff --git 
a/geode-spark-connector/geode-spark-connector/src/main/java/org/apache/geode/spark/connector/javaapi/GeodeJavaPairDStreamFunctions.java
 
b/geode-spark-connector/geode-spark-connector/src/main/java/org/apache/geode/spark/connector/javaapi/GeodeJavaPairDStreamFunctions.java
index 2c83255..479f3e8 100644
--- 
a/geode-spark-connector/geode-spark-connector/src/main/java/org/apache/geode/spark/connector/javaapi/GeodeJavaPairDStreamFunctions.java
+++ 
b/geode-spark-connector/geode-spark-connector/src/main/java/org/apache/geode/spark/connector/javaapi/GeodeJavaPairDStreamFunctions.java
@@ -14,21 +14,21 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package io.pivotal.geode.spark.connector.javaapi;
+package org.apache.geode.spark.connector.javaapi;
 
-import io.pivotal.geode.spark.connector.GeodeConnectionConf;
-import io.pivotal.geode.spark.connector.streaming.GeodePairDStreamFunctions;
+import org.apache.geode.spark.connector.GeodeConnectionConf;
+import org.apache.geode.spark.connector.streaming.GeodePairDStreamFunctions;
 import org.apache.spark.streaming.api.java.JavaPairDStream;
 import java.util.Properties;
 
-import static io.pivotal.geode.spark.connector.javaapi.JavaAPIHelper.*;
+import static org.apache.geode.spark.connector.javaapi.JavaAPIHelper.*;
 
 /**
  * A Java API wrapper over {@link 
org.apache.spark.streaming.api.java.JavaPairDStream}
  * to provide Geode Spark Connector functionality.
  *
  * <p>To obtain an instance of this wrapper, use one of the factory methods in 
{@link
- * io.pivotal.geode.spark.connector.javaapi.GeodeJavaUtil} class.</p>
+ * org.apache.geode.spark.connector.javaapi.GeodeJavaUtil} class.</p>
  */
 public class GeodeJavaPairDStreamFunctions<K, V> {
   

http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c32fccba/geode-spark-connector/geode-spark-connector/src/main/java/org/apache/geode/spark/connector/javaapi/GeodeJavaPairRDDFunctions.java
----------------------------------------------------------------------
diff --git 
a/geode-spark-connector/geode-spark-connector/src/main/java/org/apache/geode/spark/connector/javaapi/GeodeJavaPairRDDFunctions.java
 
b/geode-spark-connector/geode-spark-connector/src/main/java/org/apache/geode/spark/connector/javaapi/GeodeJavaPairRDDFunctions.java
index 3278a5b..52d6eec 100644
--- 
a/geode-spark-connector/geode-spark-connector/src/main/java/org/apache/geode/spark/connector/javaapi/GeodeJavaPairRDDFunctions.java
+++ 
b/geode-spark-connector/geode-spark-connector/src/main/java/org/apache/geode/spark/connector/javaapi/GeodeJavaPairRDDFunctions.java
@@ -14,12 +14,12 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package io.pivotal.geode.spark.connector.javaapi;
+package org.apache.geode.spark.connector.javaapi;
 
-import io.pivotal.geode.spark.connector.GeodeConnectionConf;
-import io.pivotal.geode.spark.connector.GeodePairRDDFunctions;
-import io.pivotal.geode.spark.connector.internal.rdd.GeodeJoinRDD;
-import io.pivotal.geode.spark.connector.internal.rdd.GeodeOuterJoinRDD;
+import org.apache.geode.spark.connector.GeodeConnectionConf;
+import org.apache.geode.spark.connector.GeodePairRDDFunctions;
+import org.apache.geode.spark.connector.internal.rdd.GeodeJoinRDD;
+import org.apache.geode.spark.connector.internal.rdd.GeodeOuterJoinRDD;
 import org.apache.spark.api.java.JavaPairRDD;
 import org.apache.spark.api.java.function.Function;
 import scala.Option;
@@ -28,14 +28,14 @@ import scala.reflect.ClassTag;
 
 import java.util.Properties;
 
-import static io.pivotal.geode.spark.connector.javaapi.JavaAPIHelper.*;
+import static org.apache.geode.spark.connector.javaapi.JavaAPIHelper.*;
 
 /**
  * A Java API wrapper over {@link org.apache.spark.api.java.JavaPairRDD} to 
provide Geode Spark
  * Connector functionality.
  *
  * <p>To obtain an instance of this wrapper, use one of the factory methods in 
{@link
- * io.pivotal.geode.spark.connector.javaapi.GeodeJavaUtil} class.</p>
+ * org.apache.geode.spark.connector.javaapi.GeodeJavaUtil} class.</p>
  */
 public class GeodeJavaPairRDDFunctions<K, V> {
 

http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c32fccba/geode-spark-connector/geode-spark-connector/src/main/java/org/apache/geode/spark/connector/javaapi/GeodeJavaRDDFunctions.java
----------------------------------------------------------------------
diff --git 
a/geode-spark-connector/geode-spark-connector/src/main/java/org/apache/geode/spark/connector/javaapi/GeodeJavaRDDFunctions.java
 
b/geode-spark-connector/geode-spark-connector/src/main/java/org/apache/geode/spark/connector/javaapi/GeodeJavaRDDFunctions.java
index e4f6f36..be60ede 100644
--- 
a/geode-spark-connector/geode-spark-connector/src/main/java/org/apache/geode/spark/connector/javaapi/GeodeJavaRDDFunctions.java
+++ 
b/geode-spark-connector/geode-spark-connector/src/main/java/org/apache/geode/spark/connector/javaapi/GeodeJavaRDDFunctions.java
@@ -14,12 +14,12 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package io.pivotal.geode.spark.connector.javaapi;
+package org.apache.geode.spark.connector.javaapi;
 
-import io.pivotal.geode.spark.connector.GeodeConnectionConf;
-import io.pivotal.geode.spark.connector.GeodeRDDFunctions;
-import io.pivotal.geode.spark.connector.internal.rdd.GeodeJoinRDD;
-import io.pivotal.geode.spark.connector.internal.rdd.GeodeOuterJoinRDD;
+import org.apache.geode.spark.connector.GeodeConnectionConf;
+import org.apache.geode.spark.connector.GeodeRDDFunctions;
+import org.apache.geode.spark.connector.internal.rdd.GeodeJoinRDD;
+import org.apache.geode.spark.connector.internal.rdd.GeodeOuterJoinRDD;
 import org.apache.spark.api.java.JavaPairRDD;
 import org.apache.spark.api.java.JavaRDD;
 import org.apache.spark.api.java.function.Function;
@@ -29,14 +29,14 @@ import scala.reflect.ClassTag;
 
 import java.util.Properties;
 
-import static io.pivotal.geode.spark.connector.javaapi.JavaAPIHelper.*;
+import static org.apache.geode.spark.connector.javaapi.JavaAPIHelper.*;
 
 /**
  * A Java API wrapper over {@link org.apache.spark.api.java.JavaRDD} to 
provide Geode Spark
  * Connector functionality.
  *
  * <p>To obtain an instance of this wrapper, use one of the factory methods in 
{@link
- * io.pivotal.geode.spark.connector.javaapi.GeodeJavaUtil} class.</p>
+ * org.apache.geode.spark.connector.javaapi.GeodeJavaUtil} class.</p>
  */
 public class GeodeJavaRDDFunctions<T> {
 

http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c32fccba/geode-spark-connector/geode-spark-connector/src/main/java/org/apache/geode/spark/connector/javaapi/GeodeJavaSQLContextFunctions.java
----------------------------------------------------------------------
diff --git 
a/geode-spark-connector/geode-spark-connector/src/main/java/org/apache/geode/spark/connector/javaapi/GeodeJavaSQLContextFunctions.java
 
b/geode-spark-connector/geode-spark-connector/src/main/java/org/apache/geode/spark/connector/javaapi/GeodeJavaSQLContextFunctions.java
index 3471bf90..5e1e354 100644
--- 
a/geode-spark-connector/geode-spark-connector/src/main/java/org/apache/geode/spark/connector/javaapi/GeodeJavaSQLContextFunctions.java
+++ 
b/geode-spark-connector/geode-spark-connector/src/main/java/org/apache/geode/spark/connector/javaapi/GeodeJavaSQLContextFunctions.java
@@ -14,10 +14,10 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package io.pivotal.geode.spark.connector.javaapi;
+package org.apache.geode.spark.connector.javaapi;
 
-import io.pivotal.geode.spark.connector.GeodeConnectionConf;
-import io.pivotal.geode.spark.connector.GeodeSQLContextFunctions;
+import org.apache.geode.spark.connector.GeodeConnectionConf;
+import org.apache.geode.spark.connector.GeodeSQLContextFunctions;
 import org.apache.spark.sql.DataFrame;
 import org.apache.spark.sql.SQLContext;
 
@@ -26,7 +26,7 @@ import org.apache.spark.sql.SQLContext;
  * OQL functionality.
  *
  * <p></p>To obtain an instance of this wrapper, use one of the factory 
methods in {@link
- * io.pivotal.geode.spark.connector.javaapi.GeodeJavaUtil} class.</p>
+ * org.apache.geode.spark.connector.javaapi.GeodeJavaUtil} class.</p>
  */
 public class GeodeJavaSQLContextFunctions {
 

http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c32fccba/geode-spark-connector/geode-spark-connector/src/main/java/org/apache/geode/spark/connector/javaapi/GeodeJavaSparkContextFunctions.java
----------------------------------------------------------------------
diff --git 
a/geode-spark-connector/geode-spark-connector/src/main/java/org/apache/geode/spark/connector/javaapi/GeodeJavaSparkContextFunctions.java
 
b/geode-spark-connector/geode-spark-connector/src/main/java/org/apache/geode/spark/connector/javaapi/GeodeJavaSparkContextFunctions.java
index ce6b1ff..a257617 100644
--- 
a/geode-spark-connector/geode-spark-connector/src/main/java/org/apache/geode/spark/connector/javaapi/GeodeJavaSparkContextFunctions.java
+++ 
b/geode-spark-connector/geode-spark-connector/src/main/java/org/apache/geode/spark/connector/javaapi/GeodeJavaSparkContextFunctions.java
@@ -14,14 +14,14 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package io.pivotal.geode.spark.connector.javaapi;
+package org.apache.geode.spark.connector.javaapi;
 
 
-import io.pivotal.geode.spark.connector.GeodeConnectionConf;
-import io.pivotal.geode.spark.connector.internal.rdd.GeodeRegionRDD;
-import io.pivotal.geode.spark.connector.internal.rdd.GeodeRegionRDD$;
+import org.apache.geode.spark.connector.GeodeConnectionConf;
+import org.apache.geode.spark.connector.internal.rdd.GeodeRegionRDD;
+import org.apache.geode.spark.connector.internal.rdd.GeodeRegionRDD$;
 import org.apache.spark.SparkContext;
-import static io.pivotal.geode.spark.connector.javaapi.JavaAPIHelper.*;
+import static org.apache.geode.spark.connector.javaapi.JavaAPIHelper.*;
 
 import scala.reflect.ClassTag;
 import java.util.Properties;
@@ -31,7 +31,7 @@ import java.util.Properties;
  * Connector functionality.
  *
  * <p></p>To obtain an instance of this wrapper, use one of the factory 
methods in {@link
- * io.pivotal.geode.spark.connector.javaapi.GeodeJavaUtil} class.</p>
+ * org.apache.geode.spark.connector.javaapi.GeodeJavaUtil} class.</p>
  */
 public class GeodeJavaSparkContextFunctions {
 

http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c32fccba/geode-spark-connector/geode-spark-connector/src/main/java/org/apache/geode/spark/connector/javaapi/GeodeJavaUtil.java
----------------------------------------------------------------------
diff --git 
a/geode-spark-connector/geode-spark-connector/src/main/java/org/apache/geode/spark/connector/javaapi/GeodeJavaUtil.java
 
b/geode-spark-connector/geode-spark-connector/src/main/java/org/apache/geode/spark/connector/javaapi/GeodeJavaUtil.java
index 41fe7e5..8f797ec 100644
--- 
a/geode-spark-connector/geode-spark-connector/src/main/java/org/apache/geode/spark/connector/javaapi/GeodeJavaUtil.java
+++ 
b/geode-spark-connector/geode-spark-connector/src/main/java/org/apache/geode/spark/connector/javaapi/GeodeJavaUtil.java
@@ -14,7 +14,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package io.pivotal.geode.spark.connector.javaapi;
+package org.apache.geode.spark.connector.javaapi;
 
 import org.apache.spark.SparkContext;
 import org.apache.spark.api.java.JavaSparkContext;
@@ -25,7 +25,7 @@ import org.apache.spark.streaming.api.java.JavaDStream;
 import org.apache.spark.streaming.api.java.JavaPairDStream;
 import scala.Tuple2;
 
-import io.pivotal.geode.spark.connector.package$;
+import org.apache.geode.spark.connector.package$;
 
 /**
  * The main entry point to Spark Geode Connector Java API.

http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c32fccba/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/GeodeConnection.scala
----------------------------------------------------------------------
diff --git 
a/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/GeodeConnection.scala
 
b/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/GeodeConnection.scala
index 6c1df67..4343b90 100644
--- 
a/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/GeodeConnection.scala
+++ 
b/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/GeodeConnection.scala
@@ -14,13 +14,13 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package io.pivotal.geode.spark.connector
+package org.apache.geode.spark.connector
 
 import org.apache.geode.cache.execute.ResultCollector
 import org.apache.geode.cache.query.Query
 import org.apache.geode.cache.Region
-import io.pivotal.geode.spark.connector.internal.RegionMetadata
-import io.pivotal.geode.spark.connector.internal.rdd.GeodeRDDPartition
+import org.apache.geode.spark.connector.internal.RegionMetadata
+import org.apache.geode.spark.connector.internal.rdd.GeodeRDDPartition
 
 
 trait GeodeConnection {

http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c32fccba/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/GeodeConnectionConf.scala
----------------------------------------------------------------------
diff --git 
a/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/GeodeConnectionConf.scala
 
b/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/GeodeConnectionConf.scala
index 38d9e07..cf0d7b6 100644
--- 
a/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/GeodeConnectionConf.scala
+++ 
b/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/GeodeConnectionConf.scala
@@ -14,10 +14,10 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package io.pivotal.geode.spark.connector
+package org.apache.geode.spark.connector
 
 import org.apache.spark.SparkConf
-import 
io.pivotal.geode.spark.connector.internal.{DefaultGeodeConnectionManager, 
LocatorHelper}
+import 
org.apache.geode.spark.connector.internal.{DefaultGeodeConnectionManager, 
LocatorHelper}
 
 /**
  * Stores configuration of a connection to Geode cluster. It is serializable 
and can

http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c32fccba/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/GeodeConnectionManager.scala
----------------------------------------------------------------------
diff --git 
a/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/GeodeConnectionManager.scala
 
b/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/GeodeConnectionManager.scala
index bf678f0..b0dc3ee 100644
--- 
a/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/GeodeConnectionManager.scala
+++ 
b/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/GeodeConnectionManager.scala
@@ -14,7 +14,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package io.pivotal.geode.spark.connector
+package org.apache.geode.spark.connector
 
 /**
  * GeodeConnectionFactory provide an common interface that manages Geode

http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c32fccba/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/GeodeFunctionDeployer.scala
----------------------------------------------------------------------
diff --git a/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/GeodeFunctionDeployer.scala b/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/GeodeFunctionDeployer.scala
index 6e93b05..0229306 100644
--- a/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/GeodeFunctionDeployer.scala
+++ b/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/GeodeFunctionDeployer.scala
@@ -14,7 +14,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package io.pivotal.geode.spark.connector
+package org.apache.geode.spark.connector
 
 import java.io.File
 import java.net.URL

http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c32fccba/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/GeodeKryoRegistrator.scala
----------------------------------------------------------------------
diff --git a/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/GeodeKryoRegistrator.scala b/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/GeodeKryoRegistrator.scala
index 0bf7df5..e09e67c 100644
--- a/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/GeodeKryoRegistrator.scala
+++ b/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/GeodeKryoRegistrator.scala
@@ -14,10 +14,10 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package io.pivotal.geode.spark.connector
+package org.apache.geode.spark.connector
 
 import com.esotericsoftware.kryo.Kryo
-import io.pivotal.geode.spark.connector.internal.oql.UndefinedSerializer
+import org.apache.geode.spark.connector.internal.oql.UndefinedSerializer
 import org.apache.spark.serializer.KryoRegistrator
 import org.apache.geode.cache.query.internal.Undefined
 

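Note for users who register the connector's Kryo serializers explicitly: after this commit the registrator's fully qualified class name changes. A minimal sketch of the Spark configuration; the property keys are standard Spark settings, and the class name is the one introduced by this rename.

    import org.apache.spark.SparkConf

    // Switch Spark to Kryo and point it at the renamed registrator so that
    // Geode's OQL Undefined values keep serializing correctly.
    val conf = new SparkConf()
      .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .set("spark.kryo.registrator", "org.apache.geode.spark.connector.GeodeKryoRegistrator")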
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c32fccba/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/GeodePairRDDFunctions.scala
----------------------------------------------------------------------
diff --git a/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/GeodePairRDDFunctions.scala b/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/GeodePairRDDFunctions.scala
index ba5d2df..d0b6684 100644
--- a/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/GeodePairRDDFunctions.scala
+++ b/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/GeodePairRDDFunctions.scala
@@ -14,16 +14,16 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package io.pivotal.geode.spark.connector
+package org.apache.geode.spark.connector
 
-import io.pivotal.geode.spark.connector.internal.rdd.{GeodeOuterJoinRDD, GeodeJoinRDD, GeodePairRDDWriter}
+import org.apache.geode.spark.connector.internal.rdd.{GeodeOuterJoinRDD, GeodeJoinRDD, GeodePairRDDWriter}
 import org.apache.spark.Logging
 import org.apache.spark.api.java.function.Function
 import org.apache.spark.rdd.RDD
 
 /**
  * Extra gemFire functions on RDDs of (key, value) pairs through an implicit conversion.
- * Import `io.pivotal.geode.spark.connector._` at the top of your program to
+ * Import `org.apache.geode.spark.connector._` at the top of your program to
  * use these functions.
  */
 class GeodePairRDDFunctions[K, V](val rdd: RDD[(K, V)]) extends Serializable with Logging {

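To illustrate the updated import in the scaladoc above, a minimal sketch of saving a pair RDD through the implicit conversion; the saveToGeode method and its (regionName, connConf) signature are assumed from the connector docs, and the region name is a placeholder.

    import org.apache.spark.SparkContext
    import org.apache.geode.spark.connector._

    def savePairs(sc: SparkContext, connConf: GeodeConnectionConf): Unit = {
      val pairs = sc.parallelize(Seq(("a", 1), ("b", 2), ("c", 3)))
      // saveToGeode is added to pair RDDs by the implicit conversion imported above.
      pairs.saveToGeode("str_int_region", connConf)
    }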
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c32fccba/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/GeodeRDDFunctions.scala
----------------------------------------------------------------------
diff --git a/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/GeodeRDDFunctions.scala b/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/GeodeRDDFunctions.scala
index 2e5c92a..5649fff 100644
--- a/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/GeodeRDDFunctions.scala
+++ b/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/GeodeRDDFunctions.scala
@@ -14,16 +14,16 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package io.pivotal.geode.spark.connector
+package org.apache.geode.spark.connector
 
-import io.pivotal.geode.spark.connector.internal.rdd.{GeodeOuterJoinRDD, GeodeJoinRDD, GeodeRDDWriter}
+import org.apache.geode.spark.connector.internal.rdd.{GeodeOuterJoinRDD, GeodeJoinRDD, GeodeRDDWriter}
 import org.apache.spark.Logging
 import org.apache.spark.api.java.function.{PairFunction, Function}
 import org.apache.spark.rdd.RDD
 
 /**
  * Extra gemFire functions on non-Pair RDDs through an implicit conversion.
- * Import `io.pivotal.geode.spark.connector._` at the top of your program to 
+ * Import `org.apache.geode.spark.connector._` at the top of your program to 
  * use these functions.  
  */
 class GeodeRDDFunctions[T](val rdd: RDD[T]) extends Serializable with Logging {

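Likewise for non-pair RDDs, a sketch assuming the saveToGeode overload that takes a key/value extraction function, as described in the connector docs; the region name and the extraction function are placeholders.

    import org.apache.spark.SparkContext
    import org.apache.geode.spark.connector._

    def saveWords(sc: SparkContext, connConf: GeodeConnectionConf): Unit = {
      val words = sc.parallelize(Seq("spark", "geode", "connector"))
      // Each element is turned into a (key, value) pair before it is written to the region.
      words.saveToGeode("str_int_region", (w: String) => (w, w.length), connConf)
    }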
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c32fccba/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/GeodeSQLContextFunctions.scala
----------------------------------------------------------------------
diff --git a/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/GeodeSQLContextFunctions.scala b/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/GeodeSQLContextFunctions.scala
index 83aab7a..433c066 100644
--- a/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/GeodeSQLContextFunctions.scala
+++ b/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/GeodeSQLContextFunctions.scala
@@ -14,9 +14,9 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package io.pivotal.geode.spark.connector
+package org.apache.geode.spark.connector
 
-import io.pivotal.geode.spark.connector.internal.oql.{OQLRelation, QueryRDD}
+import org.apache.geode.spark.connector.internal.oql.{OQLRelation, QueryRDD}
 import org.apache.spark.Logging
 import org.apache.spark.sql.{DataFrame, SQLContext}
 

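For the SQLContext extension, a sketch of running an OQL query under the new import; the geodeOQL method name and the region in the query are assumptions based on the connector's OQL docs.

    import org.apache.spark.sql.SQLContext
    import org.apache.geode.spark.connector._

    def queryRegion(sqlContext: SQLContext): Unit = {
      // The OQL statement is executed on the Geode cluster and exposed as a DataFrame.
      val df = sqlContext.geodeOQL("SELECT * FROM /str_int_region")
      df.show()
    }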
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c32fccba/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/GeodeSparkContextFunctions.scala
----------------------------------------------------------------------
diff --git a/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/GeodeSparkContextFunctions.scala b/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/GeodeSparkContextFunctions.scala
index 617cb33..40f69af 100644
--- a/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/GeodeSparkContextFunctions.scala
+++ b/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/GeodeSparkContextFunctions.scala
@@ -14,9 +14,9 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package io.pivotal.geode.spark.connector
+package org.apache.geode.spark.connector
 
-import io.pivotal.geode.spark.connector.internal.rdd.GeodeRegionRDD
+import org.apache.geode.spark.connector.internal.rdd.GeodeRegionRDD
 import org.apache.spark.SparkContext
 
 import scala.reflect.ClassTag

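And for the SparkContext extension, a sketch of exposing a Geode region as an RDD; the geodeRegion method and its type parameters are assumed from the connector docs, and the region name is a placeholder.

    import org.apache.spark.SparkContext
    import org.apache.geode.spark.connector._

    def loadRegion(sc: SparkContext): Unit = {
      // Reads the whole region as an RDD of (key, value) pairs.
      val rdd = sc.geodeRegion[String, Int]("str_int_region")
      rdd.take(5).foreach(println)
    }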
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c32fccba/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/internal/DefaultGeodeConnection.scala
----------------------------------------------------------------------
diff --git a/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/internal/DefaultGeodeConnection.scala b/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/internal/DefaultGeodeConnection.scala
index b232712..670a3f8 100644
--- a/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/internal/DefaultGeodeConnection.scala
+++ b/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/internal/DefaultGeodeConnection.scala
@@ -14,7 +14,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package io.pivotal.geode.spark.connector.internal
+package org.apache.geode.spark.connector.internal
 
 import java.net.InetAddress
 
@@ -23,11 +23,11 @@ import org.apache.geode.cache.execute.{FunctionException, FunctionService}
 import org.apache.geode.cache.query.Query
 import org.apache.geode.cache.{Region, RegionService}
 import org.apache.geode.internal.cache.execute.InternalExecution
-import io.pivotal.geode.spark.connector.internal.oql.QueryResultCollector
-import io.pivotal.geode.spark.connector.internal.rdd.GeodeRDDPartition
+import org.apache.geode.spark.connector.internal.oql.QueryResultCollector
+import org.apache.geode.spark.connector.internal.rdd.GeodeRDDPartition
 import org.apache.spark.{SparkEnv, Logging}
-import io.pivotal.geode.spark.connector.GeodeConnection
-import io.pivotal.geode.spark.connector.internal.geodefunctions._
+import org.apache.geode.spark.connector.GeodeConnection
+import org.apache.geode.spark.connector.internal.geodefunctions._
 import java.util.{Set => JSet, List => JList }
 
 /**
@@ -58,7 +58,7 @@ private[connector] class DefaultGeodeConnection (
   }
   
   private def getClientCacheFactory: ClientCacheFactory = {
-    import io.pivotal.geode.spark.connector.map2Properties
+    import org.apache.geode.spark.connector.map2Properties
     val ccf = new ClientCacheFactory(gemFireProps)
     ccf.setPoolReadTimeout(30000)
     val servers = LocatorHelper.getAllGeodeServers(locators)

http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c32fccba/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/internal/DefaultGeodeConnectionManager.scala
----------------------------------------------------------------------
diff --git a/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/internal/DefaultGeodeConnectionManager.scala b/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/internal/DefaultGeodeConnectionManager.scala
index eb67cda..ed8a535 100644
--- a/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/internal/DefaultGeodeConnectionManager.scala
+++ b/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/internal/DefaultGeodeConnectionManager.scala
@@ -14,9 +14,9 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package io.pivotal.geode.spark.connector.internal
+package org.apache.geode.spark.connector.internal
 
-import io.pivotal.geode.spark.connector.{GeodeConnection, GeodeConnectionConf, GeodeConnectionManager}
+import org.apache.geode.spark.connector.{GeodeConnection, GeodeConnectionConf, GeodeConnectionManager}
 
 import scala.collection.mutable
 

http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c32fccba/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/internal/LocatorHelper.scala
----------------------------------------------------------------------
diff --git a/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/internal/LocatorHelper.scala b/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/internal/LocatorHelper.scala
index 4baa936..b8b2b14 100644
--- a/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/internal/LocatorHelper.scala
+++ b/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/internal/LocatorHelper.scala
@@ -14,7 +14,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package io.pivotal.geode.spark.connector.internal
+package org.apache.geode.spark.connector.internal
 
 import java.net.InetSocketAddress
 import java.util.{ArrayList => JArrayList}

http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c32fccba/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/internal/geodefunctions/StructStreamingResultCollector.scala
----------------------------------------------------------------------
diff --git a/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/internal/geodefunctions/StructStreamingResultCollector.scala b/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/internal/geodefunctions/StructStreamingResultCollector.scala
index 5139be4..198887f 100644
--- a/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/internal/geodefunctions/StructStreamingResultCollector.scala
+++ b/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/internal/geodefunctions/StructStreamingResultCollector.scala
@@ -14,7 +14,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package io.pivotal.geode.spark.connector.internal.geodefunctions
+package org.apache.geode.spark.connector.internal.geodefunctions
 
 import java.util.concurrent.{TimeUnit, LinkedBlockingQueue, BlockingQueue}
 import org.apache.geode.DataSerializer
@@ -23,7 +23,7 @@ import org.apache.geode.cache.query.internal.types.StructTypeImpl
 import org.apache.geode.cache.query.types.StructType
 import org.apache.geode.distributed.DistributedMember
 import org.apache.geode.internal.{Version, ByteArrayDataInput}
-import io.pivotal.geode.spark.connector.internal.geodefunctions.StructStreamingResultSender.
+import org.apache.geode.spark.connector.internal.geodefunctions.StructStreamingResultSender.
        {TYPE_CHUNK, DATA_CHUNK, ERROR_CHUNK, SER_DATA, UNSER_DATA, BYTEARR_DATA}
 
 /**

http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c32fccba/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/internal/oql/QueryParser.scala
----------------------------------------------------------------------
diff --git a/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/internal/oql/QueryParser.scala b/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/internal/oql/QueryParser.scala
index 3f6dfad..7888d13 100644
--- a/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/internal/oql/QueryParser.scala
+++ b/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/internal/oql/QueryParser.scala
@@ -14,7 +14,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package io.pivotal.geode.spark.connector.internal.oql
+package org.apache.geode.spark.connector.internal.oql
 
 import scala.util.parsing.combinator.RegexParsers
 

http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c32fccba/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/internal/oql/QueryRDD.scala
----------------------------------------------------------------------
diff --git a/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/internal/oql/QueryRDD.scala b/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/internal/oql/QueryRDD.scala
index 474aa6a..1539b9f 100644
--- a/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/internal/oql/QueryRDD.scala
+++ b/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/internal/oql/QueryRDD.scala
@@ -14,10 +14,10 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package io.pivotal.geode.spark.connector.internal.oql
+package org.apache.geode.spark.connector.internal.oql
 
-import io.pivotal.geode.spark.connector.GeodeConnectionConf
-import io.pivotal.geode.spark.connector.internal.rdd.{GeodeRDDPartition, ServerSplitsPartitioner}
+import org.apache.geode.spark.connector.GeodeConnectionConf
+import org.apache.geode.spark.connector.internal.rdd.{GeodeRDDPartition, ServerSplitsPartitioner}
 import org.apache.spark.rdd.RDD
 import org.apache.spark.{TaskContext, SparkContext, Partition}
 import scala.reflect.ClassTag

http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c32fccba/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/internal/oql/QueryResultCollector.scala
----------------------------------------------------------------------
diff --git a/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/internal/oql/QueryResultCollector.scala b/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/internal/oql/QueryResultCollector.scala
index 718d816..99adca8 100644
--- a/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/internal/oql/QueryResultCollector.scala
+++ b/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/internal/oql/QueryResultCollector.scala
@@ -14,7 +14,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package io.pivotal.geode.spark.connector.internal.oql
+package org.apache.geode.spark.connector.internal.oql
 
 import java.util.concurrent.{TimeUnit, LinkedBlockingDeque}
 

http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c32fccba/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/internal/oql/RDDConverter.scala
----------------------------------------------------------------------
diff --git a/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/internal/oql/RDDConverter.scala b/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/internal/oql/RDDConverter.scala
index 6a1611c..10799fe 100644
--- a/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/internal/oql/RDDConverter.scala
+++ b/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/internal/oql/RDDConverter.scala
@@ -14,7 +14,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package io.pivotal.geode.spark.connector.internal.oql
+package org.apache.geode.spark.connector.internal.oql
 
 import org.apache.spark.rdd.RDD
 import org.apache.spark.sql.{DataFrame, SQLContext}

http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c32fccba/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/internal/oql/RowBuilder.scala
----------------------------------------------------------------------
diff --git a/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/internal/oql/RowBuilder.scala b/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/internal/oql/RowBuilder.scala
index acbabc1..73822ec 100644
--- a/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/internal/oql/RowBuilder.scala
+++ b/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/internal/oql/RowBuilder.scala
@@ -14,7 +14,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package io.pivotal.geode.spark.connector.internal.oql
+package org.apache.geode.spark.connector.internal.oql
 
 import org.apache.geode.cache.query.internal.StructImpl
 import org.apache.spark.rdd.RDD

http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c32fccba/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/internal/oql/SchemaBuilder.scala
----------------------------------------------------------------------
diff --git a/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/internal/oql/SchemaBuilder.scala b/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/internal/oql/SchemaBuilder.scala
index 44972839..4216674 100644
--- a/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/internal/oql/SchemaBuilder.scala
+++ b/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/internal/oql/SchemaBuilder.scala
@@ -14,7 +14,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package io.pivotal.geode.spark.connector.internal.oql
+package org.apache.geode.spark.connector.internal.oql
 
 import org.apache.geode.cache.query.internal.StructImpl
 import org.apache.spark.sql.types._

http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c32fccba/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/internal/oql/UndefinedSerializer.scala
----------------------------------------------------------------------
diff --git a/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/internal/oql/UndefinedSerializer.scala b/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/internal/oql/UndefinedSerializer.scala
index 2809a73..b92e811 100644
--- a/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/internal/oql/UndefinedSerializer.scala
+++ b/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/internal/oql/UndefinedSerializer.scala
@@ -14,7 +14,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package io.pivotal.geode.spark.connector.internal.oql
+package org.apache.geode.spark.connector.internal.oql
 
 import com.esotericsoftware.kryo.{Kryo, Serializer}
 import com.esotericsoftware.kryo.io.{Output, Input}

http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c32fccba/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/internal/rdd/GeodeJoinRDD.scala
----------------------------------------------------------------------
diff --git a/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/internal/rdd/GeodeJoinRDD.scala b/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/internal/rdd/GeodeJoinRDD.scala
index f971a3e..ea84c74 100644
--- a/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/internal/rdd/GeodeJoinRDD.scala
+++ b/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/internal/rdd/GeodeJoinRDD.scala
@@ -14,10 +14,10 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package io.pivotal.geode.spark.connector.internal.rdd
+package org.apache.geode.spark.connector.internal.rdd
 
 import org.apache.geode.cache.Region
-import io.pivotal.geode.spark.connector.GeodeConnectionConf
+import org.apache.geode.spark.connector.GeodeConnectionConf
 import org.apache.spark.{TaskContext, Partition}
 import org.apache.spark.rdd.RDD
 import scala.collection.JavaConversions._

http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c32fccba/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/internal/rdd/GeodeOuterJoinRDD.scala
----------------------------------------------------------------------
diff --git a/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/internal/rdd/GeodeOuterJoinRDD.scala b/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/internal/rdd/GeodeOuterJoinRDD.scala
index 04855c1..e0632f4 100644
--- a/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/internal/rdd/GeodeOuterJoinRDD.scala
+++ b/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/internal/rdd/GeodeOuterJoinRDD.scala
@@ -14,10 +14,10 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package io.pivotal.geode.spark.connector.internal.rdd
+package org.apache.geode.spark.connector.internal.rdd
 
 import org.apache.geode.cache.Region
-import io.pivotal.geode.spark.connector.GeodeConnectionConf
+import org.apache.geode.spark.connector.GeodeConnectionConf
 import org.apache.spark.{TaskContext, Partition}
 import org.apache.spark.rdd.RDD
 import scala.collection.JavaConversions._

http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c32fccba/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/internal/rdd/GeodeRDDPartition.scala
----------------------------------------------------------------------
diff --git a/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/internal/rdd/GeodeRDDPartition.scala b/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/internal/rdd/GeodeRDDPartition.scala
index 24fe72e..041f036 100644
--- a/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/internal/rdd/GeodeRDDPartition.scala
+++ b/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/internal/rdd/GeodeRDDPartition.scala
@@ -14,7 +14,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package io.pivotal.geode.spark.connector.internal.rdd
+package org.apache.geode.spark.connector.internal.rdd
 
 import org.apache.spark.Partition
 

http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c32fccba/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/internal/rdd/GeodeRDDPartitioner.scala
----------------------------------------------------------------------
diff --git a/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/internal/rdd/GeodeRDDPartitioner.scala b/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/internal/rdd/GeodeRDDPartitioner.scala
index d960cab..1933244 100644
--- a/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/internal/rdd/GeodeRDDPartitioner.scala
+++ b/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/internal/rdd/GeodeRDDPartitioner.scala
@@ -14,10 +14,10 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package io.pivotal.geode.spark.connector.internal.rdd
+package org.apache.geode.spark.connector.internal.rdd
 
-import io.pivotal.geode.spark.connector.GeodeConnection
-import io.pivotal.geode.spark.connector.internal.RegionMetadata
+import org.apache.geode.spark.connector.GeodeConnection
+import org.apache.geode.spark.connector.internal.RegionMetadata
 import org.apache.spark.{Logging, Partition}
 
 import scala.reflect.ClassTag

http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c32fccba/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/internal/rdd/GeodeRDDPartitionerImpl.scala
----------------------------------------------------------------------
diff --git a/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/internal/rdd/GeodeRDDPartitionerImpl.scala b/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/internal/rdd/GeodeRDDPartitionerImpl.scala
index 4606114..c50acf8 100644
--- a/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/internal/rdd/GeodeRDDPartitionerImpl.scala
+++ b/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/internal/rdd/GeodeRDDPartitionerImpl.scala
@@ -14,11 +14,11 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package io.pivotal.geode.spark.connector.internal.rdd
+package org.apache.geode.spark.connector.internal.rdd
 
-import io.pivotal.geode.spark.connector.GeodeConnection
-import io.pivotal.geode.spark.connector.internal.RegionMetadata
-import io.pivotal.geode.spark.connector.NumberPartitionsPerServerPropKey
+import org.apache.geode.spark.connector.GeodeConnection
+import org.apache.geode.spark.connector.internal.RegionMetadata
+import org.apache.geode.spark.connector.NumberPartitionsPerServerPropKey
 import org.apache.spark.Partition
 import scala.collection.JavaConversions._
 import scala.collection.immutable.SortedSet

http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c32fccba/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/internal/rdd/GeodeRDDWriter.scala
----------------------------------------------------------------------
diff --git a/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/internal/rdd/GeodeRDDWriter.scala b/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/internal/rdd/GeodeRDDWriter.scala
index 27559f3..2626d1a 100644
--- a/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/internal/rdd/GeodeRDDWriter.scala
+++ b/geode-spark-connector/geode-spark-connector/src/main/scala/org/apache/geode/spark/connector/internal/rdd/GeodeRDDWriter.scala
@@ -14,10 +14,10 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package io.pivotal.geode.spark.connector.internal.rdd
+package org.apache.geode.spark.connector.internal.rdd
 
 import org.apache.geode.cache.Region
-import io.pivotal.geode.spark.connector._
+import org.apache.geode.spark.connector._
 import org.apache.spark.{Logging, TaskContext}
 
 import scala.collection.Iterator

