Author: xuefu
Date: Tue Jun  9 18:34:32 2015
New Revision: 1684498

URL: http://svn.apache.org/r1684498
Log:
PIG-4586: Cleanup: Rename POConverter to RDDConverter (Mohit via Xuefu)

Added:
    
pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/spark/converter/RDDConverter.java
Removed:
    
pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/spark/converter/POConverter.java
Modified:
    
pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/spark/SparkLauncher.java
    
pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/spark/converter/CollectedGroupConverter.java
    
pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/spark/converter/CounterConverter.java
    
pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/spark/converter/DistinctConverter.java
    
pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/spark/converter/FRJoinConverter.java
    
pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/spark/converter/FilterConverter.java
    
pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/spark/converter/ForEachConverter.java
    
pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/spark/converter/GlobalRearrangeConverter.java
    
pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/spark/converter/LimitConverter.java
    
pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/spark/converter/LoadConverter.java
    
pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/spark/converter/LocalRearrangeConverter.java
    
pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/spark/converter/MergeJoinConverter.java
    
pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/spark/converter/PackageConverter.java
    
pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/spark/converter/RankConverter.java
    
pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/spark/converter/SkewedJoinConverter.java
    
pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/spark/converter/SortConverter.java
    
pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/spark/converter/SplitConverter.java
    
pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/spark/converter/StoreConverter.java
    
pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/spark/converter/StreamConverter.java
    
pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/spark/converter/UnionConverter.java

Modified: 
pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/spark/SparkLauncher.java
URL: 
http://svn.apache.org/viewvc/pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/spark/SparkLauncher.java?rev=1684498&r1=1684497&r2=1684498&view=diff
==============================================================================
--- 
pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/spark/SparkLauncher.java
 (original)
+++ 
pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/spark/SparkLauncher.java
 Tue Jun  9 18:34:32 2015
@@ -71,26 +71,7 @@ import org.apache.pig.backend.hadoop.exe
 import 
org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.POStore;
 import 
org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.POUnion;
 import 
org.apache.pig.backend.hadoop.executionengine.physicalLayer.util.PlanHelper;
-import 
org.apache.pig.backend.hadoop.executionengine.spark.converter.CollectedGroupConverter;
-import 
org.apache.pig.backend.hadoop.executionengine.spark.converter.CounterConverter;
-import 
org.apache.pig.backend.hadoop.executionengine.spark.converter.DistinctConverter;
-import 
org.apache.pig.backend.hadoop.executionengine.spark.converter.FRJoinConverter;
-import 
org.apache.pig.backend.hadoop.executionengine.spark.converter.FilterConverter;
-import 
org.apache.pig.backend.hadoop.executionengine.spark.converter.ForEachConverter;
-import 
org.apache.pig.backend.hadoop.executionengine.spark.converter.GlobalRearrangeConverter;
-import 
org.apache.pig.backend.hadoop.executionengine.spark.converter.LimitConverter;
-import 
org.apache.pig.backend.hadoop.executionengine.spark.converter.LoadConverter;
-import 
org.apache.pig.backend.hadoop.executionengine.spark.converter.LocalRearrangeConverter;
-import 
org.apache.pig.backend.hadoop.executionengine.spark.converter.MergeJoinConverter;
-import 
org.apache.pig.backend.hadoop.executionengine.spark.converter.POConverter;
-import 
org.apache.pig.backend.hadoop.executionengine.spark.converter.PackageConverter;
-import 
org.apache.pig.backend.hadoop.executionengine.spark.converter.RankConverter;
-import 
org.apache.pig.backend.hadoop.executionengine.spark.converter.SkewedJoinConverter;
-import 
org.apache.pig.backend.hadoop.executionengine.spark.converter.SortConverter;
-import 
org.apache.pig.backend.hadoop.executionengine.spark.converter.SplitConverter;
-import 
org.apache.pig.backend.hadoop.executionengine.spark.converter.StoreConverter;
-import 
org.apache.pig.backend.hadoop.executionengine.spark.converter.StreamConverter;
-import 
org.apache.pig.backend.hadoop.executionengine.spark.converter.UnionConverter;
+import org.apache.pig.backend.hadoop.executionengine.spark.converter.*;
 import 
org.apache.pig.backend.hadoop.executionengine.spark.operator.POGlobalRearrangeSpark;
 import 
org.apache.pig.backend.hadoop.executionengine.spark.optimizer.AccumulatorOptimizer;
 import 
org.apache.pig.backend.hadoop.executionengine.spark.optimizer.SecondaryKeyOptimizerSpark;
@@ -179,7 +160,8 @@ public class SparkLauncher extends Launc
                byte[] confBytes = KryoSerializer.serializeJobConf(jobConf);
 
                // Create conversion map, mapping between pig operator and 
spark convertor
-               Map<Class<? extends PhysicalOperator>, POConverter> convertMap 
= new HashMap<Class<? extends PhysicalOperator>, POConverter>();
+               Map<Class<? extends PhysicalOperator>, RDDConverter> convertMap
+                               = new HashMap<Class<? extends 
PhysicalOperator>, RDDConverter>();
                convertMap.put(POLoad.class, new LoadConverter(pigContext,
                                physicalPlan, sparkContext.sc()));
                convertMap.put(POStore.class, new StoreConverter(pigContext));
@@ -446,7 +428,7 @@ public class SparkLauncher extends Launc
        }
 
        private void sparkPlanToRDD(SparkOperPlan sparkPlan,
-                       Map<Class<? extends PhysicalOperator>, POConverter> 
convertMap,
+                       Map<Class<? extends PhysicalOperator>, RDDConverter> 
convertMap,
                        SparkPigStats sparkStats, JobConf jobConf)
                        throws IOException, InterruptedException {
                Set<Integer> seenJobIDs = new HashSet<Integer>();
@@ -488,7 +470,7 @@ public class SparkLauncher extends Launc
                        SparkOperator sparkOperator,
                        Map<OperatorKey, RDD<Tuple>> sparkOpRdds,
                        Map<OperatorKey, RDD<Tuple>> physicalOpRdds,
-                       Map<Class<? extends PhysicalOperator>, POConverter> 
convertMap,
+                       Map<Class<? extends PhysicalOperator>, RDDConverter> 
convertMap,
                        Set<Integer> seenJobIDs, SparkPigStats sparkStats, 
JobConf conf)
                        throws IOException, InterruptedException {
         addUDFJarsToSparkJobWorkingDirectory(sparkOperator);
@@ -561,7 +543,7 @@ public class SparkLauncher extends Launc
                        PhysicalOperator physicalOperator,
                        Map<OperatorKey, RDD<Tuple>> rdds,
                        List<RDD<Tuple>> rddsFromPredeSparkOper,
-                       Map<Class<? extends PhysicalOperator>, POConverter> 
convertMap)
+                       Map<Class<? extends PhysicalOperator>, RDDConverter> 
convertMap)
                        throws IOException {
         RDD<Tuple> nextRDD = null;
         List<PhysicalOperator> predecessors = plan
@@ -585,7 +567,7 @@ public class SparkLauncher extends Launc
                        }
                }
 
-               POConverter converter = 
convertMap.get(physicalOperator.getClass());
+               RDDConverter converter = 
convertMap.get(physicalOperator.getClass());
                if (converter == null) {
                        throw new IllegalArgumentException(
                                        "Pig on Spark does not support Physical 
Operator: " + physicalOperator);

Modified: 
pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/spark/converter/CollectedGroupConverter.java
URL: 
http://svn.apache.org/viewvc/pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/spark/converter/CollectedGroupConverter.java?rev=1684498&r1=1684497&r2=1684498&view=diff
==============================================================================
--- 
pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/spark/converter/CollectedGroupConverter.java
 (original)
+++ 
pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/spark/converter/CollectedGroupConverter.java
 Tue Jun  9 18:34:32 2015
@@ -30,7 +30,7 @@ import org.apache.spark.api.java.functio
 import org.apache.spark.rdd.RDD;
 
 @SuppressWarnings({ "serial"})
-public class CollectedGroupConverter implements POConverter<Tuple, Tuple, 
POCollectedGroup> {
+public class CollectedGroupConverter implements RDDConverter<Tuple, Tuple, 
POCollectedGroup> {
 
        @Override
   public RDD<Tuple> convert(List<RDD<Tuple>> predecessors,
@@ -85,4 +85,4 @@ public class CollectedGroupConverter imp
                  };
                }
        }
-}
\ No newline at end of file
+}

Modified: 
pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/spark/converter/CounterConverter.java
URL: 
http://svn.apache.org/viewvc/pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/spark/converter/CounterConverter.java?rev=1684498&r1=1684497&r2=1684498&view=diff
==============================================================================
--- 
pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/spark/converter/CounterConverter.java
 (original)
+++ 
pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/spark/converter/CounterConverter.java
 Tue Jun  9 18:34:32 2015
@@ -35,7 +35,7 @@ import org.apache.spark.api.java.JavaRDD
 import org.apache.spark.api.java.function.Function2;
 import org.apache.spark.rdd.RDD;
 
-public class CounterConverter implements POConverter<Tuple, Tuple, POCounter> {
+public class CounterConverter implements RDDConverter<Tuple, Tuple, POCounter> 
{
 
        private static final Log LOG = 
LogFactory.getLog(CounterConverter.class);
        

Modified: 
pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/spark/converter/DistinctConverter.java
URL: 
http://svn.apache.org/viewvc/pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/spark/converter/DistinctConverter.java?rev=1684498&r1=1684497&r2=1684498&view=diff
==============================================================================
--- 
pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/spark/converter/DistinctConverter.java
 (original)
+++ 
pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/spark/converter/DistinctConverter.java
 Tue Jun  9 18:34:32 2015
@@ -34,7 +34,7 @@ import scala.runtime.AbstractFunction1;
 import scala.runtime.AbstractFunction2;
 
 @SuppressWarnings({ "serial" })
-public class DistinctConverter implements POConverter<Tuple, Tuple, 
PODistinct> {
+public class DistinctConverter implements RDDConverter<Tuple, Tuple, 
PODistinct> {
     private static final Log LOG = LogFactory.getLog(DistinctConverter.class);
 
     @Override

Modified: 
pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/spark/converter/FRJoinConverter.java
URL: 
http://svn.apache.org/viewvc/pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/spark/converter/FRJoinConverter.java?rev=1684498&r1=1684497&r2=1684498&view=diff
==============================================================================
--- 
pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/spark/converter/FRJoinConverter.java
 (original)
+++ 
pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/spark/converter/FRJoinConverter.java
 Tue Jun  9 18:34:32 2015
@@ -44,7 +44,7 @@ import scala.runtime.AbstractFunction1;
 
 @SuppressWarnings("serial")
 public class FRJoinConverter implements
-        POConverter<Tuple, Tuple, POFRJoin> {
+        RDDConverter<Tuple, Tuple, POFRJoin> {
     private static final Log LOG = LogFactory.getLog(FRJoinConverter.class);
 
     @Override

Modified: 
pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/spark/converter/FilterConverter.java
URL: 
http://svn.apache.org/viewvc/pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/spark/converter/FilterConverter.java?rev=1684498&r1=1684497&r2=1684498&view=diff
==============================================================================
--- 
pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/spark/converter/FilterConverter.java
 (original)
+++ 
pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/spark/converter/FilterConverter.java
 Tue Jun  9 18:34:32 2015
@@ -34,7 +34,7 @@ import org.apache.spark.rdd.RDD;
  * Converter that converts an RDD to a filtered RRD using POFilter
  */
 @SuppressWarnings({ "serial" })
-public class FilterConverter implements POConverter<Tuple, Tuple, POFilter> {
+public class FilterConverter implements RDDConverter<Tuple, Tuple, POFilter> {
 
     @Override
     public RDD<Tuple> convert(List<RDD<Tuple>> predecessors,

Modified: 
pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/spark/converter/ForEachConverter.java
URL: 
http://svn.apache.org/viewvc/pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/spark/converter/ForEachConverter.java?rev=1684498&r1=1684497&r2=1684498&view=diff
==============================================================================
--- 
pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/spark/converter/ForEachConverter.java
 (original)
+++ 
pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/spark/converter/ForEachConverter.java
 Tue Jun  9 18:34:32 2015
@@ -43,7 +43,7 @@ import org.apache.spark.rdd.RDD;
  * Convert that is able to convert an RRD to another RRD using a POForEach
  */
 @SuppressWarnings({"serial" })
-public class ForEachConverter implements POConverter<Tuple, Tuple, POForEach> {
+public class ForEachConverter implements RDDConverter<Tuple, Tuple, POForEach> 
{
 
     private byte[] confBytes;
 

Modified: 
pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/spark/converter/GlobalRearrangeConverter.java
URL: 
http://svn.apache.org/viewvc/pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/spark/converter/GlobalRearrangeConverter.java?rev=1684498&r1=1684497&r2=1684498&view=diff
==============================================================================
--- 
pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/spark/converter/GlobalRearrangeConverter.java
 (original)
+++ 
pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/spark/converter/GlobalRearrangeConverter.java
 Tue Jun  9 18:34:32 2015
@@ -48,7 +48,7 @@ import scala.runtime.AbstractFunction1;
 
 @SuppressWarnings({ "serial" })
 public class GlobalRearrangeConverter implements
-        POConverter<Tuple, Tuple, POGlobalRearrangeSpark> {
+        RDDConverter<Tuple, Tuple, POGlobalRearrangeSpark> {
     private static final Log LOG = LogFactory
             .getLog(GlobalRearrangeConverter.class);
 

Modified: 
pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/spark/converter/LimitConverter.java
URL: 
http://svn.apache.org/viewvc/pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/spark/converter/LimitConverter.java?rev=1684498&r1=1684497&r2=1684498&view=diff
==============================================================================
--- 
pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/spark/converter/LimitConverter.java
 (original)
+++ 
pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/spark/converter/LimitConverter.java
 Tue Jun  9 18:34:32 2015
@@ -30,7 +30,7 @@ import org.apache.spark.api.java.functio
 import org.apache.spark.rdd.RDD;
 
 @SuppressWarnings({ "serial" })
-public class LimitConverter implements POConverter<Tuple, Tuple, POLimit> {
+public class LimitConverter implements RDDConverter<Tuple, Tuple, POLimit> {
 
     @Override
     public RDD<Tuple> convert(List<RDD<Tuple>> predecessors, POLimit poLimit)
@@ -77,4 +77,4 @@ public class LimitConverter implements P
             };
         }
     }
-}
\ No newline at end of file
+}

Modified: 
pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/spark/converter/LoadConverter.java
URL: 
http://svn.apache.org/viewvc/pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/spark/converter/LoadConverter.java?rev=1684498&r1=1684497&r2=1684498&view=diff
==============================================================================
--- 
pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/spark/converter/LoadConverter.java
 (original)
+++ 
pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/spark/converter/LoadConverter.java
 Tue Jun  9 18:34:32 2015
@@ -55,7 +55,7 @@ import org.apache.commons.logging.LogFac
  *
  */
 @SuppressWarnings({ "serial" })
-public class LoadConverter implements POConverter<Tuple, Tuple, POLoad> {
+public class LoadConverter implements RDDConverter<Tuple, Tuple, POLoad> {
 
     private static final ToTupleFunction TO_TUPLE_FUNCTION = new 
ToTupleFunction();
     private static Log log = LogFactory.getLog(LoadConverter.class);

Modified: 
pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/spark/converter/LocalRearrangeConverter.java
URL: 
http://svn.apache.org/viewvc/pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/spark/converter/LocalRearrangeConverter.java?rev=1684498&r1=1684497&r2=1684498&view=diff
==============================================================================
--- 
pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/spark/converter/LocalRearrangeConverter.java
 (original)
+++ 
pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/spark/converter/LocalRearrangeConverter.java
 Tue Jun  9 18:34:32 2015
@@ -35,7 +35,7 @@ import org.apache.spark.rdd.RDD;
 
 @SuppressWarnings({ "serial" })
 public class LocalRearrangeConverter implements
-        POConverter<Tuple, Tuple, POLocalRearrange> {
+        RDDConverter<Tuple, Tuple, POLocalRearrange> {
     private static final Log LOG = LogFactory
             .getLog(GlobalRearrangeConverter.class);
 

Modified: 
pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/spark/converter/MergeJoinConverter.java
URL: 
http://svn.apache.org/viewvc/pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/spark/converter/MergeJoinConverter.java?rev=1684498&r1=1684497&r2=1684498&view=diff
==============================================================================
--- 
pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/spark/converter/MergeJoinConverter.java
 (original)
+++ 
pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/spark/converter/MergeJoinConverter.java
 Tue Jun  9 18:34:32 2015
@@ -41,7 +41,7 @@ import scala.runtime.AbstractFunction1;
 
 @SuppressWarnings("serial")
 public class MergeJoinConverter implements
-        POConverter<Tuple, Tuple, POMergeJoin> {
+        RDDConverter<Tuple, Tuple, POMergeJoin> {
 
     @Override
     public RDD<Tuple> convert(List<RDD<Tuple>> predecessors,
@@ -169,4 +169,4 @@ public class MergeJoinConverter implemen
             return new Tuple2TransformIterable(input);
         }
     }
-}
\ No newline at end of file
+}

Modified: 
pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/spark/converter/PackageConverter.java
URL: 
http://svn.apache.org/viewvc/pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/spark/converter/PackageConverter.java?rev=1684498&r1=1684497&r2=1684498&view=diff
==============================================================================
--- 
pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/spark/converter/PackageConverter.java
 (original)
+++ 
pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/spark/converter/PackageConverter.java
 Tue Jun  9 18:34:32 2015
@@ -40,7 +40,7 @@ import org.apache.spark.rdd.RDD;
 import scala.runtime.AbstractFunction1;
 
 @SuppressWarnings({ "serial" })
-public class PackageConverter implements POConverter<Tuple, Tuple, POPackage> {
+public class PackageConverter implements RDDConverter<Tuple, Tuple, POPackage> 
{
     private static final Log LOG = LogFactory.getLog(PackageConverter.class);
 
     private transient JobConf jobConf;

Added: 
pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/spark/converter/RDDConverter.java
URL: 
http://svn.apache.org/viewvc/pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/spark/converter/RDDConverter.java?rev=1684498&view=auto
==============================================================================
--- 
pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/spark/converter/RDDConverter.java
 (added)
+++ 
pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/spark/converter/RDDConverter.java
 Tue Jun  9 18:34:32 2015
@@ -0,0 +1,33 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.pig.backend.hadoop.executionengine.spark.converter;
+
+import java.io.IOException;
+import java.util.List;
+
+import 
org.apache.pig.backend.hadoop.executionengine.physicalLayer.PhysicalOperator;
+
+import org.apache.spark.rdd.RDD;
+
+/**
+ * Given an RDD and a PhysicalOperator, an implementation of this class can
+ * convert the RDD to another RDD.
+ */
+public interface RDDConverter<IN, OUT, T extends PhysicalOperator> {
+    RDD<OUT> convert(List<RDD<IN>> rdd, T physicalOperator) throws IOException;
+}

Modified: 
pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/spark/converter/RankConverter.java
URL: 
http://svn.apache.org/viewvc/pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/spark/converter/RankConverter.java?rev=1684498&r1=1684497&r2=1684498&view=diff
==============================================================================
--- 
pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/spark/converter/RankConverter.java
 (original)
+++ 
pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/spark/converter/RankConverter.java
 Tue Jun  9 18:34:32 2015
@@ -38,7 +38,7 @@ import org.apache.spark.rdd.RDD;
 
 import scala.Tuple2;
 
-public class RankConverter implements POConverter<Tuple, Tuple, PORank> {
+public class RankConverter implements RDDConverter<Tuple, Tuple, PORank> {
 
        private static final Log LOG = LogFactory.getLog(RankConverter.class);
        

Modified: 
pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/spark/converter/SkewedJoinConverter.java
URL: 
http://svn.apache.org/viewvc/pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/spark/converter/SkewedJoinConverter.java?rev=1684498&r1=1684497&r2=1684498&view=diff
==============================================================================
--- 
pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/spark/converter/SkewedJoinConverter.java
 (original)
+++ 
pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/spark/converter/SkewedJoinConverter.java
 Tue Jun  9 18:34:32 2015
@@ -45,7 +45,7 @@ import scala.Tuple2;
 import scala.runtime.AbstractFunction1;
 
 public class SkewedJoinConverter implements
-        POConverter<Tuple, Tuple, POSkewedJoin>, Serializable {
+        RDDConverter<Tuple, Tuple, POSkewedJoin>, Serializable {
 
     private POLocalRearrange[] LRs;
     private POSkewedJoin poSkewedJoin;
@@ -212,4 +212,4 @@ public class SkewedJoinConverter impleme
             return new Tuple2TransformIterable(input);
         }
     }
-}
\ No newline at end of file
+}

Modified: 
pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/spark/converter/SortConverter.java
URL: 
http://svn.apache.org/viewvc/pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/spark/converter/SortConverter.java?rev=1684498&r1=1684497&r2=1684498&view=diff
==============================================================================
--- 
pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/spark/converter/SortConverter.java
 (original)
+++ 
pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/spark/converter/SortConverter.java
 Tue Jun  9 18:34:32 2015
@@ -38,7 +38,7 @@ import org.apache.spark.api.java.functio
 import org.apache.spark.rdd.RDD;
 
 @SuppressWarnings("serial")
-public class SortConverter implements POConverter<Tuple, Tuple, POSort> {
+public class SortConverter implements RDDConverter<Tuple, Tuple, POSort> {
     private static final Log LOG = LogFactory.getLog(SortConverter.class);
 
     private static final FlatMapFunction<Iterator<Tuple2<Tuple, Object>>, 
Tuple> TO_VALUE_FUNCTION = new ToValueFunction();

Modified: 
pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/spark/converter/SplitConverter.java
URL: 
http://svn.apache.org/viewvc/pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/spark/converter/SplitConverter.java?rev=1684498&r1=1684497&r2=1684498&view=diff
==============================================================================
--- 
pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/spark/converter/SplitConverter.java
 (original)
+++ 
pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/spark/converter/SplitConverter.java
 Tue Jun  9 18:34:32 2015
@@ -27,7 +27,7 @@ import org.apache.pig.data.Tuple;
 
 import org.apache.spark.rdd.RDD;
 
-public class SplitConverter implements POConverter<Tuple, Tuple, POSplit> {
+public class SplitConverter implements RDDConverter<Tuple, Tuple, POSplit> {
 
     @Override
     public RDD<Tuple> convert(List<RDD<Tuple>> predecessors, POSplit poSplit)

Modified: 
pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/spark/converter/StoreConverter.java
URL: 
http://svn.apache.org/viewvc/pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/spark/converter/StoreConverter.java?rev=1684498&r1=1684497&r2=1684498&view=diff
==============================================================================
--- 
pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/spark/converter/StoreConverter.java
 (original)
+++ 
pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/spark/converter/StoreConverter.java
 Tue Jun  9 18:34:32 2015
@@ -56,7 +56,7 @@ import com.google.common.collect.Lists;
  */
 @SuppressWarnings({ "serial" })
 public class StoreConverter implements
-        POConverter<Tuple, Tuple2<Text, Tuple>, POStore> {
+        RDDConverter<Tuple, Tuple2<Text, Tuple>, POStore> {
 
   private static final Log LOG = LogFactory.getLog(StoreConverter.class);
   private static final FromTupleFunction FROM_TUPLE_FUNCTION = new 
FromTupleFunction();

Modified: 
pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/spark/converter/StreamConverter.java
URL: 
http://svn.apache.org/viewvc/pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/spark/converter/StreamConverter.java?rev=1684498&r1=1684497&r2=1684498&view=diff
==============================================================================
--- 
pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/spark/converter/StreamConverter.java
 (original)
+++ 
pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/spark/converter/StreamConverter.java
 Tue Jun  9 18:34:32 2015
@@ -38,7 +38,7 @@ import org.apache.spark.rdd.RDD;
 import org.apache.spark.api.java.function.FlatMapFunction;
 
 public class StreamConverter implements
-               POConverter<Tuple, Tuple, POStream> {
+               RDDConverter<Tuple, Tuple, POStream> {
        private byte[] confBytes;
 
        public StreamConverter(byte[] confBytes) {
@@ -111,4 +111,4 @@ public class StreamConverter implements
                        };
                }
        }
-}
\ No newline at end of file
+}

Modified: 
pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/spark/converter/UnionConverter.java
URL: 
http://svn.apache.org/viewvc/pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/spark/converter/UnionConverter.java?rev=1684498&r1=1684497&r2=1684498&view=diff
==============================================================================
--- 
pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/spark/converter/UnionConverter.java
 (original)
+++ 
pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/spark/converter/UnionConverter.java
 Tue Jun  9 18:34:32 2015
@@ -29,7 +29,7 @@ import org.apache.spark.rdd.UnionRDD;
 import org.apache.spark.rdd.RDD;
 import org.apache.spark.SparkContext;
 
-public class UnionConverter implements POConverter<Tuple, Tuple, POUnion> {
+public class UnionConverter implements RDDConverter<Tuple, Tuple, POUnion> {
 
     private final SparkContext sc;
 


Reply via email to