Author: zly
Date: Fri Apr  7 01:41:05 2017
New Revision: 1790492

URL: http://svn.apache.org/viewvc?rev=1790492&view=rev
Log:
PIG-5203: Partitioner E2E test fails on spark (Adam via Liyun)

Modified:
    pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/spark/optimizer/CombinerOptimizer.java

Modified: pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/spark/optimizer/CombinerOptimizer.java
URL: http://svn.apache.org/viewvc/pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/spark/optimizer/CombinerOptimizer.java?rev=1790492&r1=1790491&r2=1790492&view=diff
==============================================================================
--- pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/spark/optimizer/CombinerOptimizer.java (original)
+++ pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/spark/optimizer/CombinerOptimizer.java Fri Apr  7 01:41:05 2017
@@ -248,9 +248,9 @@ public class CombinerOptimizer extends S
                     phyPlan.replace(rearrange, combinerLocalRearrange);
 
                     // Create a reduceBy operator.
-                    POReduceBySpark reduceOperator = new POReduceBySpark(cfe.getOperatorKey(), cfe
-                            .getRequestedParallelism(),
-                            cfe.getInputPlans(), cfe.getToBeFlattened(), combinePack, newRearrange);
+                    POReduceBySpark reduceOperator = new POReduceBySpark(cfe.getOperatorKey(), combinerLocalRearrange
+                            .getRequestedParallelism(), cfe.getInputPlans(), cfe.getToBeFlattened(), combinePack,
+                            newRearrange);
                     reduceOperator.setCustomPartitioner(glr.getCustomPartitioner());
                     fixReduceSideFE(postReduceFE, algebraicOps);
                     CombinerOptimizerUtil.changeFunc(reduceOperator, POUserFunc.INTERMEDIATE);
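
For readers following the change: as I read the hunk above, the only functional difference is which operator supplies the requested parallelism to POReduceBySpark; it is now read from the combiner's local rearrange instead of the combine foreach (cfe), while the custom partitioner from the global rearrange is propagated as before. A minimal sketch of the fixed call follows; this is a fragment for illustration only, using the variable names visible in the diff, and the comment about where the PARALLEL setting lives is an assumption, not something stated in the commit.

    // Sketch of the fixed construction (fragment, not a standalone compilable unit).
    // Parallelism now comes from the combiner's local rearrange, which is assumed to
    // carry the requested PARALLEL value, instead of from the combine foreach (cfe).
    POReduceBySpark reduceOperator = new POReduceBySpark(
            cfe.getOperatorKey(),                             // operator key reused from the combine foreach
            combinerLocalRearrange.getRequestedParallelism(), // was: cfe.getRequestedParallelism()
            cfe.getInputPlans(),
            cfe.getToBeFlattened(),
            combinePack,
            newRearrange);
    // The custom partitioner from the global rearrange is still applied afterwards,
    // unchanged by this commit:
    reduceOperator.setCustomPartitioner(glr.getCustomPartitioner());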

