Author: brock
Date: Mon Aug 18 16:00:13 2014
New Revision: 1618642

URL: http://svn.apache.org/r1618642
Log:
HIVE-7766 - Cleanup Reduce operator code [Spark Branch] (Rui Li via Brock)

Modified:
    
hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecReducer.java
    
hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HiveReduceFunctionResultList.java
    
hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkReduceRecordHandler.java

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecReducer.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecReducer.java?rev=1618642&r1=1618641&r2=1618642&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecReducer.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecReducer.java Mon Aug 18 16:00:13 2014
@@ -321,8 +321,4 @@ public class ExecReducer extends MapRedu
       Utilities.clearWorkMap();
     }
   }
-
-  public Operator<?> getReducer() {
-    return reducer;
-  }
 }

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HiveReduceFunctionResultList.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HiveReduceFunctionResultList.java?rev=1618642&r1=1618641&r2=1618642&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HiveReduceFunctionResultList.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HiveReduceFunctionResultList.java Mon Aug 18 16:00:13 2014
@@ -18,15 +18,10 @@
 package org.apache.hadoop.hive.ql.exec.spark;
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.ql.exec.Operator;
-import org.apache.hadoop.hive.ql.exec.OperatorUtils;
-import org.apache.hadoop.hive.ql.plan.OperatorDesc;
 import org.apache.hadoop.io.BytesWritable;
-import org.apache.hadoop.mapred.Reporter;
 import scala.Tuple2;
 
 import java.io.IOException;
-import java.util.Arrays;
 import java.util.Iterator;
 
 public class HiveReduceFunctionResultList extends
@@ -44,7 +39,6 @@ public class HiveReduceFunctionResultLis
     SparkReduceRecordHandler reducer) {
     super(conf, inputIterator);
     this.reduceRecordHandler = reducer;
-    setOutputCollector();
   }
 
   @Override
@@ -62,11 +56,4 @@ public class HiveReduceFunctionResultLis
   protected void closeRecordProcessor() {
     reduceRecordHandler.close();
   }
-
-  private void setOutputCollector() {
-    if (reduceRecordHandler != null && reduceRecordHandler.getReducer() != null) {
-      OperatorUtils.setChildrenCollector(
-          Arrays.<Operator<? extends OperatorDesc>>asList(reduceRecordHandler.getReducer()), this);
-    }
-  }
 }

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkReduceRecordHandler.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkReduceRecordHandler.java?rev=1618642&r1=1618641&r2=1618642&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkReduceRecordHandler.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkReduceRecordHandler.java Mon Aug 18 16:00:13 2014
@@ -29,13 +29,10 @@ import java.util.List;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hive.ql.exec.MapredContext;
-import org.apache.hadoop.hive.ql.exec.ObjectCache;
-import org.apache.hadoop.hive.ql.exec.ObjectCacheFactory;
-import org.apache.hadoop.hive.ql.exec.Operator;
-import org.apache.hadoop.hive.ql.exec.Utilities;
+import org.apache.hadoop.hive.ql.exec.*;
 import org.apache.hadoop.hive.ql.exec.mr.ExecMapper.ReportStats;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.plan.OperatorDesc;
 import org.apache.hadoop.hive.ql.plan.ReduceWork;
 import org.apache.hadoop.hive.ql.plan.TableDesc;
 import org.apache.hadoop.hive.serde2.Deserializer;
@@ -135,6 +132,8 @@ public class SparkReduceRecordHandler ex
     }
 
     reducer.setReporter(rp);
+    OperatorUtils.setChildrenCollector(
+        Arrays.<Operator<? extends OperatorDesc>>asList(reducer), output);
 
     // initialize reduce operator tree
     try {
@@ -279,8 +278,4 @@ public class SparkReduceRecordHandler ex
       Utilities.clearWorkMap();
     }
   }
-
-  public Operator<?> getReducer() {
-    return reducer;
-  }
 }


Reply via email to