Author: xuefu
Date: Fri Sep 12 16:22:37 2014
New Revision: 1624584

URL: http://svn.apache.org/r1624584
Log:
HIVE-8017: Use HiveKey instead of BytesWritable as key type of the pair RDD [Spark Branch] Rui Li via Xuefu

Modified:
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/GraphTran.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/GroupByShuffler.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HiveBaseFunctionResultList.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HiveKVResultCache.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HiveMapFunction.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HiveReduceFunction.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HiveReduceFunctionResultList.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HiveVoidFunction.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/KryoSerializer.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/MapTran.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/ReduceTran.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SortByShuffler.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkRecordHandler.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkShuffler.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkTran.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/UnionTran.java
    hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/spark/TestHiveKVResultCache.java
    hive/branches/spark/ql/src/test/results/clientpositive/spark/groupby1.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/groupby4.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/having.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/limit_pushdown.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/merge1.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/merge2.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/union_remove_19.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/union_remove_25.q.out

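For context: HiveKey (org.apache.hadoop.hive.ql.io.HiveKey) extends BytesWritable but carries an explicitly assigned hash code and a distribution-key length, so keying the pair RDDs by HiveKey lets Spark partition rows the way Hive's reduce sink intends rather than by the raw serialized bytes. A minimal sketch of constructing such a key, illustrative only and not part of this commit (assumes hive-exec and hadoop-common on the classpath; the "whole key is the distribution key" choice is an assumption for the example):

import org.apache.hadoop.hive.ql.io.HiveKey;
import org.apache.hadoop.io.BytesWritable;

public class HiveKeySketch {
  public static void main(String[] args) {
    // A HiveKey wraps the serialized key bytes plus a hash code chosen by the
    // producer; hash partitioning then sees that hash instead of the default
    // BytesWritable hash of the raw bytes.
    byte[] keyBytes = "key_0".getBytes();
    HiveKey key = new HiveKey(keyBytes, "key_0".hashCode());
    key.setDistKeyLength(keyBytes.length); // assumption: the whole key is the distribution key
    BytesWritable value = new BytesWritable("value_0".getBytes());
    System.out.println("hash=" + key.hashCode()
        + ", distKeyLength=" + key.getDistKeyLength()
        + ", valueLength=" + value.getLength());
  }
}
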
Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/GraphTran.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/GraphTran.java?rev=1624584&r1=1624583&r2=1624584&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/GraphTran.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/GraphTran.java Fri Sep 12 16:22:37 2014
@@ -25,6 +25,7 @@ import java.util.List;
 import java.util.Map;
 import java.util.Set;
 
+import org.apache.hadoop.hive.ql.io.HiveKey;
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.spark.api.java.JavaPairRDD;
 
@@ -34,7 +35,8 @@ public class GraphTran {
   private final Set<SparkTran> leafTrans = new HashSet<SparkTran>();
   private final Map<SparkTran, List<SparkTran>> transGraph = new 
HashMap<SparkTran, List<SparkTran>>();
   private final Map<SparkTran, List<SparkTran>> invertedTransGraph = new 
HashMap<SparkTran, List<SparkTran>>();
-  private final Map<SparkTran, List<JavaPairRDD<BytesWritable, 
BytesWritable>>> unionInputs = new HashMap<SparkTran, 
List<JavaPairRDD<BytesWritable, BytesWritable>>>();
+  private final Map<SparkTran, List<JavaPairRDD<HiveKey, BytesWritable>>> 
unionInputs =
+      new HashMap<SparkTran, List<JavaPairRDD<HiveKey, BytesWritable>>>();
   private final Map<SparkTran, JavaPairRDD<BytesWritable, BytesWritable>> 
mapInputs = new HashMap<SparkTran, JavaPairRDD<BytesWritable, BytesWritable>>();
 
   public void addRootTranWithInput(SparkTran tran, JavaPairRDD<BytesWritable, 
BytesWritable> input) {
@@ -50,7 +52,8 @@ public class GraphTran {
   }
 
   public void execute() throws IllegalStateException {
-    Map<SparkTran, JavaPairRDD<BytesWritable, BytesWritable>> resultRDDs = new 
HashMap<SparkTran, JavaPairRDD<BytesWritable, BytesWritable>>();
+    Map<SparkTran, JavaPairRDD<HiveKey, BytesWritable>> resultRDDs =
+        new HashMap<SparkTran, JavaPairRDD<HiveKey, BytesWritable>>();
     for (SparkTran tran : rootTrans) {
       // make sure all the root trans are MapTran
       if (!(tran instanceof MapTran)) {
@@ -60,16 +63,16 @@ public class GraphTran {
       if (input == null) {
         throw new IllegalStateException("input is missing for 
transformation!");
       }
-      JavaPairRDD<BytesWritable, BytesWritable> rdd = tran.transform(input);
+      JavaPairRDD<HiveKey, BytesWritable> rdd = tran.transform(input);
 
       while (getChildren(tran).size() > 0) {
         SparkTran childTran = getChildren(tran).get(0);
         if (childTran instanceof UnionTran) {
-          List<JavaPairRDD<BytesWritable, BytesWritable>> unionInputList = 
unionInputs
+          List<JavaPairRDD<HiveKey, BytesWritable>> unionInputList = 
unionInputs
               .get(childTran);
           if (unionInputList == null) {
             // process the first union input RDD, cache it in the hash map
-            unionInputList = new LinkedList<JavaPairRDD<BytesWritable, 
BytesWritable>>();
+            unionInputList = new LinkedList<JavaPairRDD<HiveKey, 
BytesWritable>>();
             unionInputList.add(rdd);
             unionInputs.put(childTran, unionInputList);
             break;
@@ -79,7 +82,7 @@ public class GraphTran {
             break;
           } else if (unionInputList.size() == 
this.getParents(childTran).size() - 1) { // process
             // process the last input RDD
-            for (JavaPairRDD<BytesWritable, BytesWritable> inputRDD : 
unionInputList) {
+            for (JavaPairRDD<HiveKey, BytesWritable> inputRDD : 
unionInputList) {
               ((UnionTran) childTran).setOtherInput(inputRDD);
               rdd = childTran.transform(rdd);
             }
@@ -94,7 +97,7 @@ public class GraphTran {
         resultRDDs.put(tran, rdd);
       }
     }
-    for (JavaPairRDD<BytesWritable, BytesWritable> resultRDD : 
resultRDDs.values()) {
+    for (JavaPairRDD<HiveKey, BytesWritable> resultRDD : resultRDDs.values()) {
       resultRDD.foreach(HiveVoidFunction.getInstance());
     }
   }

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/GroupByShuffler.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/GroupByShuffler.java?rev=1624584&r1=1624583&r2=1624584&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/GroupByShuffler.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/GroupByShuffler.java Fri Sep 12 16:22:37 2014
@@ -1,13 +1,14 @@
 package org.apache.hadoop.hive.ql.exec.spark;
 
+import org.apache.hadoop.hive.ql.io.HiveKey;
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.spark.api.java.JavaPairRDD;
 
 public class GroupByShuffler implements SparkShuffler {
 
   @Override
-  public JavaPairRDD<BytesWritable, Iterable<BytesWritable>> shuffle(
-      JavaPairRDD<BytesWritable, BytesWritable> input, int numPartitions) {
+  public JavaPairRDD<HiveKey, Iterable<BytesWritable>> shuffle(
+      JavaPairRDD<HiveKey, BytesWritable> input, int numPartitions) {
     if (numPartitions > 0) {
       return input.groupByKey(numPartitions);
     }

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HiveBaseFunctionResultList.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HiveBaseFunctionResultList.java?rev=1624584&r1=1624583&r2=1624584&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HiveBaseFunctionResultList.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HiveBaseFunctionResultList.java Fri Sep 12 16:22:37 2014
@@ -19,6 +19,7 @@ package org.apache.hadoop.hive.ql.exec.s
 
 import com.google.common.base.Preconditions;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.ql.io.HiveKey;
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.mapred.OutputCollector;
 import scala.Tuple2;
@@ -38,7 +39,7 @@ import java.util.NoSuchElementException;
  *     through Iterator interface.
  */
 public abstract class HiveBaseFunctionResultList<T> implements
-    Iterable, OutputCollector<BytesWritable, BytesWritable>, Serializable {
+    Iterable, OutputCollector<HiveKey, BytesWritable>, Serializable {
 
   private final Iterator<T> inputIterator;
   private boolean isClosed = false;
@@ -60,8 +61,16 @@ public abstract class HiveBaseFunctionRe
   }
 
   @Override
-  public void collect(BytesWritable key, BytesWritable value) throws 
IOException {
-    lastRecordOutput.add(copyBytesWritable(key), copyBytesWritable(value));
+  public void collect(HiveKey key, BytesWritable value) throws IOException {
+    lastRecordOutput.add(copyHiveKey(key), copyBytesWritable(value));
+  }
+
+  private static HiveKey copyHiveKey(HiveKey key) {
+    HiveKey copy = new HiveKey();
+    copy.setDistKeyLength(key.getDistKeyLength());
+    copy.setHashCode(key.hashCode());
+    copy.set(key);
+    return copy;
   }
 
   private static BytesWritable copyBytesWritable(BytesWritable bw) {
@@ -125,7 +134,7 @@ public abstract class HiveBaseFunctionRe
     }
 
     @Override
-    public Tuple2<BytesWritable, BytesWritable> next() {
+    public Tuple2<HiveKey, BytesWritable> next() {
       if (hasNext()) {
         return lastRecordOutput.next();
       }

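The copyHiveKey() helper above has to carry over more than the byte payload: HiveKey's hash code is whatever was set explicitly, and the distribution-key length is a separate field, so neither would survive a plain BytesWritable copy. A small illustrative check of that behaviour, not part of the commit (class name and values are made up):

import org.apache.hadoop.hive.ql.io.HiveKey;

public class HiveKeyCopySketch {
  public static void main(String[] args) {
    HiveKey original = new HiveKey("k1".getBytes(), 42); // hash code set explicitly
    original.setDistKeyLength(2);

    // Same steps as copyHiveKey(): dist key length, hash code, then the bytes.
    HiveKey copy = new HiveKey();
    copy.setDistKeyLength(original.getDistKeyLength());
    copy.setHashCode(original.hashCode());
    copy.set(original); // BytesWritable.set() copies the underlying byte payload

    System.out.println(copy.hashCode() == 42);        // true: preset hash preserved
    System.out.println(copy.getDistKeyLength() == 2); // true: dist key length preserved
  }
}
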
Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HiveKVResultCache.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HiveKVResultCache.java?rev=1624584&r1=1624583&r2=1624584&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HiveKVResultCache.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HiveKVResultCache.java Fri Sep 12 16:22:37 2014
@@ -21,6 +21,7 @@ import com.google.common.base.Preconditi
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.exec.persistence.RowContainer;
+import org.apache.hadoop.hive.ql.io.HiveKey;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.plan.PlanUtils;
 import org.apache.hadoop.hive.ql.plan.TableDesc;
@@ -72,14 +73,16 @@ public class HiveKVResultCache {
     }
   }
 
-  public void add(BytesWritable key, BytesWritable value) {
+  public void add(HiveKey key, BytesWritable value) {
+    byte[] hiveKeyBytes = KryoSerializer.serialize(key);
+    BytesWritable wrappedHiveKey = new BytesWritable(hiveKeyBytes);
     List<BytesWritable> row = new ArrayList<BytesWritable>(2);
-    row.add(key);
+    row.add(wrappedHiveKey);
     row.add(value);
 
     try {
       container.addRow(row);
-    } catch(HiveException ex) {
+    } catch (HiveException ex) {
       throw new RuntimeException("Failed to add KV pair to RowContainer", ex);
     }
   }
@@ -97,7 +100,7 @@ public class HiveKVResultCache {
     return container.rowCount() > 0 && cursor < container.rowCount();
   }
 
-  public Tuple2<BytesWritable, BytesWritable> next() {
+  public Tuple2<HiveKey, BytesWritable> next() {
     Preconditions.checkState(hasNext());
 
     try {
@@ -108,8 +111,9 @@ public class HiveKVResultCache {
         row = container.next();
       }
       cursor++;
-      return new Tuple2<BytesWritable, BytesWritable>(row.get(0), row.get(1));
-    } catch(HiveException ex) {
+      HiveKey key = KryoSerializer.deserialize(row.get(0).getBytes(), 
HiveKey.class);
+      return new Tuple2<HiveKey, BytesWritable>(key, row.get(1));
+    } catch (HiveException ex) {
       throw new RuntimeException("Failed to get row from RowContainer", ex);
     }
   }

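HiveKVResultCache persists rows through RowContainer, which stores lists of BytesWritable, so add() now Kryo-serializes the HiveKey into a wrapper BytesWritable and next() rebuilds it. A rough sketch of that round trip using the KryoSerializer from this patch, illustrative only (assumes hive-exec on the classpath; note BytesWritable.getBytes() may return a padded buffer, which Kryo tolerates because it reads only the bytes it needs):

import org.apache.hadoop.hive.ql.exec.spark.KryoSerializer;
import org.apache.hadoop.hive.ql.io.HiveKey;
import org.apache.hadoop.io.BytesWritable;

public class HiveKeyRoundTripSketch {
  public static void main(String[] args) {
    HiveKey key = new HiveKey("k1".getBytes(), "k1".hashCode());
    key.setDistKeyLength(2);

    // add(): wrap the Kryo-serialized key so RowContainer can spill it to disk.
    BytesWritable wrapped = new BytesWritable(KryoSerializer.serialize(key));

    // next(): rebuild the HiveKey from the stored bytes.
    HiveKey restored = KryoSerializer.deserialize(wrapped.getBytes(), HiveKey.class);
    System.out.println(restored.equals(key) && restored.hashCode() == key.hashCode());
  }
}
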
Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HiveMapFunction.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HiveMapFunction.java?rev=1624584&r1=1624583&r2=1624584&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HiveMapFunction.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HiveMapFunction.java Fri Sep 12 16:22:37 2014
@@ -20,6 +20,7 @@ package org.apache.hadoop.hive.ql.exec.s
 
 import java.util.Iterator;
 
+import org.apache.hadoop.hive.ql.io.HiveKey;
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.Reporter;
@@ -28,7 +29,7 @@ import org.apache.spark.api.java.functio
 import scala.Tuple2;
 
 public class HiveMapFunction implements 
PairFlatMapFunction<Iterator<Tuple2<BytesWritable, BytesWritable>>,
-BytesWritable, BytesWritable> {
+    HiveKey, BytesWritable> {
   private static final long serialVersionUID = 1L;
 
   private transient JobConf jobConf;
@@ -40,7 +41,7 @@ BytesWritable, BytesWritable> {
   }
 
   @Override
-  public Iterable<Tuple2<BytesWritable, BytesWritable>> 
+  public Iterable<Tuple2<HiveKey, BytesWritable>>
   call(Iterator<Tuple2<BytesWritable, BytesWritable>> it) throws Exception {
     if (jobConf == null) {
       jobConf = KryoSerializer.deserializeJobConf(this.buffer);

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HiveReduceFunction.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HiveReduceFunction.java?rev=1624584&r1=1624583&r2=1624584&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HiveReduceFunction.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HiveReduceFunction.java Fri Sep 12 16:22:37 2014
@@ -21,6 +21,7 @@ package org.apache.hadoop.hive.ql.exec.s
 import java.util.Iterator;
 
 import org.apache.hadoop.hive.ql.exec.mr.ExecReducer;
+import org.apache.hadoop.hive.ql.io.HiveKey;
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.Reporter;
@@ -28,8 +29,8 @@ import org.apache.spark.api.java.functio
 
 import scala.Tuple2;
 
-public class HiveReduceFunction implements 
PairFlatMapFunction<Iterator<Tuple2<BytesWritable,Iterable<BytesWritable>>>,
-BytesWritable, BytesWritable> {
+public class HiveReduceFunction implements PairFlatMapFunction<
+    Iterator<Tuple2<HiveKey, Iterable<BytesWritable>>>, HiveKey, 
BytesWritable> {
   private static final long serialVersionUID = 1L;
 
   private transient JobConf jobConf;
@@ -41,14 +42,15 @@ BytesWritable, BytesWritable> {
   }
 
   @Override
-  public Iterable<Tuple2<BytesWritable, BytesWritable>>
-  call(Iterator<Tuple2<BytesWritable,Iterable<BytesWritable>>> it) throws 
Exception {
+  public Iterable<Tuple2<HiveKey, BytesWritable>>
+  call(Iterator<Tuple2<HiveKey, Iterable<BytesWritable>>> it) throws Exception 
{
     if (jobConf == null) {
       jobConf = KryoSerializer.deserializeJobConf(this.buffer);
     }
 
     SparkReduceRecordHandler reducerRecordhandler = new 
SparkReduceRecordHandler();
-    HiveReduceFunctionResultList result = new 
HiveReduceFunctionResultList(jobConf, it, reducerRecordhandler);
+    HiveReduceFunctionResultList result =
+        new HiveReduceFunctionResultList(jobConf, it, reducerRecordhandler);
     reducerRecordhandler.init(jobConf, result, Reporter.NULL);
 
     return result;

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HiveReduceFunctionResultList.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HiveReduceFunctionResultList.java?rev=1624584&r1=1624583&r2=1624584&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HiveReduceFunctionResultList.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HiveReduceFunctionResultList.java Fri Sep 12 16:22:37 2014
@@ -18,6 +18,7 @@
 package org.apache.hadoop.hive.ql.exec.spark;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.ql.io.HiveKey;
 import org.apache.hadoop.io.BytesWritable;
 import scala.Tuple2;
 
@@ -25,7 +26,7 @@ import java.io.IOException;
 import java.util.Iterator;
 
 public class HiveReduceFunctionResultList extends
-    HiveBaseFunctionResultList<Tuple2<BytesWritable, Iterable<BytesWritable>>> 
{
+    HiveBaseFunctionResultList<Tuple2<HiveKey, Iterable<BytesWritable>>> {
   private final SparkReduceRecordHandler reduceRecordHandler;
 
   /**
@@ -35,14 +36,14 @@ public class HiveReduceFunctionResultLis
    * @param reducer Initialized {@link 
org.apache.hadoop.hive.ql.exec.mr.ExecReducer} instance.
    */
   public HiveReduceFunctionResultList(Configuration conf,
-      Iterator<Tuple2<BytesWritable, Iterable<BytesWritable>>> inputIterator,
+      Iterator<Tuple2<HiveKey, Iterable<BytesWritable>>> inputIterator,
     SparkReduceRecordHandler reducer) {
     super(conf, inputIterator);
     this.reduceRecordHandler = reducer;
   }
 
   @Override
-  protected void processNextRecord(Tuple2<BytesWritable, 
Iterable<BytesWritable>> inputRecord)
+  protected void processNextRecord(Tuple2<HiveKey, Iterable<BytesWritable>> 
inputRecord)
       throws IOException {
     reduceRecordHandler.processRow(inputRecord._1(), 
inputRecord._2().iterator());
   }

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HiveVoidFunction.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HiveVoidFunction.java?rev=1624584&r1=1624583&r2=1624584&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HiveVoidFunction.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HiveVoidFunction.java Fri Sep 12 16:22:37 2014
@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.hive.ql.exec.spark;
 
+import org.apache.hadoop.hive.ql.io.HiveKey;
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.spark.api.java.function.VoidFunction;
 
@@ -25,9 +26,8 @@ import scala.Tuple2;
 
 /**
  * Implementation of a voidFunction that does nothing.
- *
  */
-public class HiveVoidFunction implements VoidFunction<Tuple2<BytesWritable, 
BytesWritable>> {
+public class HiveVoidFunction implements VoidFunction<Tuple2<HiveKey, 
BytesWritable>> {
   private static final long serialVersionUID = 1L;
 
   private static HiveVoidFunction instance = new HiveVoidFunction();
@@ -40,7 +40,7 @@ public class HiveVoidFunction implements
   }
 
   @Override
-  public void call(Tuple2<BytesWritable, BytesWritable> t) throws Exception {
+  public void call(Tuple2<HiveKey, BytesWritable> t) throws Exception {
   }
 
 }

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/KryoSerializer.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/KryoSerializer.java?rev=1624584&r1=1624583&r2=1624584&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/KryoSerializer.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/KryoSerializer.java Fri Sep 12 16:22:37 2014
@@ -35,25 +35,21 @@ import com.esotericsoftware.kryo.io.Inpu
 import com.esotericsoftware.kryo.io.Output;
 
 public class KryoSerializer {
-  private static final Log LOG = LogFactory.getLog("KryoSerializer");
-  private static final Kryo kryo = Utilities.runtimeSerializationKryo.get();
-
-  static {
-    kryo.register(ExecMapper.class);
-  }
+  private static final Log LOG = LogFactory.getLog(KryoSerializer.class);
 
   public static byte[] serialize(Object object) {
     ByteArrayOutputStream stream = new ByteArrayOutputStream();
     Output output = new Output(stream);
 
-    kryo.writeObject(output, object);
+    Utilities.runtimeSerializationKryo.get().writeObject(output, object);
 
     output.close(); // close() also calls flush()
     return stream.toByteArray();
   }
 
-  public static <T> T deserialize(byte[] buffer,Class<T> clazz)  {
-    return kryo.readObject(new Input(new ByteArrayInputStream(buffer)), clazz);
+  public static <T> T deserialize(byte[] buffer, Class<T> clazz) {
+    return Utilities.runtimeSerializationKryo.get().readObject(
+        new Input(new ByteArrayInputStream(buffer)), clazz);
   }
 
   public static byte[] serializeJobConf(JobConf jobConf) {

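The KryoSerializer change drops the class-level Kryo instance (and the eager ExecMapper registration) and instead fetches Utilities.runtimeSerializationKryo.get() on every call; Kryo objects are not thread-safe, so each executor thread should use its own instance. A generic sketch of that thread-local pattern, illustrative only and not Hive's actual Utilities code:

import com.esotericsoftware.kryo.Kryo;

public class ThreadLocalKryoSketch {
  // One Kryo per thread: Kryo instances are not safe for concurrent use.
  private static final ThreadLocal<Kryo> KRYO = new ThreadLocal<Kryo>() {
    @Override
    protected Kryo initialValue() {
      return new Kryo();
    }
  };

  public static void main(String[] args) throws InterruptedException {
    Runnable task = new Runnable() {
      @Override
      public void run() {
        // Each thread sees a distinct Kryo instance.
        System.out.println(Thread.currentThread().getName() + " -> " + KRYO.get());
      }
    };
    Thread t1 = new Thread(task, "t1");
    Thread t2 = new Thread(task, "t2");
    t1.start();
    t2.start();
    t1.join();
    t2.join();
  }
}
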
Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/MapTran.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/MapTran.java?rev=1624584&r1=1624583&r2=1624584&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/MapTran.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/MapTran.java Fri Sep 12 16:22:37 2014
@@ -18,14 +18,15 @@
 
 package org.apache.hadoop.hive.ql.exec.spark;
 
+import org.apache.hadoop.hive.ql.io.HiveKey;
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.spark.api.java.JavaPairRDD;
 
-public class MapTran implements SparkTran {
+public class MapTran implements SparkTran<BytesWritable,HiveKey> {
   private HiveMapFunction mapFunc;
 
   @Override
-  public JavaPairRDD<BytesWritable, BytesWritable> transform(
+  public JavaPairRDD<HiveKey, BytesWritable> transform(
       JavaPairRDD<BytesWritable, BytesWritable> input) {
     return input.mapPartitionsToPair(mapFunc);
   }

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/ReduceTran.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/ReduceTran.java?rev=1624584&r1=1624583&r2=1624584&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/ReduceTran.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/ReduceTran.java Fri Sep 12 16:22:37 2014
@@ -18,17 +18,18 @@
 
 package org.apache.hadoop.hive.ql.exec.spark;
 
+import org.apache.hadoop.hive.ql.io.HiveKey;
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.spark.api.java.JavaPairRDD;
 
-public class ReduceTran implements SparkTran {
+public class ReduceTran implements SparkTran<HiveKey, HiveKey> {
   private SparkShuffler shuffler;
   private HiveReduceFunction reduceFunc;
   private int numPartitions;
 
   @Override
-  public JavaPairRDD<BytesWritable, BytesWritable> transform(
-      JavaPairRDD<BytesWritable, BytesWritable> input) {
+  public JavaPairRDD<HiveKey, BytesWritable> transform(
+      JavaPairRDD<HiveKey, BytesWritable> input) {
     return shuffler.shuffle(input, 
numPartitions).mapPartitionsToPair(reduceFunc);
   }
 

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SortByShuffler.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SortByShuffler.java?rev=1624584&r1=1624583&r2=1624584&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SortByShuffler.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SortByShuffler.java Fri Sep 12 16:22:37 2014
@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.hive.ql.exec.spark;
 
+import org.apache.hadoop.hive.ql.io.HiveKey;
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.spark.api.java.JavaPairRDD;
 import org.apache.spark.api.java.function.PairFlatMapFunction;
@@ -28,9 +29,9 @@ import java.util.*;
 public class SortByShuffler implements SparkShuffler {
 
   @Override
-  public JavaPairRDD<BytesWritable, Iterable<BytesWritable>> shuffle(
-      JavaPairRDD<BytesWritable, BytesWritable> input, int numPartitions) {
-    JavaPairRDD<BytesWritable, BytesWritable> rdd;
+  public JavaPairRDD<HiveKey, Iterable<BytesWritable>> shuffle(
+      JavaPairRDD<HiveKey, BytesWritable> input, int numPartitions) {
+    JavaPairRDD<HiveKey, BytesWritable> rdd;
     if (numPartitions > 0) {
       rdd = input.sortByKey(true, numPartitions);
     } else {
@@ -40,64 +41,64 @@ public class SortByShuffler implements S
   }
 
   private static class ShuffleFunction implements
-  PairFlatMapFunction<Iterator<Tuple2<BytesWritable, BytesWritable>>,
-  BytesWritable, Iterable<BytesWritable>> {
+      PairFlatMapFunction<Iterator<Tuple2<HiveKey, BytesWritable>>,
+          HiveKey, Iterable<BytesWritable>> {
     // make eclipse happy
     private static final long serialVersionUID = 1L;
 
     @Override
-    public Iterable<Tuple2<BytesWritable, Iterable<BytesWritable>>> call(
-        final Iterator<Tuple2<BytesWritable, BytesWritable>> it) throws 
Exception {
+    public Iterable<Tuple2<HiveKey, Iterable<BytesWritable>>> call(
+        final Iterator<Tuple2<HiveKey, BytesWritable>> it) throws Exception {
       // Use input iterator to back returned iterable object.
-      final Iterator<Tuple2<BytesWritable, Iterable<BytesWritable>>> resultIt =
-          new Iterator<Tuple2<BytesWritable, Iterable<BytesWritable>>>() {
-        BytesWritable curKey = null;
-        List<BytesWritable> curValues = new ArrayList<BytesWritable>();
-
-        @Override
-        public boolean hasNext() {
-          return it.hasNext() || curKey != null;
-        }
+      final Iterator<Tuple2<HiveKey, Iterable<BytesWritable>>> resultIt =
+          new Iterator<Tuple2<HiveKey, Iterable<BytesWritable>>>() {
+            HiveKey curKey = null;
+            List<BytesWritable> curValues = new ArrayList<BytesWritable>();
+
+            @Override
+            public boolean hasNext() {
+              return it.hasNext() || curKey != null;
+            }
 
-        @Override
-        public Tuple2<BytesWritable, Iterable<BytesWritable>> next() {
-          // TODO: implement this by accumulating rows with the same key into 
a list.
-          // Note that this list needs to improved to prevent excessive memory 
usage, but this
-          // can be done in later phase.
-          while (it.hasNext()) {
-            Tuple2<BytesWritable, BytesWritable> pair = it.next();
-            if (curKey != null && !curKey.equals(pair._1())) {
-              BytesWritable key = curKey;
-              List<BytesWritable> values = curValues;
-              curKey = pair._1();
-              curValues = new ArrayList<BytesWritable>();
-              curValues.add(pair._2());
-              return new Tuple2<BytesWritable, Iterable<BytesWritable>>(key, 
values);
+            @Override
+            public Tuple2<HiveKey, Iterable<BytesWritable>> next() {
+              // TODO: implement this by accumulating rows with the same key 
into a list.
+              // Note that this list needs to improved to prevent excessive 
memory usage, but this
+              // can be done in later phase.
+              while (it.hasNext()) {
+                Tuple2<HiveKey, BytesWritable> pair = it.next();
+                if (curKey != null && !curKey.equals(pair._1())) {
+                  HiveKey key = curKey;
+                  List<BytesWritable> values = curValues;
+                  curKey = pair._1();
+                  curValues = new ArrayList<BytesWritable>();
+                  curValues.add(pair._2());
+                  return new Tuple2<HiveKey, Iterable<BytesWritable>>(key, 
values);
+                }
+                curKey = pair._1();
+                curValues.add(pair._2());
+              }
+              if (curKey == null) {
+                throw new NoSuchElementException();
+              }
+              // if we get here, this should be the last element we have
+              HiveKey key = curKey;
+              curKey = null;
+              return new Tuple2<HiveKey, Iterable<BytesWritable>>(key, 
curValues);
             }
-            curKey = pair._1();
-            curValues.add(pair._2());
-          }
-          if (curKey == null) {
-            throw new NoSuchElementException();
-          }
-          // if we get here, this should be the last element we have
-          BytesWritable key = curKey;
-          curKey = null;
-          return new Tuple2<BytesWritable, Iterable<BytesWritable>>(key, 
curValues);
-        }
 
-        @Override
-        public void remove() {
-          // Not implemented.
-          // throw Unsupported Method Invocation Exception.
-          throw new UnsupportedOperationException();
-        }
+            @Override
+            public void remove() {
+              // Not implemented.
+              // throw Unsupported Method Invocation Exception.
+              throw new UnsupportedOperationException();
+            }
 
-      };
+          };
 
-      return new Iterable<Tuple2<BytesWritable, Iterable<BytesWritable>>>() {
+      return new Iterable<Tuple2<HiveKey, Iterable<BytesWritable>>>() {
         @Override
-        public Iterator<Tuple2<BytesWritable, Iterable<BytesWritable>>> 
iterator() {
+        public Iterator<Tuple2<HiveKey, Iterable<BytesWritable>>> iterator() {
           return resultIt;
         }
       };

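The re-indented ShuffleFunction in SortByShuffler groups a key-sorted stream into (key, values) pairs lazily: because sortByKey() puts equal keys next to each other, one pass with a current-key buffer is enough and no hash map is needed. A standalone sketch of the same single-pass grouping with plain String keys, illustrative only (class and method names are hypothetical):

import java.util.AbstractMap.SimpleEntry;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.NoSuchElementException;

public class SortedGroupingSketch {

  // Same idea as ShuffleFunction above: the input is sorted by key, so values
  // for one key are adjacent and can be collected without a hash map.
  static Iterator<Map.Entry<String, List<String>>> group(
      final Iterator<Map.Entry<String, String>> it) {
    return new Iterator<Map.Entry<String, List<String>>>() {
      String curKey = null;
      List<String> curValues = new ArrayList<String>();

      public boolean hasNext() {
        return it.hasNext() || curKey != null;
      }

      public Map.Entry<String, List<String>> next() {
        while (it.hasNext()) {
          Map.Entry<String, String> pair = it.next();
          if (curKey != null && !curKey.equals(pair.getKey())) {
            // Key changed: emit the finished group and start a new one.
            Map.Entry<String, List<String>> out =
                new SimpleEntry<String, List<String>>(curKey, curValues);
            curKey = pair.getKey();
            curValues = new ArrayList<String>();
            curValues.add(pair.getValue());
            return out;
          }
          curKey = pair.getKey();
          curValues.add(pair.getValue());
        }
        if (curKey == null) {
          throw new NoSuchElementException();
        }
        // Last group once the input is exhausted.
        Map.Entry<String, List<String>> out =
            new SimpleEntry<String, List<String>>(curKey, curValues);
        curKey = null;
        return out;
      }

      public void remove() {
        throw new UnsupportedOperationException();
      }
    };
  }

  public static void main(String[] args) {
    Iterator<Map.Entry<String, String>> sorted = Arrays.<Map.Entry<String, String>>asList(
        new SimpleEntry<String, String>("a", "1"),
        new SimpleEntry<String, String>("a", "2"),
        new SimpleEntry<String, String>("b", "3")).iterator();
    for (Iterator<Map.Entry<String, List<String>>> g = group(sorted); g.hasNext();) {
      Map.Entry<String, List<String>> e = g.next();
      System.out.println(e.getKey() + " -> " + e.getValue()); // a -> [1, 2], then b -> [3]
    }
  }
}
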
Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkRecordHandler.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkRecordHandler.java?rev=1624584&r1=1624583&r2=1624584&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkRecordHandler.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkRecordHandler.java Fri Sep 12 16:22:37 2014
@@ -33,7 +33,7 @@ import java.util.Arrays;
 import java.util.Iterator;
 
 public abstract class SparkRecordHandler {
-  private static final Log LOG = LogFactory.getLog(SparkRecordHandler.class);
+  private final Log LOG = LogFactory.getLog(this.getClass());
 
   // used to log memory usage periodically
   protected final MemoryMXBean memoryMXBean = 
ManagementFactory.getMemoryMXBean();
@@ -83,7 +83,7 @@ public abstract class SparkRecordHandler
     rowNumber++;
     if (rowNumber == nextLogThreshold) {
       long used_memory = memoryMXBean.getHeapMemoryUsage().getUsed();
-      LOG.info("ExecReducer: processing " + rowNumber
+      LOG.info("processing " + rowNumber
         + " rows: used memory = " + used_memory);
       nextLogThreshold = getNextLogThreshold(rowNumber);
     }
@@ -96,7 +96,7 @@ public abstract class SparkRecordHandler
    */
   protected void logCloseInfo() {
     long used_memory = memoryMXBean.getHeapMemoryUsage().getUsed();
-    LOG.info("ExecMapper: processed " + rowNumber + " rows: used memory = "
+    LOG.info("processed " + rowNumber + " rows: used memory = "
       + used_memory);
   }
 

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkShuffler.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkShuffler.java?rev=1624584&r1=1624583&r2=1624584&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkShuffler.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkShuffler.java Fri Sep 12 16:22:37 2014
@@ -18,12 +18,13 @@
 
 package org.apache.hadoop.hive.ql.exec.spark;
 
+import org.apache.hadoop.hive.ql.io.HiveKey;
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.spark.api.java.JavaPairRDD;
 
 public interface SparkShuffler {
 
-  JavaPairRDD<BytesWritable, Iterable<BytesWritable>> shuffle(
-      JavaPairRDD<BytesWritable, BytesWritable> input, int numPartitions);
+  JavaPairRDD<HiveKey, Iterable<BytesWritable>> shuffle(
+      JavaPairRDD<HiveKey, BytesWritable> input, int numPartitions);
 
 }

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkTran.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkTran.java?rev=1624584&r1=1624583&r2=1624584&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkTran.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkTran.java Fri Sep 12 16:22:37 2014
@@ -21,7 +21,7 @@ package org.apache.hadoop.hive.ql.exec.s
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.spark.api.java.JavaPairRDD;
 
-public interface SparkTran {
-  JavaPairRDD<BytesWritable, BytesWritable> transform(
-      JavaPairRDD<BytesWritable, BytesWritable> input);
+public interface SparkTran<KI extends BytesWritable, KO extends BytesWritable> 
{
+  JavaPairRDD<KO, BytesWritable> transform(
+      JavaPairRDD<KI, BytesWritable> input);
 }

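With SparkTran now generic in its input and output key types, the compiler documents how the key type changes along the pipeline: MapTran consumes the BytesWritable keys coming from the Hadoop input format and emits HiveKey, while ReduceTran and UnionTran keep HiveKey on both sides. A type-level sketch of how the pieces line up, illustrative only (the trans and the source RDD are placeholders, since real instances need a serialized JobConf and a SparkContext):

import org.apache.hadoop.hive.ql.exec.spark.SparkTran;
import org.apache.hadoop.hive.ql.io.HiveKey;
import org.apache.hadoop.io.BytesWritable;
import org.apache.spark.api.java.JavaPairRDD;

public class TranChainSketch {
  // Compiles against the generic interface above; wiring up real
  // MapTran/ReduceTran instances is out of scope for this sketch.
  static JavaPairRDD<HiveKey, BytesWritable> chain(
      SparkTran<BytesWritable, HiveKey> mapTran,   // e.g. MapTran
      SparkTran<HiveKey, HiveKey> reduceTran,      // e.g. ReduceTran or UnionTran
      JavaPairRDD<BytesWritable, BytesWritable> source) {
    JavaPairRDD<HiveKey, BytesWritable> mapped = mapTran.transform(source);
    return reduceTran.transform(mapped);
  }
}
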
Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/UnionTran.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/UnionTran.java?rev=1624584&r1=1624583&r2=1624584&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/UnionTran.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/UnionTran.java Fri Sep 12 16:22:37 2014
@@ -18,23 +18,24 @@
 
 package org.apache.hadoop.hive.ql.exec.spark;
 
+import org.apache.hadoop.hive.ql.io.HiveKey;
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.spark.api.java.JavaPairRDD;
 
-public class UnionTran implements SparkTran {
-  JavaPairRDD<BytesWritable, BytesWritable> otherInput;
+public class UnionTran implements SparkTran<HiveKey, HiveKey> {
+  JavaPairRDD<HiveKey, BytesWritable> otherInput;
 
   @Override
-  public JavaPairRDD<BytesWritable, BytesWritable> transform(
-      JavaPairRDD<BytesWritable, BytesWritable> input) {
+  public JavaPairRDD<HiveKey, BytesWritable> transform(
+      JavaPairRDD<HiveKey, BytesWritable> input) {
     return input.union(otherInput);
   }
 
-  public void setOtherInput(JavaPairRDD<BytesWritable, BytesWritable> 
otherInput) {
+  public void setOtherInput(JavaPairRDD<HiveKey, BytesWritable> otherInput) {
     this.otherInput = otherInput;
   }
 
-  public JavaPairRDD<BytesWritable, BytesWritable> getOtherInput() {
+  public JavaPairRDD<HiveKey, BytesWritable> getOtherInput() {
     return this.otherInput;
   }
 }

Modified: hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/spark/TestHiveKVResultCache.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/spark/TestHiveKVResultCache.java?rev=1624584&r1=1624583&r2=1624584&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/spark/TestHiveKVResultCache.java (original)
+++ hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/spark/TestHiveKVResultCache.java Fri Sep 12 16:22:37 2014
@@ -18,6 +18,7 @@
 package org.apache.hadoop.hive.ql.exec.spark;
 
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.io.HiveKey;
 import org.apache.hadoop.io.BytesWritable;
 import org.junit.Test;
 import scala.Tuple2;
@@ -31,13 +32,13 @@ public class TestHiveKVResultCache {
     HiveConf conf = new HiveConf();
     HiveKVResultCache cache = new HiveKVResultCache(conf);
 
-    BytesWritable key = new BytesWritable("key".getBytes());
+    HiveKey key = new HiveKey("key".getBytes(), "key".hashCode());
     BytesWritable value = new BytesWritable("value".getBytes());
     cache.add(key, value);
 
     assertTrue("KV result cache should have at least one element", 
cache.hasNext());
 
-    Tuple2<BytesWritable, BytesWritable> row = cache.next();
+    Tuple2<HiveKey, BytesWritable> row = cache.next();
     assertTrue("Incorrect key", row._1().equals(key));
     assertTrue("Incorrect value", row._2().equals(value));
 
@@ -64,7 +65,7 @@ public class TestHiveKVResultCache {
     for(int i=0; i<numRecords; i++) {
       String key = "key_" + i;
       String value = "value_" + i;
-      cache.add(new BytesWritable(key.getBytes()), new 
BytesWritable(value.getBytes()));
+      cache.add(new HiveKey(key.getBytes(), key.hashCode()), new 
BytesWritable(value.getBytes()));
     }
 
     int recordsSeen = 0;
@@ -72,7 +73,7 @@ public class TestHiveKVResultCache {
       String key = "key_" + recordsSeen;
       String value = "value_" + recordsSeen;
 
-      Tuple2<BytesWritable, BytesWritable> row = cache.next();
+      Tuple2<HiveKey, BytesWritable> row = cache.next();
       assertTrue("Unexpected key at position: " + recordsSeen,
           new String(row._1().getBytes()).equals(key));
       assertTrue("Unexpected value at position: " + recordsSeen,

Modified: hive/branches/spark/ql/src/test/results/clientpositive/spark/groupby1.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/groupby1.q.out?rev=1624584&r1=1624583&r2=1624584&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/spark/groupby1.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/spark/groupby1.q.out Fri Sep 12 16:22:37 2014
@@ -1,8 +1,12 @@
-PREHOOK: query: CREATE TABLE dest_g1(key INT, value DOUBLE) STORED AS TEXTFILE
+PREHOOK: query: -- SORT_QUERY_RESULTS
+
+CREATE TABLE dest_g1(key INT, value DOUBLE) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
 PREHOOK: Output: default@dest_g1
-POSTHOOK: query: CREATE TABLE dest_g1(key INT, value DOUBLE) STORED AS TEXTFILE
+POSTHOOK: query: -- SORT_QUERY_RESULTS
+
+CREATE TABLE dest_g1(key INT, value DOUBLE) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
 POSTHOOK: Output: default@dest_g1
@@ -111,312 +115,312 @@ POSTHOOK: query: SELECT dest_g1.* FROM d
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@dest_g1
 #### A masked pattern was here ####
-168    168.0
-170    170.0
-436    436.0
-364    364.0
-209    418.0
-11     11.0
-374    374.0
-403    1209.0
-195    390.0
-252    252.0
-146    292.0
-95     190.0
-118    236.0
-189    189.0
-199    597.0
-196    196.0
+0      0.0
+10     10.0
 100    200.0
-382    764.0
-30     30.0
-455    455.0
-498    1494.0
+103    206.0
+104    208.0
+105    105.0
+11     11.0
 111    111.0
-287    287.0
-248    248.0
-19     19.0
-311    933.0
+113    226.0
+114    114.0
+116    116.0
+118    236.0
+119    357.0
+12     24.0
+120    240.0
 125    250.0
-178    178.0
+126    126.0
+128    384.0
+129    258.0
+131    131.0
+133    133.0
+134    268.0
+136    136.0
+137    274.0
+138    552.0
+143    143.0
+145    145.0
+146    292.0
+149    298.0
+15     30.0
+150    150.0
+152    304.0
+153    153.0
+155    155.0
+156    156.0
+157    157.0
+158    158.0
 160    160.0
-221    442.0
-87     87.0
-406    1624.0
-76     152.0
-335    335.0
-459    918.0
-263    263.0
+162    162.0
+163    163.0
+164    328.0
+165    330.0
 166    166.0
-463    926.0
-28     28.0
-223    446.0
+167    501.0
+168    168.0
+169    676.0
+17     17.0
+170    170.0
+172    344.0
 174    348.0
+175    350.0
+176    352.0
+177    177.0
+178    178.0
+179    358.0
+18     36.0
+180    180.0
+181    181.0
+183    183.0
+186    186.0
+187    561.0
+189    189.0
+19     19.0
+190    190.0
+191    382.0
+192    192.0
+193    579.0
+194    194.0
+195    390.0
+196    196.0
+197    394.0
+199    597.0
+2      2.0
+20     20.0
+200    400.0
+201    201.0
+202    202.0
 203    406.0
+205    410.0
+207    414.0
 208    624.0
-4      4.0
-404    808.0
-377    377.0
-54     54.0
-368    368.0
-37     74.0
-280    560.0
-57     57.0
-47     47.0
-308    308.0
-291    291.0
-278    556.0
-98     196.0
-484    484.0
-409    1227.0
-155    155.0
-260    260.0
-317    634.0
-429    858.0
-309    618.0
-284    284.0
-413    826.0
-417    1251.0
-27     27.0
-296    296.0
-67     134.0
-244    244.0
+209    418.0
+213    426.0
+214    214.0
+216    432.0
+217    434.0
+218    218.0
+219    438.0
+221    442.0
+222    222.0
+223    446.0
+224    448.0
+226    226.0
+228    228.0
+229    458.0
 230    1150.0
-96     96.0
-183    183.0
-475    475.0
-249    249.0
-289    289.0
-427    427.0
-418    418.0
-181    181.0
-472    472.0
-454    1362.0
-207    414.0
-310    310.0
 233    466.0
-194    194.0
-224    448.0
-129    258.0
-10     10.0
-360    360.0
-460    460.0
-12     24.0
-5      15.0
-481    481.0
-85     85.0
-58     116.0
-369    1107.0
-482    482.0
-214    214.0
-177    177.0
-193    579.0
-9      9.0
-34     34.0
-378    378.0
-419    419.0
-165    330.0
-201    201.0
+235    235.0
+237    474.0
+238    476.0
+239    478.0
+24     48.0
 241    241.0
-281    562.0
-397    794.0
-277    1108.0
+242    484.0
+244    244.0
+247    247.0
+248    248.0
+249    249.0
+252    252.0
+255    510.0
+256    512.0
+257    257.0
+258    258.0
+26     52.0
+260    260.0
+262    262.0
+263    263.0
+265    530.0
+266    266.0
+27     27.0
 272    544.0
+273    819.0
+274    274.0
+275    275.0
+277    1108.0
+278    556.0
+28     28.0
+280    560.0
+281    562.0
+282    564.0
+283    283.0
+284    284.0
 285    285.0
+286    286.0
+287    287.0
+288    576.0
+289    289.0
+291    291.0
 292    292.0
+296    296.0
 298    894.0
-332    332.0
-43     43.0
-400    400.0
+30     30.0
+302    302.0
+305    305.0
+306    306.0
+307    614.0
+308    308.0
+309    618.0
+310    310.0
+311    933.0
+315    315.0
+316    948.0
+317    634.0
+318    954.0
 321    642.0
-131    131.0
+322    644.0
+323    323.0
+325    650.0
+327    981.0
+33     33.0
+331    662.0
+332    332.0
 333    666.0
-53     53.0
-242    484.0
-286    286.0
-396    1188.0
-389    389.0
-477    477.0
-421    421.0
-487    487.0
-375    375.0
-327    981.0
-258    258.0
-307    614.0
-392    392.0
-82     82.0
-41     41.0
-83     166.0
-490    490.0
-134    268.0
-78     78.0
-64     64.0
-256    512.0
-401    2005.0
-402    402.0
-393    393.0
-362    362.0
-435    435.0
-288    576.0
-113    226.0
-104    208.0
-20     20.0
-176    352.0
-448    448.0
-237    474.0
-394    394.0
-162    162.0
-480    1440.0
-384    1152.0
-323    323.0
-497    497.0
-446    446.0
-457    457.0
-438    1314.0
-222    222.0
-26     52.0
-386    386.0
-467    467.0
-468    1872.0
-218    218.0
-51     102.0
-163    163.0
-315    315.0
-150    150.0
-273    819.0
-366    366.0
-226    226.0
+335    335.0
+336    336.0
+338    338.0
+339    339.0
+34     34.0
+341    341.0
 342    684.0
-74     74.0
+344    688.0
 345    345.0
-424    848.0
-491    491.0
-431    1293.0
-395    790.0
-302    302.0
-149    298.0
-105    105.0
-167    501.0
-172    344.0
-492    984.0
-239    478.0
-69     69.0
-66     66.0
-70     210.0
-2      2.0
-449    449.0
-179    358.0
-458    916.0
-202    202.0
+348    1740.0
+35     105.0
+351    351.0
+353    706.0
 356    356.0
-430    1290.0
-466    1398.0
-478    956.0
-338    338.0
-453    453.0
-493    493.0
+360    360.0
+362    362.0
+364    364.0
 365    365.0
+366    366.0
+367    734.0
+368    368.0
+369    1107.0
+37     74.0
+373    373.0
+374    374.0
+375    375.0
+377    377.0
+378    378.0
 379    379.0
-353    706.0
-247    247.0
-200    400.0
-283    283.0
-158    158.0
-485    485.0
-103    206.0
-275    275.0
-138    552.0
-452    452.0
-217    434.0
-325    650.0
-274    274.0
-197    394.0
-351    351.0
-336    336.0
+382    764.0
+384    1152.0
+386    386.0
+389    389.0
+392    392.0
+393    393.0
+394    394.0
+395    790.0
+396    1188.0
+397    794.0
 399    798.0
-24     48.0
-133    133.0
-305    305.0
-367    734.0
-157    157.0
-262    262.0
-128    384.0
-414    828.0
-116    116.0
-437    437.0
-156    156.0
-479    479.0
-120    240.0
-153    153.0
+4      4.0
+400    400.0
+401    2005.0
+402    402.0
+403    1209.0
+404    808.0
+406    1624.0
 407    407.0
-80     80.0
+409    1227.0
+41     41.0
 411    411.0
-84     168.0
-322    644.0
+413    826.0
+414    828.0
+417    1251.0
+418    418.0
+419    419.0
+42     84.0
+421    421.0
+424    848.0
+427    427.0
+429    858.0
+43     43.0
+430    1290.0
+431    1293.0
+432    432.0
+435    435.0
+436    436.0
+437    437.0
+438    1314.0
+439    878.0
 44     44.0
 443    443.0
-187    561.0
-341    341.0
-238    476.0
-255    510.0
-8      8.0
-143    143.0
-213    426.0
-126    126.0
-219    438.0
-318    954.0
-169    676.0
-15     30.0
-92     92.0
-33     33.0
-235    235.0
-114    114.0
-145    145.0
-180    180.0
-191    382.0
 444    444.0
-496    496.0
-432    432.0
-344    688.0
+446    446.0
+448    448.0
+449    449.0
+452    452.0
+453    453.0
+454    1362.0
+455    455.0
+457    457.0
+458    916.0
+459    918.0
+460    460.0
+462    924.0
+463    926.0
+466    1398.0
+467    467.0
+468    1872.0
+469    2345.0
+47     47.0
 470    470.0
+472    472.0
+475    475.0
+477    477.0
+478    956.0
+479    479.0
+480    1440.0
+481    481.0
+482    482.0
 483    483.0
-495    495.0
-339    339.0
-35     105.0
-18     36.0
-373    373.0
-152    304.0
-348    1740.0
-462    924.0
-316    948.0
-494    494.0
-229    458.0
-331    662.0
-216    432.0
-72     144.0
-90     270.0
-164    328.0
-97     194.0
-175    350.0
-119    357.0
-190    190.0
-0      0.0
-192    192.0
+484    484.0
+485    485.0
+487    487.0
 489    1956.0
+490    490.0
+491    491.0
+492    984.0
+493    493.0
+494    494.0
+495    495.0
+496    496.0
+497    497.0
+498    1494.0
+5      15.0
+51     102.0
+53     53.0
+54     54.0
+57     57.0
+58     116.0
+64     64.0
 65     65.0
-228    228.0
-306    306.0
-469    2345.0
+66     66.0
+67     134.0
+69     69.0
+70     210.0
+72     144.0
+74     74.0
+76     152.0
 77     77.0
-282    564.0
-17     17.0
-137    274.0
-266    266.0
-42     84.0
-205    410.0
-257    257.0
-186    186.0
-136    136.0
-265    530.0
-439    878.0
+78     78.0
+8      8.0
+80     80.0
+82     82.0
+83     166.0
+84     168.0
+85     85.0
 86     86.0
+87     87.0
+9      9.0
+90     270.0
+92     92.0
+95     190.0
+96     96.0
+97     194.0
+98     196.0

Modified: hive/branches/spark/ql/src/test/results/clientpositive/spark/groupby4.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/groupby4.q.out?rev=1624584&r1=1624583&r2=1624584&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/spark/groupby4.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/spark/groupby4.q.out Fri Sep 12 16:22:37 2014
@@ -1,8 +1,12 @@
-PREHOOK: query: CREATE TABLE dest1(c1 STRING) STORED AS TEXTFILE
+PREHOOK: query: -- SORT_QUERY_RESULTS
+
+CREATE TABLE dest1(c1 STRING) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
 PREHOOK: Output: default@dest1
-POSTHOOK: query: CREATE TABLE dest1(c1 STRING) STORED AS TEXTFILE
+POSTHOOK: query: -- SORT_QUERY_RESULTS
+
+CREATE TABLE dest1(c1 STRING) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
 POSTHOOK: Output: default@dest1
@@ -110,13 +114,13 @@ POSTHOOK: query: SELECT dest1.* FROM des
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@dest1
 #### A masked pattern was here ####
-7
+0
+1
 2
+3
 4
-8
-0
 5
-3
-1
-9
 6
+7
+8
+9

Modified: hive/branches/spark/ql/src/test/results/clientpositive/spark/having.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/having.q.out?rev=1624584&r1=1624583&r2=1624584&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/spark/having.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/spark/having.q.out Fri Sep 12 16:22:37 2014
@@ -1,6 +1,8 @@
-PREHOOK: query: EXPLAIN SELECT count(value) AS c FROM src GROUP BY key HAVING 
c > 3
+PREHOOK: query: -- SORT_QUERY_RESULTS
+EXPLAIN SELECT count(value) AS c FROM src GROUP BY key HAVING c > 3
 PREHOOK: type: QUERY
-POSTHOOK: query: EXPLAIN SELECT count(value) AS c FROM src GROUP BY key HAVING 
c > 3
+POSTHOOK: query: -- SORT_QUERY_RESULTS
+EXPLAIN SELECT count(value) AS c FROM src GROUP BY key HAVING c > 3
 POSTHOOK: type: QUERY
 STAGE DEPENDENCIES:
   Stage-1 is a root stage
@@ -72,15 +74,15 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 #### A masked pattern was here ####
 4
-5
 4
-5
 4
 4
 4
-5
 4
 5
+5
+5
+5
 PREHOOK: query: EXPLAIN SELECT key, max(value) AS c FROM src GROUP BY key 
HAVING key != 302
 PREHOOK: type: QUERY
 POSTHOOK: query: EXPLAIN SELECT key, max(value) AS c FROM src GROUP BY key 
HAVING key != 302
@@ -154,314 +156,314 @@ POSTHOOK: query: SELECT key, max(value) 
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 #### A masked pattern was here ####
-168    val_168
-436    val_436
-170    val_170
-364    val_364
-209    val_209
-403    val_403
-374    val_374
-11     val_11
-195    val_195
-252    val_252
-146    val_146
-95     val_95
-118    val_118
-189    val_189
-199    val_199
-196    val_196
+0      val_0
+10     val_10
 100    val_100
-382    val_382
-30     val_30
-455    val_455
-498    val_498
+103    val_103
+104    val_104
+105    val_105
+11     val_11
 111    val_111
-287    val_287
-248    val_248
-19     val_19
-311    val_311
-76     val_76
-178    val_178
-160    val_160
-221    val_221
-87     val_87
-406    val_406
+113    val_113
+114    val_114
+116    val_116
+118    val_118
+119    val_119
+12     val_12
+120    val_120
 125    val_125
-463    val_463
-459    val_459
-263    val_263
+126    val_126
+128    val_128
+129    val_129
+131    val_131
+133    val_133
+134    val_134
+136    val_136
+137    val_137
+138    val_138
+143    val_143
+145    val_145
+146    val_146
+149    val_149
+15     val_15
+150    val_150
+152    val_152
+153    val_153
+155    val_155
+156    val_156
+157    val_157
+158    val_158
+160    val_160
+162    val_162
+163    val_163
+164    val_164
+165    val_165
 166    val_166
-335    val_335
-28     val_28
-223    val_223
+167    val_167
+168    val_168
+169    val_169
+17     val_17
+170    val_170
+172    val_172
 174    val_174
-98     val_98
-208    val_208
-4      val_4
-404    val_404
-47     val_47
-54     val_54
-368    val_368
-377    val_377
-37     val_37
-280    val_280
-57     val_57
-203    val_203
-308    val_308
-291    val_291
-278    val_278
-484    val_484
-409    val_409
-155    val_155
-317    val_317
-475    val_475
-260    val_260
-429    val_429
-417    val_417
-284    val_284
-413    val_413
-309    val_309
-27     val_27
-296    val_296
-67     val_67
-244    val_244
-230    val_230
-96     val_96
-183    val_183
-249    val_249
-289    val_289
-427    val_427
-418    val_418
-181    val_181
-472    val_472
-454    val_454
-310    val_310
-207    val_207
-233    val_233
-194    val_194
-224    val_224
-129    val_129
-360    val_360
-10     val_10
-460    val_460
-5      val_5
-12     val_12
-481    val_481
-85     val_85
-58     val_58
-369    val_369
-482    val_482
-214    val_214
+175    val_175
+176    val_176
 177    val_177
+178    val_178
+179    val_179
+18     val_18
+180    val_180
+181    val_181
+183    val_183
+186    val_186
+187    val_187
+189    val_189
+19     val_19
+190    val_190
+191    val_191
+192    val_192
 193    val_193
-9      val_9
-34     val_34
-419    val_419
-378    val_378
-165    val_165
+194    val_194
+195    val_195
+196    val_196
+197    val_197
+199    val_199
+2      val_2
+20     val_20
+200    val_200
 201    val_201
+202    val_202
+203    val_203
+205    val_205
+207    val_207
+208    val_208
+209    val_209
+213    val_213
+214    val_214
+216    val_216
+217    val_217
+218    val_218
+219    val_219
+221    val_221
+222    val_222
+223    val_223
+224    val_224
+226    val_226
+228    val_228
+229    val_229
+230    val_230
+233    val_233
+235    val_235
+237    val_237
+238    val_238
+239    val_239
+24     val_24
 241    val_241
+242    val_242
+244    val_244
+247    val_247
+248    val_248
+249    val_249
+252    val_252
+255    val_255
+256    val_256
+257    val_257
+258    val_258
+26     val_26
+260    val_260
+262    val_262
+263    val_263
+265    val_265
+266    val_266
+27     val_27
+272    val_272
+273    val_273
+274    val_274
+275    val_275
+277    val_277
+278    val_278
+28     val_28
+280    val_280
 281    val_281
-82     val_82
-292    val_292
+282    val_282
+283    val_283
+284    val_284
 285    val_285
-400    val_400
-53     val_53
-298    val_298
-397    val_397
-43     val_43
-272    val_272
-332    val_332
-321    val_321
-389    val_389
-131    val_131
 286    val_286
-242    val_242
-421    val_421
-487    val_487
-78     val_78
-396    val_396
-375    val_375
-83     val_83
-258    val_258
+287    val_287
+288    val_288
+289    val_289
+291    val_291
+292    val_292
+296    val_296
+298    val_298
+30     val_30
+305    val_305
+306    val_306
 307    val_307
-41     val_41
-392    val_392
-277    val_277
+308    val_308
+309    val_309
+310    val_310
+311    val_311
+315    val_315
+316    val_316
+317    val_317
+318    val_318
+321    val_321
+322    val_322
+323    val_323
+325    val_325
 327    val_327
-490    val_490
-134    val_134
+33     val_33
+331    val_331
+332    val_332
 333    val_333
-64     val_64
-477    val_477
-256    val_256
-401    val_401
-402    val_402
-435    val_435
-393    val_393
-362    val_362
-492    val_492
-288    val_288
-438    val_438
-104    val_104
-113    val_113
-176    val_176
-448    val_448
-394    val_394
-457    val_457
-162    val_162
-480    val_480
-384    val_384
-323    val_323
-497    val_497
-20     val_20
-446    val_446
-237    val_237
-26     val_26
-222    val_222
-386    val_386
-467    val_467
-468    val_468
-218    val_218
-51     val_51
-163    val_163
-315    val_315
-150    val_150
-273    val_273
-366    val_366
-226    val_226
-74     val_74
-345    val_345
-424    val_424
-491    val_491
-431    val_431
-395    val_395
+335    val_335
+336    val_336
+338    val_338
+339    val_339
+34     val_34
+341    val_341
 342    val_342
-149    val_149
-105    val_105
-167    val_167
-69     val_69
-172    val_172
-66     val_66
-70     val_70
-2      val_2
-239    val_239
-449    val_449
-179    val_179
-458    val_458
-202    val_202
+344    val_344
+345    val_345
+348    val_348
+35     val_35
+351    val_351
+353    val_353
 356    val_356
-430    val_430
-466    val_466
-478    val_478
-453    val_453
-493    val_493
-338    val_338
+360    val_360
+362    val_362
+364    val_364
 365    val_365
+366    val_366
+367    val_367
+368    val_368
+369    val_369
+37     val_37
+373    val_373
+374    val_374
+375    val_375
+377    val_377
+378    val_378
 379    val_379
-353    val_353
-247    val_247
-283    val_283
-200    val_200
-158    val_158
-485    val_485
-103    val_103
-275    val_275
-138    val_138
-452    val_452
-217    val_217
-351    val_351
-325    val_325
-197    val_197
-414    val_414
-84     val_84
+382    val_382
+384    val_384
+386    val_386
+389    val_389
+392    val_392
+393    val_393
+394    val_394
+395    val_395
+396    val_396
+397    val_397
 399    val_399
-24     val_24
-133    val_133
-305    val_305
-367    val_367
-157    val_157
-262    val_262
-128    val_128
-116    val_116
-437    val_437
-156    val_156
-479    val_479
-153    val_153
-80     val_80
+4      val_4
+400    val_400
+401    val_401
+402    val_402
+403    val_403
+404    val_404
+406    val_406
 407    val_407
-120    val_120
+409    val_409
+41     val_41
 411    val_411
-336    val_336
-322    val_322
+413    val_413
+414    val_414
+417    val_417
+418    val_418
+419    val_419
+42     val_42
+421    val_421
+424    val_424
+427    val_427
+429    val_429
+43     val_43
+430    val_430
+431    val_431
+432    val_432
+435    val_435
+436    val_436
+437    val_437
+438    val_438
+439    val_439
 44     val_44
 443    val_443
-187    val_187
-341    val_341
-238    val_238
-255    val_255
-274    val_274
-8      val_8
-213    val_213
-235    val_235
-143    val_143
-126    val_126
-219    val_219
-318    val_318
-169    val_169
-92     val_92
-15     val_15
-33     val_33
-114    val_114
-496    val_496
-180    val_180
-191    val_191
-145    val_145
 444    val_444
-432    val_432
-344    val_344
+446    val_446
+448    val_448
+449    val_449
+452    val_452
+453    val_453
+454    val_454
+455    val_455
+457    val_457
+458    val_458
+459    val_459
+460    val_460
+462    val_462
+463    val_463
+466    val_466
+467    val_467
+468    val_468
+469    val_469
+47     val_47
 470    val_470
+472    val_472
+475    val_475
+477    val_477
+478    val_478
+479    val_479
+480    val_480
+481    val_481
+482    val_482
 483    val_483
-495    val_495
-339    val_339
-35     val_35
-373    val_373
-18     val_18
-152    val_152
-348    val_348
-72     val_72
-316    val_316
-462    val_462
-97     val_97
-229    val_229
+484    val_484
+485    val_485
+487    val_487
+489    val_489
+490    val_490
+491    val_491
+492    val_492
+493    val_493
 494    val_494
-331    val_331
-90     val_90
-216    val_216
-164    val_164
-175    val_175
-119    val_119
-190    val_190
-0      val_0
-192    val_192
+495    val_495
+496    val_496
+497    val_497
+498    val_498
+5      val_5
+51     val_51
+53     val_53
+54     val_54
+57     val_57
+58     val_58
+64     val_64
 65     val_65
-489    val_489
-228    val_228
-306    val_306
-469    val_469
+66     val_66
+67     val_67
+69     val_69
+70     val_70
+72     val_72
+74     val_74
+76     val_76
 77     val_77
-282    val_282
-17     val_17
-137    val_137
-266    val_266
-42     val_42
-205    val_205
-257    val_257
-186    val_186
-136    val_136
-265    val_265
-439    val_439
+78     val_78
+8      val_8
+80     val_80
+82     val_82
+83     val_83
+84     val_84
+85     val_85
 86     val_86
+87     val_87
+9      val_9
+90     val_90
+92     val_92
+95     val_95
+96     val_96
+97     val_97
+98     val_98
 PREHOOK: query: EXPLAIN SELECT key FROM src GROUP BY key HAVING max(value) > 
"val_255"
 PREHOOK: type: QUERY
 POSTHOOK: query: EXPLAIN SELECT key FROM src GROUP BY key HAVING max(value) > 
"val_255"
@@ -535,205 +537,205 @@ POSTHOOK: query: SELECT key FROM src GRO
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 #### A masked pattern was here ####
-436
-364
-403
-374
-95
-382
-30
-455
-498
-287
-311
-76
-87
-406
-463
-459
+256
+257
+258
+26
+260
+262
 263
-335
+265
+266
+27
+272
+273
+274
+275
+277
+278
 28
-98
-4
-404
-47
-54
-368
-377
-37
 280
-57
-308
-291
-278
-484
-409
-317
-475
-260
-429
-417
+281
+282
+283
 284
-413
-309
-27
-296
-67
-96
+285
+286
+287
+288
 289
-427
-418
-472
-454
-310
-360
-460
-5
-481
-85
-58
-369
-482
-9
-34
-419
-378
-281
-82
+291
 292
-285
-400
-53
+296
 298
-397
-43
-272
-332
-321
-389
-286
-421
-487
-78
-396
-375
-83
-258
+30
+302
+305
+306
 307
-41
-392
-277
-327
-490
-333
-64
-477
-256
-401
-402
-435
-393
-362
-492
-288
-438
-448
-394
-457
-480
-384
-323
-497
-446
-26
-386
-467
-468
-51
+308
+309
+310
+311
 315
-273
-366
-74
-345
-424
-491
-431
-395
+316
+317
+318
+321
+322
+323
+325
+327
+33
+331
+332
+333
+335
+336
+338
+339
+34
+341
 342
-302
-69
-66
-70
-449
-458
+344
+345
+348
+35
+351
+353
 356
-430
-466
-478
-453
-493
-338
+360
+362
+364
 365
+366
+367
+368
+369
+37
+373
+374
+375
+377
+378
 379
-353
-283
-485
-275
-452
-351
-325
-414
-84
+382
+384
+386
+389
+392
+393
+394
+395
+396
+397
 399
-305
-367
-262
-437
-479
-80
+4
+400
+401
+402
+403
+404
+406
 407
+409
+41
 411
-336
-322
+413
+414
+417
+418
+419
+42
+421
+424
+427
+429
+43
+430
+431
+432
+435
+436
+437
+438
+439
 44
 443
-341
-274
-8
-318
-92
-33
-496
 444
-432
-344
+446
+448
+449
+452
+453
+454
+455
+457
+458
+459
+460
+462
+463
+466
+467
+468
+469
+47
 470
+472
+475
+477
+478
+479
+480
+481
+482
 483
-495
-339
-35
-373
-348
-72
-316
-462
-97
+484
+485
+487
+489
+490
+491
+492
+493
 494
-331
-90
+495
+496
+497
+498
+5
+51
+53
+54
+57
+58
+64
 65
-489
-306
-469
+66
+67
+69
+70
+72
+74
+76
 77
-282
-266
-42
-257
-265
-439
+78
+8
+80
+82
+83
+84
+85
 86
+87
+9
+90
+92
+95
+96
+97
+98
 PREHOOK: query: EXPLAIN SELECT key FROM src where key > 300 GROUP BY key 
HAVING max(value) > "val_255"
 PREHOOK: type: QUERY
 POSTHOOK: query: EXPLAIN SELECT key FROM src where key > 300 GROUP BY key 
HAVING max(value) > "val_255"
@@ -810,131 +812,131 @@ POSTHOOK: query: SELECT key FROM src whe
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 #### A masked pattern was here ####
-364
-436
-438
-448
-403
-394
-374
-480
-384
-323
-497
-446
-457
-468
-467
-386
-315
-382
-455
-366
-498
-345
-342
-424
-491
-311
-395
-431
 302
-406
-463
-459
-335
-449
-458
-356
-404
-430
-377
-368
-466
-478
-453
-493
+305
+306
+307
+308
+309
+310
+311
+315
+316
+317
+318
+321
+322
+323
+325
+327
+331
+332
+333
+335
+336
 338
-308
+339
+341
+342
+344
+345
+348
+351
+353
+356
+360
+362
+364
 365
+366
+367
+368
+369
+373
+374
+375
+377
+378
 379
-353
-485
-452
-484
-409
-351
-325
-317
-475
-429
-414
-417
-413
-309
+382
+384
+386
+389
+392
+393
+394
+395
+396
+397
 399
-305
-367
-336
-479
-437
+400
+401
+402
+403
+404
+406
 407
+409
 411
-427
+413
+414
+417
 418
+419
+421
+424
+427
+429
+430
+431
+432
+435
+436
+437
+438
+439
 443
-322
-341
-472
+444
+446
+448
+449
+452
+453
 454
-310
-360
+455
+457
+458
+459
 460
-318
+462
+463
+466
+467
+468
+469
+470
+472
+475
+477
+478
+479
+480
 481
-496
-369
 482
-444
-432
-344
-470
 483
-495
-419
-378
-339
-373
-348
-400
-316
-462
-494
-397
-332
-331
-321
-389
-333
-477
-396
+484
+485
 487
-421
-375
 489
-327
-307
-392
-306
-469
 490
-439
-435
-401
-402
-393
-362
+491
 492
+493
+494
+495
+496
+497
+498
 PREHOOK: query: EXPLAIN SELECT key, max(value) FROM src GROUP BY key HAVING 
max(value) > "val_255"
 PREHOOK: type: QUERY
 POSTHOOK: query: EXPLAIN SELECT key, max(value) FROM src GROUP BY key HAVING 
max(value) > "val_255"
@@ -1008,205 +1010,205 @@ POSTHOOK: query: SELECT key, max(value) 
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 #### A masked pattern was here ####
-436    val_436
-364    val_364
-403    val_403
-374    val_374
-95     val_95
-382    val_382
-30     val_30
-455    val_455
-498    val_498
-287    val_287
-311    val_311
-76     val_76
-87     val_87
-406    val_406
-463    val_463
-459    val_459
+256    val_256
+257    val_257
+258    val_258
+26     val_26
+260    val_260
+262    val_262
 263    val_263
-335    val_335
+265    val_265
+266    val_266
+27     val_27
+272    val_272
+273    val_273
+274    val_274
+275    val_275
+277    val_277
+278    val_278
 28     val_28
-98     val_98
-4      val_4
-404    val_404
-47     val_47
-54     val_54
-368    val_368
-377    val_377
-37     val_37
 280    val_280
-57     val_57
-308    val_308
-291    val_291
-278    val_278
-484    val_484
-409    val_409
-317    val_317
-475    val_475
-260    val_260
-429    val_429
-417    val_417
+281    val_281
+282    val_282
+283    val_283
 284    val_284
-413    val_413
-309    val_309
-27     val_27
-296    val_296
-67     val_67
-96     val_96
+285    val_285
+286    val_286
+287    val_287
+288    val_288
 289    val_289
-427    val_427
-418    val_418
-472    val_472
-454    val_454
+291    val_291
+292    val_292
+296    val_296
+298    val_298
+30     val_30
+302    val_302
+305    val_305
+306    val_306
+307    val_307
+308    val_308
+309    val_309
 310    val_310
+311    val_311
+315    val_315
+316    val_316
+317    val_317
+318    val_318
+321    val_321
+322    val_322
+323    val_323
+325    val_325
+327    val_327
+33     val_33
+331    val_331
+332    val_332
+333    val_333
+335    val_335
+336    val_336
+338    val_338
+339    val_339
+34     val_34
+341    val_341
+342    val_342
+344    val_344
+345    val_345
+348    val_348
+35     val_35
+351    val_351
+353    val_353
+356    val_356
 360    val_360
-460    val_460
-5      val_5
-481    val_481
-85     val_85
-58     val_58
+362    val_362
+364    val_364
+365    val_365
+366    val_366
+367    val_367
+368    val_368
 369    val_369
-482    val_482
-9      val_9
-34     val_34
-419    val_419
+37     val_37
+373    val_373
+374    val_374
+375    val_375
+377    val_377
 378    val_378
-281    val_281
-82     val_82
-292    val_292
-285    val_285
-400    val_400
-53     val_53
-298    val_298
-397    val_397
-43     val_43
-272    val_272
-332    val_332
-321    val_321
+379    val_379
+382    val_382
+384    val_384
+386    val_386
 389    val_389
-286    val_286
-421    val_421
-487    val_487
-78     val_78
-396    val_396
-375    val_375
-83     val_83
-258    val_258
-307    val_307
-41     val_41
 392    val_392
-277    val_277
-327    val_327
-490    val_490
-333    val_333
-64     val_64
-477    val_477
-256    val_256
-401    val_401
-402    val_402
-435    val_435
 393    val_393
-362    val_362
-492    val_492
-288    val_288
-438    val_438
-448    val_448
 394    val_394
-457    val_457
-480    val_480
-384    val_384
-323    val_323
-497    val_497
-446    val_446
-26     val_26
-386    val_386
-467    val_467
-468    val_468
-51     val_51
-315    val_315
-273    val_273
-366    val_366
-74     val_74
-345    val_345
-424    val_424
-491    val_491
-431    val_431
 395    val_395
-342    val_342
-302    val_302
-69     val_69
-66     val_66
-70     val_70
-449    val_449
-458    val_458
-356    val_356
-430    val_430
-466    val_466
-478    val_478
-453    val_453
-493    val_493
-338    val_338
-365    val_365
-379    val_379
-353    val_353
-283    val_283
-485    val_485
-275    val_275
-452    val_452
-351    val_351
-325    val_325
-414    val_414
-84     val_84
+396    val_396
+397    val_397
 399    val_399
-305    val_305
-367    val_367
-262    val_262
-437    val_437
-479    val_479
-80     val_80
+4      val_4
+400    val_400
+401    val_401
+402    val_402
+403    val_403
+404    val_404
+406    val_406
 407    val_407
+409    val_409
+41     val_41
 411    val_411
-336    val_336
-322    val_322
+413    val_413
+414    val_414
+417    val_417
+418    val_418
+419    val_419
+42     val_42
+421    val_421
+424    val_424
+427    val_427
+429    val_429
+43     val_43
+430    val_430
+431    val_431
+432    val_432
+435    val_435
+436    val_436
+437    val_437
+438    val_438
+439    val_439
 44     val_44
 443    val_443
-341    val_341
-274    val_274
-8      val_8
-318    val_318
-92     val_92
-33     val_33
-496    val_496
 444    val_444
-432    val_432
-344    val_344
+446    val_446
+448    val_448
+449    val_449
+452    val_452
+453    val_453
+454    val_454
+455    val_455
+457    val_457
+458    val_458
+459    val_459
+460    val_460
+462    val_462
+463    val_463
+466    val_466
+467    val_467
+468    val_468
+469    val_469
+47     val_47
 470    val_470
+472    val_472
+475    val_475
+477    val_477
+478    val_478
+479    val_479
+480    val_480
+481    val_481
+482    val_482
 483    val_483
-495    val_495
-339    val_339
-35     val_35
-373    val_373
-348    val_348
-72     val_72
-316    val_316
-462    val_462
-97     val_97
+484    val_484
+485    val_485
+487    val_487
+489    val_489
+490    val_490
+491    val_491
+492    val_492
+493    val_493
 494    val_494
-331    val_331
-90     val_90
+495    val_495
+496    val_496
+497    val_497
+498    val_498
+5      val_5
+51     val_51
+53     val_53
+54     val_54
+57     val_57
+58     val_58
+64     val_64
 65     val_65
-489    val_489
-306    val_306
-469    val_469
+66     val_66
+67     val_67
+69     val_69
+70     val_70
+72     val_72
+74     val_74
+76     val_76
 77     val_77
-282    val_282
-266    val_266
-42     val_42
-257    val_257
-265    val_265
-439    val_439
+78     val_78
+8      val_8
+80     val_80
+82     val_82
+83     val_83
+84     val_84
+85     val_85
 86     val_86
+87     val_87
+9      val_9
+90     val_90
+92     val_92
+95     val_95
+96     val_96
+97     val_97
+98     val_98
 PREHOOK: query: EXPLAIN SELECT key, COUNT(value) FROM src GROUP BY key HAVING 
count(value) >= 4
 PREHOOK: type: QUERY
 POSTHOOK: query: EXPLAIN SELECT key, COUNT(value) FROM src GROUP BY key HAVING 
count(value) >= 4
@@ -1280,13 +1282,13 @@ POSTHOOK: query: SELECT key, COUNT(value
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 #### A masked pattern was here ####
-406    4
+138    4
+169    4
 230    5
 277    4
+348    5
 401    5
+406    4
 468    4
-138    4
-169    4
-348    5
-489    4
 469    5
+489    4

Modified: 
hive/branches/spark/ql/src/test/results/clientpositive/spark/limit_pushdown.q.out
URL: 
http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/limit_pushdown.q.out?rev=1624584&r1=1624583&r2=1624584&view=diff
==============================================================================
--- 
hive/branches/spark/ql/src/test/results/clientpositive/spark/limit_pushdown.q.out
 (original)
+++ 
hive/branches/spark/ql/src/test/results/clientpositive/spark/limit_pushdown.q.out
 Fri Sep 12 16:22:37 2014
@@ -1,9 +1,13 @@
-PREHOOK: query: -- HIVE-3562 Some limit can be pushed down to map stage
+PREHOOK: query: -- SORT_QUERY_RESULTS
+
+-- HIVE-3562 Some limit can be pushed down to map stage
 
 explain
 select key,value from src order by key limit 20
 PREHOOK: type: QUERY
-POSTHOOK: query: -- HIVE-3562 Some limit can be pushed down to map stage
+POSTHOOK: query: -- SORT_QUERY_RESULTS
+
+-- HIVE-3562 Some limit can be pushed down to map stage
 
 explain
 select key,value from src order by key limit 20
@@ -148,26 +152,26 @@ POSTHOOK: query: select key,value from s
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 #### A masked pattern was here ####
-98     val_98
-98     val_98
-97     val_97
-97     val_97
-96     val_96
-95     val_95
-95     val_95
-92     val_92
+82     val_82
+83     val_83
+83     val_83
+84     val_84
+84     val_84
+85     val_85
+86     val_86
+87     val_87
+9      val_9
 90     val_90
 90     val_90
 90     val_90
-9      val_9
-87     val_87
-86     val_86
-85     val_85
-84     val_84
-84     val_84
-83     val_83
-83     val_83
-82     val_82
+92     val_92
+95     val_95
+95     val_95
+96     val_96
+97     val_97
+97     val_97
+98     val_98
+98     val_98
 PREHOOK: query: explain
 select value, sum(key + 1) as sum from src group by value limit 20
 PREHOOK: type: QUERY
@@ -244,26 +248,26 @@ POSTHOOK: query: select value, sum(key +
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 #### A masked pattern was here ####
-val_131        132.0
-val_113        228.0
+val_0  3.0
+val_10 11.0
+val_100        202.0
 val_103        208.0
+val_104        210.0
+val_105        106.0
+val_11 12.0
+val_111        112.0
+val_113        228.0
 val_114        115.0
-val_0  3.0
 val_116        117.0
 val_118        238.0
-val_129        260.0
 val_119        360.0
-val_104        210.0
+val_12 26.0
+val_120        242.0
+val_125        252.0
 val_126        127.0
 val_128        387.0
-val_105        106.0
-val_10 11.0
-val_11 12.0
-val_100        202.0
-val_125        252.0
-val_120        242.0
-val_111        112.0
-val_12 26.0
+val_129        260.0
+val_131        132.0
 PREHOOK: query: -- deduped RS
 explain
 select value,avg(key + 1) from src group by value order by value limit 20
@@ -342,26 +346,26 @@ POSTHOOK: query: select value,avg(key + 
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 #### A masked pattern was here ####
-val_131        132.0
-val_113        114.0
+val_0  1.0
+val_10 11.0
+val_100        101.0
 val_103        104.0
+val_104        105.0
+val_105        106.0
+val_11 12.0
+val_111        112.0
+val_113        114.0
 val_114        115.0
-val_0  1.0
 val_116        117.0
 val_118        119.0
-val_129        130.0
 val_119        120.0
-val_104        105.0
+val_12 13.0
+val_120        121.0
+val_125        126.0
 val_126        127.0
 val_128        129.0
-val_105        106.0
-val_10 11.0
-val_11 12.0
-val_100        101.0
-val_125        126.0
-val_120        121.0
-val_111        112.0
-val_12 13.0
+val_129        130.0
+val_131        132.0
 PREHOOK: query: -- distincts
 explain
 select distinct(cdouble) from alltypesorc limit 20
@@ -437,26 +441,26 @@ POSTHOOK: query: select distinct(cdouble
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@alltypesorc
 #### A masked pattern was here ####
--16372.0
--16339.0
--16310.0
--16311.0
--16296.0
--16307.0
+-16269.0
+-16274.0
 -16277.0
--16305.0
--16309.0
--16300.0
 -16280.0
--16373.0
+-16296.0
+-16300.0
+-16305.0
 -16306.0
--16369.0
+-16307.0
+-16309.0
+-16310.0
+-16311.0
 -16324.0
-NULL
+-16339.0
 -16355.0
+-16369.0
+-16372.0
+-16373.0
 -16379.0
--16274.0
--16269.0
+NULL
 PREHOOK: query: explain
 select ctinyint, count(distinct(cdouble)) from alltypesorc group by ctinyint 
limit 20
 PREHOOK: type: QUERY
@@ -532,26 +536,26 @@ POSTHOOK: query: select ctinyint, count(
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@alltypesorc
 #### A masked pattern was here ####
-NULL   2932
--64    24
--63    19
--62    27
--61    25
--60    27
--59    31
--58    23
--57    35
--56    36
--55    29
--54    26
--53    22
--52    33
--51    21
--50    30
--49    26
--48    29
--47    22
 -46    24
+-47    22
+-48    29
+-49    26
+-50    30
+-51    21
+-52    33
+-53    22
+-54    26
+-55    29
+-56    36
+-57    35
+-58    23
+-59    31
+-60    27
+-61    25
+-62    27
+-63    19
+-64    24
+NULL   2932
 PREHOOK: query: -- multi distinct
 explain
 select ctinyint, count(distinct(cstring1)), count(distinct(cstring2)) from 
alltypesorc group by ctinyint limit 20
@@ -629,26 +633,26 @@ POSTHOOK: query: select ctinyint, count(
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@alltypesorc
 #### A masked pattern was here ####
-NULL   3065    3
--64    3       13
--63    3       16
--62    3       23
--61    3       25
--60    3       25
--59    3       27
--58    3       24
--57    3       23
--56    3       22
--55    3       21
--54    3       21
--53    3       17
--52    3       21
--51    1012    1045
--50    3       25
--49    3       24
--48    3       27
--47    3       23
 -46    3       19
+-47    3       23
+-48    3       27
+-49    3       24
+-50    3       25
+-51    1012    1045
+-52    3       21
+-53    3       17
+-54    3       21
+-55    3       21
+-56    3       22
+-57    3       23
+-58    3       24
+-59    3       27
+-60    3       25
+-61    3       25
+-62    3       23
+-63    3       16
+-64    3       13
+NULL   3065    3
 PREHOOK: query: -- limit zero
 explain
 select key,value from src order by key limit 0
@@ -764,25 +768,25 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 #### A masked pattern was here ####
 val_0  0.0
-val_2  2.0
-val_4  4.0
-val_8  8.0
-val_9  9.0
 val_10 10.0
 val_11 11.0
-val_5  15.0
+val_12 24.0
+val_15 30.0
 val_17 17.0
+val_18 36.0
 val_19 19.0
+val_2  2.0
 val_20 20.0
-val_12 24.0
 val_27 27.0
 val_28 28.0
 val_30 30.0
-val_15 30.0
 val_33 33.0
 val_34 34.0
-val_18 36.0
+val_4  4.0
 val_41 41.0
+val_5  15.0
+val_8  8.0
+val_9  9.0
 PREHOOK: query: -- subqueries
 explain
 select * from
@@ -1022,26 +1026,26 @@ POSTHOOK: query: select value, sum(key) 
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 #### A masked pattern was here ####
-val_15 15.0
-val_138        414.0
-val_131        131.0
-val_113        226.0
-val_129        258.0
-val_103        206.0
-val_114        114.0
 val_0  0.0
-val_118        236.0
-val_116        116.0
-val_174        174.0
-val_119        357.0
+val_100        200.0
+val_103        206.0
 val_104        208.0
-val_128        384.0
 val_105        105.0
-val_193        193.0
-val_10 10.0
 val_11 11.0
-val_100        200.0
+val_111        111.0
+val_113        226.0
+val_114        114.0
+val_118        236.0
+val_119        357.0
+val_12 24.0
+val_120        240.0
+val_125        250.0
 val_126        126.0
+val_129        258.0
+val_131        131.0
+val_138        414.0
+val_15 15.0
+val_193        193.0
 PREHOOK: query: -- flush for order-by
 explain
 select key,value,value,value,value,value,value,value,value from src order by 
key limit 100
@@ -1279,103 +1283,103 @@ POSTHOOK: query: select sum(key) as sum 
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 #### A masked pattern was here ####
-579.0
-126.0
+0.0
+10.0
 105.0
-143.0
-414.0
-178.0
-399.0
-406.0
-330.0
-202.0
-410.0
-282.0
-235.0
-131.0
-309.0
-304.0
-194.0
-327.0
-434.0
-242.0
-280.0
-344.0
-432.0
-222.0
-214.0
-19.0
-201.0
-17.0
-177.0
-624.0
+11.0
+111.0
+114.0
 1150.0
-272.0
-250.0
-240.0
-170.0
-192.0
-160.0
-426.0
-384.0
-158.0
+116.0
+126.0
+131.0
+133.0
+136.0
+143.0
 145.0
-348.0
-501.0
-114.0
-226.0
-111.0
-418.0
-156.0
-394.0
 150.0
+153.0
+155.0
+156.0
+157.0
+162.0
+163.0
+166.0
+168.0
+17.0
+177.0
+178.0
 180.0
+181.0
+189.0
+19.0
+192.0
+194.0
+196.0
+20.0
+200.0
+201.0
+206.0
 208.0
-183.0
+214.0
 218.0
-390.0
-476.0
+226.0
+226.0
+228.0
+235.0
+236.0
+24.0
+24.0
+240.0
+242.0
+250.0
+255.0
+268.0
+272.0
+273.0
+274.0
+278.0
+282.0
+292.0
+298.0
+30.0
+304.0
 316.0
-474.0
+327.0
+328.0
+330.0
+344.0
+348.0
+350.0
+352.0
 357.0
-459.0
+36.0
+382.0
+384.0
+390.0
+394.0
+400.0
+406.0
+410.0
+417.0
+418.0
+426.0
+430.0
+432.0
+434.0
+438.0
 442.0
 446.0
+459.0
 466.0
-20.0
-458.0
-0.0
-181.0
-166.0
-30.0
-328.0
-10.0
+474.0
+478.0
 489.0
-469.0
-24.0
-417.0
-622.0
-292.0
-636.0
-554.0
-400.0
-278.0
-186.0
-157.0
-352.0
-358.0
-382.0
+501.0
 552.0
-268.0
-348.0
-236.0
-258.0
-163.0
-136.0
-226.0
-228.0
-189.0
-206.0
-561.0
-438.0
-162.0
+554.0
+579.0
+597.0
+622.0
+624.0
+676.0

Modified: 
hive/branches/spark/ql/src/test/results/clientpositive/spark/merge1.q.out
URL: 
http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/merge1.q.out?rev=1624584&r1=1624583&r2=1624584&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/spark/merge1.q.out 
(original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/spark/merge1.q.out 
Fri Sep 12 16:22:37 2014
@@ -1,8 +1,12 @@
-PREHOOK: query: create table dest1(key int, val int)
+PREHOOK: query: -- SORT_QUERY_RESULTS
+
+create table dest1(key int, val int)
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
 PREHOOK: Output: default@dest1
-POSTHOOK: query: create table dest1(key int, val int)
+POSTHOOK: query: -- SORT_QUERY_RESULTS
+
+create table dest1(key int, val int)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
 POSTHOOK: Output: default@dest1
@@ -106,315 +110,315 @@ POSTHOOK: query: select * from dest1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@dest1
 #### A masked pattern was here ####
-168    1
-436    1
-170    1
-364    1
-209    2
-403    3
-374    1
-11     1
-195    2
-252    1
-146    2
-95     2
-118    2
-189    1
-199    3
-196    1
+0      3
+10     1
 100    2
-382    2
-30     1
-455    1
-498    3
+103    2
+104    2
+105    1
+11     1
 111    1
-287    1
-248    1
-19     1
-311    3
-76     2
-178    1
-87     1
-160    1
-221    2
-406    4
+113    2
+114    1
+116    1
+118    2
+119    3
+12     2
+120    2
 125    2
-463    2
-459    2
-263    1
+126    1
+128    3
+129    2
+131    1
+133    1
+134    2
+136    1
+137    2
+138    4
+143    1
+145    1
+146    2
+149    2
+15     2
+150    1
+152    2
+153    1
+155    1
+156    1
+157    1
+158    1
+160    1
+162    1
+163    1
+164    2
+165    2
 166    1
-335    1
-28     1
-223    2
+167    3
+168    1
+169    4
+17     1
+170    1
+172    2
 174    2
-98     2
-208    3
-4      1
-404    2
-47     1
-54     1
-368    1
-377    1
-37     2
-280    2
-57     1
-203    2
-308    1
-291    1
-278    2
-484    1
-409    3
-155    1
-317    2
-475    1
-260    1
-429    2
-417    3
-284    1
-413    2
-309    2
-27     1
-296    1
-67     2
-244    1
-230    5
-96     1
-183    1
-249    1
-289    1
-427    1
-418    1
-181    1
-472    1
-454    3
-310    1
-207    2
-233    2
-194    1
-224    2
-129    2
-360    1
-10     1
-460    1
-5      3
-12     2
-481    1
-85     1
-58     2
-369    3
-482    1
-214    1
+175    2
+176    2
 177    1
+178    1
+179    2
+18     2
+180    1
+181    1
+183    1
+186    1
+187    3
+189    1
+19     1
+190    1
+191    2
+192    1
 193    3
-9      1
-34     1
-419    1
-378    1
-165    2
+194    1
+195    2
+196    1
+197    2
+199    3
+2      1
+20     1
+200    2
 201    1
+202    1
+203    2
+205    2
+207    2
+208    3
+209    2
+213    2
+214    1
+216    2
+217    2
+218    1
+219    2
+221    2
+222    1
+223    2
+224    2
+226    1
+228    1
+229    2
+230    5
+233    2
+235    1
+237    2
+238    2
+239    2
+24     2
 241    1
+242    2
+244    1
+247    1
+248    1
+249    1
+252    1
+255    2
+256    2
+257    1
+258    1
+26     2
+260    1
+262    1
+263    1
+265    2
+266    1
+27     1
+272    2
+273    3
+274    1
+275    1
+277    4
+278    2
+28     1
+280    2
 281    2
-82     1
-292    1
+282    2
+283    1
+284    1
 285    1
-400    1
-53     1
-298    3
-397    2
-43     1
-272    2
-332    1
-321    2
-389    1
-131    1
 286    1
-242    2
-421    1
-487    1
-78     1
-396    3
-375    1
-83     2
-258    1
+287    1
+288    2
+289    1
+291    1
+292    1
+296    1
+298    3
+30     1
+302    1
+305    1
+306    1
 307    2
-41     1
-392    1
-277    4
+308    1
+309    2
+310    1
+311    3
+315    1
+316    3
+317    2
+318    3
+321    2
+322    2
+323    1
+325    2
 327    3
-490    1
-134    2
+33     1
+331    2
+332    1
 333    2
-64     1
-477    1
-256    2
-401    5
-402    1
-435    1
-393    1
-362    1
-492    2
-288    2
-438    3
-104    2
-113    2
-176    2
-448    1
-394    1
-457    1
-162    1
-480    3
-384    3
-323    1
-497    1
-20     1
-446    1
-237    2
-26     2
-222    1
-386    1
-467    1
-468    4
-218    1
-51     2
-163    1
-315    1
-150    1
-273    3
-366    1
-226    1
-74     1
-345    1
-424    2
-491    1
-431    3
-395    2
+335    1
+336    1
+338    1
+339    1
+34     1
+341    1
 342    2
-302    1
-149    2
-105    1
-167    3
-69     1
-172    2
-66     1
-70     3
-2      1
-239    2
-449    1
-179    2
-458    2
-202    1
+344    2
+345    1
+348    5
+35     3
+351    1
+353    2
 356    1
-430    3
-466    3
-478    2
-453    1
-493    1
-338    1
+360    1
+362    1
+364    1
 365    1
+366    1
+367    2
+368    1
+369    3
+37     2
+373    1
+374    1
+375    1
+377    1
+378    1
 379    1
-353    2
-247    1
-283    1
-200    2
-158    1
-485    1
-103    2
-275    1
-138    4
-452    1
-217    2
-351    1
-325    2
-197    2
-414    2
-84     2
+382    2
+384    3
+386    1
+389    1
+392    1
+393    1
+394    1
+395    2
+396    3
+397    2
 399    2
-24     2
-133    1
-305    1
-367    2
-157    1
-262    1
-128    3
-116    1
-437    1
-156    1
-479    1
-153    1
-80     1
+4      1
+400    1
+401    5
+402    1
+403    3
+404    2
+406    4
 407    1
-120    2
+409    3
+41     1
 411    1
-336    1
-322    2
+413    2
+414    2
+417    3
+418    1
+419    1
+42     2
+421    1
+424    2
+427    1
+429    2
+43     1
+430    3
+431    3
+432    1
+435    1
+436    1
+437    1
+438    3
+439    2
 44     1
 443    1
-187    3
-341    1
-238    2
-255    2
-274    1
-8      1
-213    2
-235    1
-143    1
-126    1
-219    2
-318    3
-169    4
-92     1
-15     2
-33     1
-114    1
-496    1
-180    1
-191    2
-145    1
 444    1
-432    1
-344    2
+446    1
+448    1
+449    1
+452    1
+453    1
+454    3
+455    1
+457    1
+458    2
+459    2
+460    1
+462    2
+463    2
+466    3
+467    1
+468    4
+469    5
+47     1
 470    1
+472    1
+475    1
+477    1
+478    2
+479    1
+480    3
+481    1
+482    1
 483    1
-495    1
-339    1
-35     3
-373    1
-18     2
-152    2
-348    5
-72     2
-316    3
-462    2
-97     2
-229    2
+484    1
+485    1
+487    1
+489    4
+490    1
+491    1
+492    2
+493    1
 494    1
-331    2
-90     3
-216    2
-164    2
-175    2
-119    3
-190    1
-0      3
-192    1
+495    1
+496    1
+497    1
+498    3
+5      3
+51     2
+53     1
+54     1
+57     1
+58     2
+64     1
 65     1
-489    4
-228    1
-306    1
-469    5
+66     1
+67     2
+69     1
+70     3
+72     2
+74     1
+76     2
 77     1
-282    2
-17     1
-137    2
-266    1
-42     2
-205    2
-257    1
-186    1
-136    1
-265    2
-439    2
+78     1
+8      1
+80     1
+82     1
+83     2
+84     2
+85     1
 86     1
+87     1
+9      1
+90     3
+92     1
+95     2
+96     1
+97     2
+98     2
 PREHOOK: query: drop table dest1
 PREHOOK: type: DROPTABLE
 PREHOOK: Input: default@dest1

Modified: 
hive/branches/spark/ql/src/test/results/clientpositive/spark/merge2.q.out
URL: 
http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/merge2.q.out?rev=1624584&r1=1624583&r2=1624584&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/spark/merge2.q.out 
(original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/spark/merge2.q.out 
Fri Sep 12 16:22:37 2014
@@ -1,8 +1,12 @@
-PREHOOK: query: create table test1(key int, val int)
+PREHOOK: query: -- SORT_QUERY_RESULTS
+
+create table test1(key int, val int)
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
 PREHOOK: Output: default@test1
-POSTHOOK: query: create table test1(key int, val int)
+POSTHOOK: query: -- SORT_QUERY_RESULTS
+
+create table test1(key int, val int)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
 POSTHOOK: Output: default@test1
@@ -106,315 +110,315 @@ POSTHOOK: query: select * from test1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@test1
 #### A masked pattern was here ####
-364    1
-170    1
-436    1
-168    1
-209    2
-403    3
-374    1
-11     1
-195    2
-252    1
-146    2
-95     2
-118    2
-189    1
-199    3
-196    1
+0      3
+10     1
 100    2
-382    2
-30     1
-455    1
-498    3
+103    2
+104    2
+105    1
+11     1
 111    1
-287    1
-248    1
-19     1
-311    3
+113    2
+114    1
+116    1
+118    2
+119    3
+12     2
+120    2
 125    2
-87     1
+126    1
+128    3
+129    2
+131    1
+133    1
+134    2
+136    1
+137    2
+138    4
+143    1
+145    1
+146    2
+149    2
+15     2
+150    1
+152    2
+153    1
+155    1
+156    1
+157    1
+158    1
 160    1
-221    2
-406    4
-76     2
-463    2
-459    2
-263    1
+162    1
+163    1
+164    2
+165    2
 166    1
-335    1
-178    1
-28     1
-223    2
+167    3
+168    1
+169    4
+17     1
+170    1
+172    2
 174    2
-98     2
-208    3
-203    2
-4      1
-377    1
-54     1
-47     1
-368    1
-37     2
-280    2
-57     1
-404    2
-308    1
-291    1
-278    2
-484    1
-409    3
-155    1
-475    1
-317    2
-260    1
-429    2
-417    3
-309    2
-413    2
-284    1
-27     1
-296    1
-67     2
-244    1
-230    5
-96     1
-183    1
-249    1
-289    1
-427    1
-418    1
+175    2
+176    2
+177    1
+178    1
+179    2
+18     2
+180    1
 181    1
-472    1
-454    3
-207    2
-310    1
-233    2
-224    2
+183    1
+186    1
+187    3
+189    1
+19     1
+190    1
+191    2
+192    1
+193    3
 194    1
-129    2
-10     1
-360    1
-460    1
-12     2
-5      3
-481    1
-85     1
-58     2
-369    3
-482    1
+195    2
+196    1
+197    2
+199    3
+2      1
+20     1
+200    2
+201    1
+202    1
+203    2
+205    2
+207    2
+208    3
+209    2
+213    2
 214    1
-177    1
-193    3
-9      1
-34     1
-378    1
-419    1
-165    2
+216    2
+217    2
+218    1
+219    2
+221    2
+222    1
+223    2
+224    2
+226    1
+228    1
+229    2
+230    5
+233    2
+235    1
+237    2
+238    2
+239    2
+24     2
 241    1
-201    1
+242    2
+244    1
+247    1
+248    1
+249    1
+252    1
+255    2
+256    2
+257    1
+258    1
+26     2
+260    1
+262    1
+263    1
+265    2
+266    1
+27     1
+272    2
+273    3
+274    1
+275    1
+277    4
+278    2
+28     1
+280    2
 281    2
-82     1
+282    2
+283    1
+284    1
+285    1
+286    1
+287    1
+288    2
+289    1
+291    1
 292    1
-277    4
-272    2
-43     1
+296    1
 298    3
-397    2
-332    1
-285    1
-53     1
+30     1
+302    1
+305    1
+306    1
+307    2
+308    1
+309    2
+310    1
+311    3
+315    1
+316    3
+317    2
+318    3
 321    2
+322    2
+323    1
+325    2
+327    3
+33     1
+331    2
+332    1
 333    2
-131    1
-286    1
-242    2
-396    3
-389    1
-78     1
-421    1
-487    1
-375    1
-327    3
-83     2
-307    2
-392    1
-258    1
-41     1
-490    1
-134    2
-64     1
-400    1
-477    1
-256    2
-401    5
-435    1
-402    1
-393    1
-362    1
-492    2
-288    2
-237    2
-113    2
-104    2
-20     1
-176    2
-457    1
-394    1
-448    1
-162    1
-497    1
-438    3
-384    3
-480    3
-446    1
-468    4
-26     2
-323    1
-386    1
-467    1
-218    1
-51     2
-163    1
-315    1
-150    1
-273    3
-222    1
-366    1
+335    1
+336    1
+338    1
+339    1
+34     1
+341    1
 342    2
+344    2
 345    1
-74     1
-226    1
-424    2
-491    1
-431    3
-395    2
-302    1
-149    2
-105    1
-167    3
-239    2
-69     1
-172    2
-66     1
-70     3
-2      1
-449    1
-179    2
-458    2
+348    5
+35     3
+351    1
+353    2
 356    1
-202    1
-430    3
-466    3
-478    2
-338    1
-453    1
-493    1
+360    1
+362    1
+364    1
 365    1
+366    1
+367    2
+368    1
+369    3
+37     2
+373    1
+374    1
+375    1
+377    1
+378    1
 379    1
-353    2
-247    1
-283    1
-200    2
-158    1
-485    1
-103    2
-275    1
-138    4
-452    1
-325    2
-217    2
-274    1
-197    2
-351    1
+382    2
+384    3
+386    1
+389    1
+392    1
+393    1
+394    1
+395    2
+396    3
+397    2
 399    2
-84     2
-24     2
-336    1
-367    2
-414    2
-157    1
-262    1
-128    3
-133    1
-116    1
-437    1
-156    1
-479    1
-153    1
-120    2
-80     1
+4      1
+400    1
+401    5
+402    1
+403    3
+404    2
+406    4
 407    1
+409    3
+41     1
 411    1
-322    2
+413    2
+414    2
+417    3
+418    1
+419    1
+42     2
+421    1
+424    2
+427    1
+429    2
+43     1
+430    3
+431    3
+432    1
+435    1
+436    1
+437    1
+438    3
+439    2
 44     1
-305    1
-187    3
 443    1
-238    2
-255    2
-341    1
-8      1
-213    2
-143    1
-235    1
-126    1
-219    2
-318    3
-92     1
-15     2
-169    4
-33     1
-496    1
-145    1
-180    1
-191    2
-114    1
 444    1
-432    1
+446    1
+448    1
+449    1
+452    1
+453    1
+454    3
+455    1
+457    1
+458    2
+459    2
+460    1
+462    2
+463    2
+466    3
+467    1
+468    4
+469    5
+47     1
 470    1
-344    2
-495    1
+472    1
+475    1
+477    1
+478    2
+479    1
+480    3
+481    1
+482    1
 483    1
-339    1
-35     3
-373    1
-18     2
-152    2
-348    5
-494    1
-316    3
-462    2
-72     2
-229    2
-331    2
-216    2
-90     3
-164    2
-97     2
-175    2
-119    3
-190    1
-0      3
-192    1
+484    1
+485    1
+487    1
 489    4
+490    1
+491    1
+492    2
+493    1
+494    1
+495    1
+496    1
+497    1
+498    3
+5      3
+51     2
+53     1
+54     1
+57     1
+58     2
+64     1
 65     1
-228    1
-469    5
-306    1
+66     1
+67     2
+69     1
+70     3
+72     2
+74     1
+76     2
 77     1
-282    2
-17     1
-137    2
-266    1
-42     2
-205    2
-257    1
-186    1
-136    1
-265    2
+78     1
+8      1
+80     1
+82     1
+83     2
+84     2
+85     1
 86     1
-439    2
+87     1
+9      1
+90     3
+92     1
+95     2
+96     1
+97     2
+98     2
 PREHOOK: query: drop table test1
 PREHOOK: type: DROPTABLE
 PREHOOK: Input: default@test1

Modified: 
hive/branches/spark/ql/src/test/results/clientpositive/spark/union_remove_19.q.out
URL: 
http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/union_remove_19.q.out?rev=1624584&r1=1624583&r2=1624584&view=diff
==============================================================================
--- 
hive/branches/spark/ql/src/test/results/clientpositive/spark/union_remove_19.q.out
 (original)
+++ 
hive/branches/spark/ql/src/test/results/clientpositive/spark/union_remove_19.q.out
 Fri Sep 12 16:22:37 2014
@@ -8,6 +8,8 @@ PREHOOK: query: -- This is to test the u
 -- Since this test creates sub-directories for the output table outputTbl1, it 
might be easier
 -- to run the test only on hadoop 23
 
+-- SORT_QUERY_RESULTS
+
 create table inputTbl1(key string, val string) stored as textfile
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
@@ -22,6 +24,8 @@ POSTHOOK: query: -- This is to test the 
 -- Since this test creates sub-directories for the output table outputTbl1, it 
might be easier
 -- to run the test only on hadoop 23
 
+-- SORT_QUERY_RESULTS
+
 create table inputTbl1(key string, val string) stored as textfile
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
@@ -235,16 +239,16 @@ POSTHOOK: query: select * from outputTbl
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@outputtbl1
 #### A masked pattern was here ####
-7      1
+1      1
+1      1
+2      1
 2      1
-8      2
 3      1
-1      1
+3      1
+7      1
 7      1
-2      1
 8      2
-3      1
-1      1
+8      2
 PREHOOK: query: -- filter should be fine
 explain
 insert overwrite table outputTbl1

Modified: 
hive/branches/spark/ql/src/test/results/clientpositive/spark/union_remove_25.q.out
URL: 
http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/union_remove_25.q.out?rev=1624584&r1=1624583&r2=1624584&view=diff
==============================================================================
--- 
hive/branches/spark/ql/src/test/results/clientpositive/spark/union_remove_25.q.out
 (original)
+++ 
hive/branches/spark/ql/src/test/results/clientpositive/spark/union_remove_25.q.out
 Fri Sep 12 16:22:37 2014
@@ -452,7 +452,7 @@ Partition Parameters:                
        numFiles                2                   
        numRows                 -1                  
        rawDataSize             -1                  
-       totalSize               6812                
+       totalSize               6826                
 #### A masked pattern was here ####
                 
 # Storage Information           

