HIVE-16756 : Vectorization: LongColModuloLongColumn throws 
java.lang.ArithmeticException: / by zero (Vihang Karajgaonkar, reviewed by Matt 
McCline)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/33dc63bb
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/33dc63bb
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/33dc63bb

Branch: refs/heads/branch-2
Commit: 33dc63bb3b4ae1fa1aa0ab39827d52ba18984e1d
Parents: 4c45169
Author: Vihang Karajgaonkar <vih...@cloudera.com>
Authored: Sun Nov 19 18:58:54 2017 -0800
Committer: Vihang Karajgaonkar <vih...@cloudera.com>
Committed: Sun Nov 19 18:58:54 2017 -0800

----------------------------------------------------------------------
 .../ExpressionTemplates/ColumnDivideColumn.txt  |   2 +-
 .../expressions/LongColModuloLongColumn.java    | 209 +++++++++
 .../hive/ql/udf/generic/GenericUDFOPMod.java    |   1 +
 .../exec/vector/TestVectorizationContext.java   |   2 +-
 .../TestVectorArithmeticExpressions.java        |  49 ++
 .../queries/clientpositive/vectorization_div0.q |  12 +-
 .../llap/vectorization_div0.q.out               | 444 ++++++++++++++-----
 .../spark/vectorization_div0.q.out              | 434 +++++++++++++-----
 .../clientpositive/tez/vectorization_div0.q.out | 443 +++++++++++++-----
 .../clientpositive/vectorization_div0.q.out     | 414 ++++++++++++-----
 .../apache/hadoop/hive/tools/GenVectorCode.java |   1 -
 11 files changed, 1566 insertions(+), 445 deletions(-)
----------------------------------------------------------------------
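
Note (editorial, not part of the patch): the bug arises because Java long
arithmetic and double arithmetic behave differently on a zero divisor. A
double division by zero quietly produces Infinity/NaN, but a long remainder
by zero throws java.lang.ArithmeticException, so a vectorized long % long
expression cannot simply apply the operator to every row the way the double
variants do. A minimal sketch of the two behaviours:

    public class DivByZeroSketch {
      public static void main(String[] args) {
        // double: no exception, the result is Infinity (or NaN for 0.0 / 0.0)
        System.out.println(1.0 / 0.0);
        try {
          // long: throws java.lang.ArithmeticException: / by zero
          System.out.println(37L % 0L);
        } catch (ArithmeticException e) {
          System.out.println(e);
        }
      }
    }

The hand-written LongColModuloLongColumn added below therefore guards each
modulo with a zero check and marks the affected rows as NULL instead of
letting the exception escape.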


http://git-wip-us.apache.org/repos/asf/hive/blob/33dc63bb/ql/src/gen/vectorization/ExpressionTemplates/ColumnDivideColumn.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/ColumnDivideColumn.txt b/ql/src/gen/vectorization/ExpressionTemplates/ColumnDivideColumn.txt
index 04b533a..ee2d10c 100644
--- a/ql/src/gen/vectorization/ExpressionTemplates/ColumnDivideColumn.txt
+++ b/ql/src/gen/vectorization/ExpressionTemplates/ColumnDivideColumn.txt
@@ -25,7 +25,7 @@ import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
 
 /**
- * Generated from template ColumnArithmeticColumn.txt, which covers binary arithmetic
+ * Generated from template ColumnDivideColumn.txt, which covers division and modulo
  * expressions between columns.
  */
 public class <ClassName> extends VectorExpression {

http://git-wip-us.apache.org/repos/asf/hive/blob/33dc63bb/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/LongColModuloLongColumn.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/LongColModuloLongColumn.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/LongColModuloLongColumn.java
new file mode 100644
index 0000000..3766116
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/LongColModuloLongColumn.java
@@ -0,0 +1,209 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions;
+
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil;
+import org.apache.hadoop.hive.ql.exec.vector.*;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+
+/**
+ * This operation is handled as a special case because Hive
+ * long % long modulo needs special handling to avoid a
+ * divide-by-zero exception when the divisor is zero.
+ */
+public class LongColModuloLongColumn extends VectorExpression {
+
+  private static final long serialVersionUID = 1L;
+
+  private int colNum1;
+  private int colNum2;
+  private int outputColumn;
+
+  public LongColModuloLongColumn(int colNum1, int colNum2, int outputColumn) {
+    this.colNum1 = colNum1;
+    this.colNum2 = colNum2;
+    this.outputColumn = outputColumn;
+  }
+
+  public LongColModuloLongColumn() {
+  }
+
+  @Override
+  public void evaluate(VectorizedRowBatch batch) {
+
+    if (childExpressions != null) {
+      super.evaluateChildren(batch);
+    }
+
+    LongColumnVector inputColVector1 = (LongColumnVector) batch.cols[colNum1];
+    LongColumnVector inputColVector2 = (LongColumnVector) batch.cols[colNum2];
+    LongColumnVector outputColVector = (LongColumnVector) batch.cols[outputColumn];
+    int[] sel = batch.selected;
+    int n = batch.size;
+    long[] vector1 = inputColVector1.vector;
+    long[] vector2 = inputColVector2.vector;
+    long[] outputVector = outputColVector.vector;
+
+    // return immediately if batch is empty
+    if (n == 0) {
+      return;
+    }
+
+    outputColVector.isRepeating =
+         inputColVector1.isRepeating && inputColVector2.isRepeating
+      || inputColVector1.isRepeating && !inputColVector1.noNulls && inputColVector1.isNull[0]
+      || inputColVector2.isRepeating && !inputColVector2.noNulls && inputColVector2.isNull[0];
+
+    // Handle nulls first
+    NullUtil.propagateNullsColCol(
+      inputColVector1, inputColVector2, outputColVector, sel, n, batch.selectedInUse);
+
+    /* Disregard nulls for processing. In other words,
+     * the arithmetic operation is performed even if one or
+     * more inputs are null. This is to improve speed by avoiding
+     * conditional checks in the inner loop.
+     */
+    boolean hasDivBy0 = false;
+    if (inputColVector1.isRepeating && inputColVector2.isRepeating) {
+      long denom = vector2[0];
+      hasDivBy0 = hasDivBy0 || (denom == 0);
+      if (denom != 0) {
+        outputVector[0] = vector1[0] % denom;
+      }
+    } else if (inputColVector1.isRepeating) {
+      final long vector1Value = vector1[0];
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          long denom = vector2[i];
+          hasDivBy0 = hasDivBy0 || (denom == 0);
+          if (denom != 0) {
+            outputVector[i] = vector1Value % denom;
+          }
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          hasDivBy0 = hasDivBy0 || (vector2[i] == 0);
+          if (vector2[i] != 0) {
+            outputVector[i] = vector1Value % vector2[i];
+          }
+        }
+      }
+    } else if (inputColVector2.isRepeating) {
+      final long vector2Value = vector2[0];
+      if (vector2Value == 0) {
+        // Denominator is zero, convert the batch to nulls
+        outputColVector.noNulls = false;
+        outputColVector.isRepeating = true;
+        outputColVector.isNull[0] = true;
+      } else if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          outputVector[i] = vector1[i] % vector2Value;
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          outputVector[i] = vector1[i] % vector2Value;
+        }
+      }
+    } else {
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          long denom = vector2[i];
+          hasDivBy0 = hasDivBy0 || (denom == 0);
+          if (denom != 0) {
+            outputVector[i] = vector1[i] % denom;
+          }
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          hasDivBy0 = hasDivBy0 || (vector2[i] == 0);
+          if (vector2[i] != 0) {
+            outputVector[i] = vector1[i] % vector2[i];
+          }
+        }
+      }
+    }
+
+    /* For the case when the output can have null values, follow
+     * the convention that the data values must be 1 for long and
+     * NaN for double. This is to prevent possible later zero-divide errors
+     * in complex arithmetic expressions like col2 % (col1 - 1)
+     * in the case when some col1 entries are null.
+     */
+    if (!hasDivBy0) {
+      NullUtil.setNullDataEntriesLong(outputColVector, batch.selectedInUse, sel, n);
+    } else {
+      NullUtil.setNullAndDivBy0DataEntriesLong(
+          outputColVector, batch.selectedInUse, sel, n, inputColVector2);
+    }
+  }
+
+  @Override
+  public int getOutputColumn() {
+    return outputColumn;
+  }
+
+  @Override
+  public String getOutputType() {
+    return "long";
+  }
+
+  public int getColNum1() {
+    return colNum1;
+  }
+
+  public void setColNum1(int colNum1) {
+    this.colNum1 = colNum1;
+  }
+
+  public int getColNum2() {
+    return colNum2;
+  }
+
+  public void setColNum2(int colNum2) {
+    this.colNum2 = colNum2;
+  }
+
+  public void setOutputColumn(int outputColumn) {
+    this.outputColumn = outputColumn;
+  }
+
+  @Override
+  public String vectorExpressionParameters() {
+    return "col " + colNum1 + ", col " + + colNum2;
+  }
+
+  @Override
+  public VectorExpressionDescriptor.Descriptor getDescriptor() {
+    return (new VectorExpressionDescriptor.Builder())
+        .setMode(
+            VectorExpressionDescriptor.Mode.PROJECTION)
+        .setNumArguments(2)
+        .setArgumentTypes(
+            VectorExpressionDescriptor.ArgumentType.getType("long"),
+            VectorExpressionDescriptor.ArgumentType.getType("long"))
+        .setInputExpressionTypes(
+            VectorExpressionDescriptor.InputExpressionType.COLUMN,
+            VectorExpressionDescriptor.InputExpressionType.COLUMN).build();
+  }
+}
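
Note (editorial, not part of the patch): a minimal usage sketch, assuming it
is compiled inside the Hive ql module where the classes above are on the
classpath. It exercises the repeating-denominator branch of evaluate(): when
the divisor column repeats a zero, the whole output batch is marked as a
repeating NULL rather than throwing ArithmeticException.

    import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
    import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
    import org.apache.hadoop.hive.ql.exec.vector.expressions.LongColModuloLongColumn;

    public class LongColModuloLongColumnSketch {
      public static void main(String[] args) {
        VectorizedRowBatch batch = new VectorizedRowBatch(3);
        LongColumnVector numerators = new LongColumnVector();
        LongColumnVector denominators = new LongColumnVector();
        for (int i = 0; i < 4; i++) {
          numerators.vector[i] = i * 37;
        }
        denominators.vector[0] = 0;       // single value used for every row
        denominators.isRepeating = true;
        batch.cols[0] = numerators;
        batch.cols[1] = denominators;
        batch.cols[2] = new LongColumnVector();
        batch.size = 4;

        new LongColModuloLongColumn(0, 1, 2).evaluate(batch);

        LongColumnVector out = (LongColumnVector) batch.cols[2];
        // Expected per the isRepeating branch above:
        // isRepeating=true, isNull[0]=true, noNulls=false, and no exception
        System.out.println(out.isRepeating + " " + out.isNull[0] + " " + out.noNulls);
      }
    }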

http://git-wip-us.apache.org/repos/asf/hive/blob/33dc63bb/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPMod.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPMod.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPMod.java
index 6d3e82e..c85fdc8 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPMod.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPMod.java
@@ -21,6 +21,7 @@ package org.apache.hadoop.hive.ql.udf.generic;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.LongColModuloLongColumn;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.*;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;

http://git-wip-us.apache.org/repos/asf/hive/blob/33dc63bb/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizationContext.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizationContext.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizationContext.java
index 9fcb392..66ce378 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizationContext.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizationContext.java
@@ -54,6 +54,7 @@ import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprTimestampScalarSc
 import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprVarCharScalarStringGroupColumn;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.IsNotNull;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.IsNull;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.LongColModuloLongColumn;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.LongColumnInList;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.LongColEqualLongScalar;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.LongColGreaterLongScalar;
@@ -107,7 +108,6 @@ import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncLnDoubleToDoubl
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncRoundDoubleToDouble;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncSinDoubleToDouble;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongColAddLongColumn;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongColModuloLongColumn;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongColMultiplyLongColumn;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongColSubtractLongColumn;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongColUnaryMinus;

http://git-wip-us.apache.org/repos/asf/hive/blob/33dc63bb/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorArithmeticExpressions.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorArithmeticExpressions.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorArithmeticExpressions.java
index ea06ea0..5033958 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorArithmeticExpressions.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorArithmeticExpressions.java
@@ -21,6 +21,7 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.assertNull;
 import junit.framework.Assert;
 
 import org.apache.hadoop.hive.common.type.HiveDecimal;
@@ -308,6 +309,54 @@ public class TestVectorArithmeticExpressions {
   }
 
   @Test
+  public void testLongColModuloLongColumn() {
+    VectorizedRowBatch batch = getVectorizedRowBatch2LongInLongOut();
+    LongColModuloLongColumn expr = new LongColModuloLongColumn(0, 1, 2);
+    batch.cols[0].isNull[1] = true;
+    batch.cols[0].noNulls = false;
+    batch.cols[1].noNulls = false;
+    LongColumnVector out = (LongColumnVector) batch.cols[2];
+
+    // Set so we can verify they are reset by operation
+    out.noNulls = true;
+    out.isRepeating = true;
+
+    expr.evaluate(batch);
+
+    // 0/0 for entry 0 should be set as NULL
+    assertFalse(out.noNulls);
+    assertTrue(out.isNull[0]);
+
+    // verify NULL output in entry 1 is correct
+    assertTrue(out.isNull[1]);
+
+    // check entries beyond first 2
+    for (int i = 2; i != batch.size; i++) {
+      assertTrue(out.vector[i] == 0L);
+    }
+    assertFalse(out.noNulls);
+    assertFalse(out.isRepeating);
+  }
+
+  private VectorizedRowBatch getVectorizedRowBatch2LongInLongOut() {
+    VectorizedRowBatch batch = new VectorizedRowBatch(3);
+    LongColumnVector lcv, lcv2;
+    lcv = new LongColumnVector();
+    for (int i = 0; i < VectorizedRowBatch.DEFAULT_SIZE; i++) {
+      lcv.vector[i] = i * 37;
+    }
+    batch.cols[0] = lcv;
+    lcv2 = new LongColumnVector();
+    batch.cols[1] = lcv2;
+    for (int i = 0; i < VectorizedRowBatch.DEFAULT_SIZE; i++) {
+      lcv2.vector[i] = i * 37;
+    }
+    batch.cols[2] = new LongColumnVector();
+    batch.size = VectorizedRowBatch.DEFAULT_SIZE;
+    return batch;
+  }
+
+  @Test
   public void testDecimalColAddDecimalColumn() {
     VectorizedRowBatch b = getVectorizedRowBatch3DecimalCols();
     VectorExpression expr = new DecimalColAddDecimalColumn(0, 1, 2);
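
Note (editorial, not part of the patch): in the new test both input columns
hold i * 37, so row 0 evaluates 0 % 0 and is nulled by the divide-by-zero
handling, row 1 is NULL because the test explicitly marks cols[0].isNull[1],
and every row i >= 2 evaluates (i * 37) % (i * 37) = 0, which is why the loop
asserts out.vector[i] == 0.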

http://git-wip-us.apache.org/repos/asf/hive/blob/33dc63bb/ql/src/test/queries/clientpositive/vectorization_div0.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/vectorization_div0.q b/ql/src/test/queries/clientpositive/vectorization_div0.q
index 025d457..d7b6c3c 100644
--- a/ql/src/test/queries/clientpositive/vectorization_div0.q
+++ b/ql/src/test/queries/clientpositive/vectorization_div0.q
@@ -5,8 +5,8 @@ set hive.fetch.task.conversion=none;
 
 -- TODO: add more stuff here after HIVE-5918 is fixed, such as cbigint and constants
 explain vectorization expression
-select cdouble / 0.0 from alltypesorc limit 100;
-select cdouble / 0.0 from alltypesorc limit 100;
+select cint / 0, ctinyint / 0, cbigint / 0, cdouble / 0.0 from alltypesorc limit 100;
+select cint / 0, ctinyint / 0, cbigint / 0, cdouble / 0.0 from alltypesorc limit 100;
 
 -- There are no zeros in the table, but there is 988888, so use it as zero
 
@@ -25,3 +25,11 @@ from alltypesorc where cdouble >= -500 and cdouble < -199 order by s1, s2 limit
 select (cdouble + 200.0) as s1, cbigint / (cdouble + 200.0) as s2, (cdouble + 200.0) / (cdouble + 200.0), cbigint / (cdouble + 200.0), 3 / (cdouble + 200.0), 1.2 / (cdouble + 200.0) 
 from alltypesorc where cdouble >= -500 and cdouble < -199 order by s1, s2 limit 100;
 
+-- There are no zeros in the table, but there is 1018195815 in cbigint, 528534767 in cint, so using it to do a divide by zero. ctinyint has a zero so can be used directly
+
+explain vectorization expression
+select cint, cbigint, ctinyint, (cint / (cint - 528534767)) as c1, (cbigint / (cbigint - 1018195815)) as c2, (ctinyint / ctinyint) as c3, (cint % (cint - 528534767)) as c4, (cbigint % (cbigint - 1018195815)), (ctinyint % ctinyint) as c3
+from alltypesorc where cint > 500000000 or cdouble > 1000000000 or ctinyint = 0 order by c1, c2 limit 100;
+
+select cint, cbigint, ctinyint, (cint / (cint - 528534767)) as c1, (cbigint / (cbigint - 1018195815)) as c2, (ctinyint / ctinyint) as c3, (cint % (cint - 528534767)) as c4, (cbigint % (cbigint - 1018195815)), (ctinyint % ctinyint) as c3
+from alltypesorc where cint > 500000000 or cdouble > 1000000000 or ctinyint = 0 order by c1, c2 limit 100;
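
Note (editorial, not part of the patch): the constants 528534767 and
1018195815 are existing cint/cbigint values in alltypesorc, so subtracting
them yields a zero denominator on the rows that contain those exact values,
and ctinyint contains literal zeros. Before this fix the vectorized modulo
on those rows failed with ArithmeticException; with the fix the affected
division and modulo columns come back as NULL, as the expected .q.out
results below show.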

http://git-wip-us.apache.org/repos/asf/hive/blob/33dc63bb/ql/src/test/results/clientpositive/llap/vectorization_div0.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/vectorization_div0.q.out b/ql/src/test/results/clientpositive/llap/vectorization_div0.q.out
index 2b5e5a8..31be04e 100644
--- a/ql/src/test/results/clientpositive/llap/vectorization_div0.q.out
+++ b/ql/src/test/results/clientpositive/llap/vectorization_div0.q.out
@@ -1,8 +1,8 @@
 PREHOOK: query: explain vectorization expression
-select cdouble / 0.0 from alltypesorc limit 100
+select cint / 0, ctinyint / 0, cbigint / 0, cdouble / 0.0 from alltypesorc 
limit 100
 PREHOOK: type: QUERY
 POSTHOOK: query: explain vectorization expression
-select cdouble / 0.0 from alltypesorc limit 100
+select cint / 0, ctinyint / 0, cbigint / 0, cdouble / 0.0 from alltypesorc 
limit 100
 POSTHOOK: type: QUERY
 PLAN VECTORIZATION:
   enabled: true
@@ -21,31 +21,31 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: alltypesorc
-                  Statistics: Num rows: 12288 Data size: 73400 Basic stats: 
COMPLETE Column stats: COMPLETE
+                  Statistics: Num rows: 12288 Data size: 220184 Basic stats: 
COMPLETE Column stats: COMPLETE
                   TableScan Vectorization:
                       native: true
                       projectedOutputColumns: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 
10, 11]
                   Select Operator
-                    expressions: (cdouble / 0.0) (type: double)
-                    outputColumnNames: _col0
+                    expressions: (UDFToDouble(cint) / 0.0) (type: double), 
(UDFToDouble(ctinyint) / 0.0) (type: double), (UDFToDouble(cbigint) / 0.0) 
(type: double), (cdouble / 0.0) (type: double)
+                    outputColumnNames: _col0, _col1, _col2, _col3
                     Select Vectorization:
                         className: VectorSelectOperator
                         native: true
-                        projectedOutputColumns: [12]
-                        selectExpressions: DoubleColDivideDoubleScalar(col 5, 
val 0.0) -> 12:double
-                    Statistics: Num rows: 12288 Data size: 98304 Basic stats: 
COMPLETE Column stats: COMPLETE
+                        projectedOutputColumns: [13, 14, 15, 12]
+                        selectExpressions: DoubleColDivideDoubleScalar(col 12, 
val 0.0)(children: CastLongToDouble(col 2) -> 12:double) -> 13:double, 
DoubleColDivideDoubleScalar(col 12, val 0.0)(children: CastLongToDouble(col 0) 
-> 12:double) -> 14:double, DoubleColDivideDoubleScalar(col 12, val 
0.0)(children: CastLongToDouble(col 3) -> 12:double) -> 15:double, 
DoubleColDivideDoubleScalar(col 5, val 0.0) -> 12:double
+                    Statistics: Num rows: 12288 Data size: 393216 Basic stats: 
COMPLETE Column stats: COMPLETE
                     Limit
                       Number of rows: 100
                       Limit Vectorization:
                           className: VectorLimitOperator
                           native: true
-                      Statistics: Num rows: 100 Data size: 800 Basic stats: 
COMPLETE Column stats: COMPLETE
+                      Statistics: Num rows: 100 Data size: 3200 Basic stats: 
COMPLETE Column stats: COMPLETE
                       File Output Operator
                         compressed: false
                         File Sink Vectorization:
                             className: VectorFileSinkOperator
                             native: false
-                        Statistics: Num rows: 100 Data size: 800 Basic stats: 
COMPLETE Column stats: COMPLETE
+                        Statistics: Num rows: 100 Data size: 3200 Basic stats: 
COMPLETE Column stats: COMPLETE
                         table:
                             input format: 
org.apache.hadoop.mapred.SequenceFileInputFormat
                             output format: 
org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -67,114 +67,114 @@ STAGE PLANS:
       Processor Tree:
         ListSink
 
-PREHOOK: query: select cdouble / 0.0 from alltypesorc limit 100
+PREHOOK: query: select cint / 0, ctinyint / 0, cbigint / 0, cdouble / 0.0 from 
alltypesorc limit 100
 PREHOOK: type: QUERY
 PREHOOK: Input: default@alltypesorc
 #### A masked pattern was here ####
-POSTHOOK: query: select cdouble / 0.0 from alltypesorc limit 100
+POSTHOOK: query: select cint / 0, ctinyint / 0, cbigint / 0, cdouble / 0.0 
from alltypesorc limit 100
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@alltypesorc
 #### A masked pattern was here ####
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
 PREHOOK: query: explain vectorization expression
 select (cbigint - 988888L) as s1, cdouble / (cbigint - 988888L) as s2, 1.2 / 
(cbigint - 988888L) 
 from alltypesorc where cbigint > 0 and cbigint < 100000000 order by s1, s2 
limit 100
@@ -615,3 +615,223 @@ POSTHOOK: Input: default@alltypesorc
 0.0    NULL    NULL    NULL    NULL    NULL
 0.0    NULL    NULL    NULL    NULL    NULL
 0.0    NULL    NULL    NULL    NULL    NULL
+PREHOOK: query: explain vectorization expression
+select cint, cbigint, ctinyint, (cint / (cint - 528534767)) as c1, (cbigint / 
(cbigint - 1018195815)) as c2, (ctinyint / ctinyint) as c3, (cint % (cint - 
528534767)) as c4, (cbigint % (cbigint - 1018195815)), (ctinyint % ctinyint) as 
c3
+from alltypesorc where cint > 500000000 or cdouble > 1000000000 or ctinyint = 
0 order by c1, c2 limit 100
+PREHOOK: type: QUERY
+POSTHOOK: query: explain vectorization expression
+select cint, cbigint, ctinyint, (cint / (cint - 528534767)) as c1, (cbigint / 
(cbigint - 1018195815)) as c2, (ctinyint / ctinyint) as c3, (cint % (cint - 
528534767)) as c4, (cbigint % (cbigint - 1018195815)), (ctinyint % ctinyint) as 
c3
+from alltypesorc where cint > 500000000 or cdouble > 1000000000 or ctinyint = 
0 order by c1, c2 limit 100
+POSTHOOK: type: QUERY
+PLAN VECTORIZATION:
+  enabled: true
+  enabledConditionsMet: [hive.vectorized.execution.enabled IS true]
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: alltypesorc
+                  Statistics: Num rows: 12288 Data size: 220184 Basic stats: 
COMPLETE Column stats: COMPLETE
+                  TableScan Vectorization:
+                      native: true
+                      projectedOutputColumns: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 
10, 11]
+                  Filter Operator
+                    Filter Vectorization:
+                        className: VectorFilterOperator
+                        native: true
+                        predicateExpression: FilterExprOrExpr(children: 
FilterLongColGreaterLongScalar(col 2, val 500000000) -> boolean, 
FilterDoubleColGreaterDoubleScalar(col 5, val 1.0E9) -> boolean, 
FilterLongColEqualLongScalar(col 0, val 0) -> boolean) -> boolean
+                    predicate: ((cint > 500000000) or (cdouble > 1.0E9) or 
(ctinyint = 0)) (type: boolean)
+                    Statistics: Num rows: 4227 Data size: 75752 Basic stats: 
COMPLETE Column stats: COMPLETE
+                    Select Operator
+                      expressions: cint (type: int), cbigint (type: bigint), 
ctinyint (type: tinyint), (cint / (cint - 528534767)) (type: double), (cbigint 
/ (cbigint - 1018195815)) (type: double), (ctinyint / ctinyint) (type: double), 
(cint % (cint - 528534767)) (type: int), (cbigint % (cbigint - 1018195815)) 
(type: bigint), (ctinyint % ctinyint) (type: tinyint)
+                      outputColumnNames: _col0, _col1, _col2, _col3, _col4, 
_col5, _col6, _col7, _col8
+                      Select Vectorization:
+                          className: VectorSelectOperator
+                          native: true
+                          projectedOutputColumns: [2, 3, 0, 13, 14, 15, 16, 
17, 12]
+                          selectExpressions: LongColDivideLongColumn(col 2, 
col 12)(children: LongColSubtractLongScalar(col 2, val 528534767) -> 12:long) 
-> 13:double, LongColDivideLongColumn(col 3, col 12)(children: 
LongColSubtractLongScalar(col 3, val 1018195815) -> 12:long) -> 14:double, 
LongColDivideLongColumn(col 0, col 0) -> 15:double, LongColModuloLongColumn(col 
2, col 12)(children: LongColSubtractLongScalar(col 2, val 528534767) -> 
12:long) -> 16:long, LongColModuloLongColumn(col 3, col 12)(children: 
LongColSubtractLongScalar(col 3, val 1018195815) -> 12:long) -> 17:long, 
LongColModuloLongColumn(col 0, col 0) -> 12:long
+                      Statistics: Num rows: 4227 Data size: 219576 Basic 
stats: COMPLETE Column stats: COMPLETE
+                      Reduce Output Operator
+                        key expressions: _col3 (type: double), _col4 (type: 
double)
+                        sort order: ++
+                        Reduce Sink Vectorization:
+                            className: VectorReduceSinkOperator
+                            native: false
+                            nativeConditionsMet: 
hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine 
tez IN [tez, spark] IS true, No DISTINCT columns IS true, BinarySortableSerDe 
for keys IS true, LazyBinarySerDe for values IS true
+                            nativeConditionsNotMet: No TopN IS false
+                        Statistics: Num rows: 4227 Data size: 219576 Basic 
stats: COMPLETE Column stats: COMPLETE
+                        TopN Hash Memory Usage: 0.1
+                        value expressions: _col0 (type: int), _col1 (type: 
bigint), _col2 (type: tinyint), _col5 (type: double), _col6 (type: int), _col7 
(type: bigint), _col8 (type: tinyint)
+            Execution mode: vectorized, llap
+            LLAP IO: all inputs
+            Map Vectorization:
+                enabled: true
+                enabledConditionsMet: 
hive.vectorized.use.vectorized.input.format IS true
+                groupByVectorOutput: true
+                inputFileFormats: 
org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                allNative: false
+                usesVectorUDFAdaptor: false
+                vectorized: true
+        Reducer 2 
+            Execution mode: vectorized, llap
+            Reduce Vectorization:
+                enabled: true
+                enableConditionsMet: hive.vectorized.execution.reduce.enabled 
IS true, hive.execution.engine tez IN [tez, spark] IS true
+                groupByVectorOutput: true
+                allNative: false
+                usesVectorUDFAdaptor: false
+                vectorized: true
+            Reduce Operator Tree:
+              Select Operator
+                expressions: VALUE._col0 (type: int), VALUE._col1 (type: 
bigint), VALUE._col2 (type: tinyint), KEY.reducesinkkey0 (type: double), 
KEY.reducesinkkey1 (type: double), VALUE._col3 (type: double), VALUE._col4 
(type: int), VALUE._col5 (type: bigint), VALUE._col6 (type: tinyint)
+                outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, 
_col6, _col7, _col8
+                Select Vectorization:
+                    className: VectorSelectOperator
+                    native: true
+                    projectedOutputColumns: [2, 3, 4, 0, 1, 5, 6, 7, 8]
+                Statistics: Num rows: 4227 Data size: 219576 Basic stats: 
COMPLETE Column stats: COMPLETE
+                Limit
+                  Number of rows: 100
+                  Limit Vectorization:
+                      className: VectorLimitOperator
+                      native: true
+                  Statistics: Num rows: 100 Data size: 5216 Basic stats: 
COMPLETE Column stats: COMPLETE
+                  File Output Operator
+                    compressed: false
+                    File Sink Vectorization:
+                        className: VectorFileSinkOperator
+                        native: false
+                    Statistics: Num rows: 100 Data size: 5216 Basic stats: 
COMPLETE Column stats: COMPLETE
+                    table:
+                        input format: 
org.apache.hadoop.mapred.SequenceFileInputFormat
+                        output format: 
org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                        serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: 100
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select cint, cbigint, ctinyint, (cint / (cint - 528534767)) as 
c1, (cbigint / (cbigint - 1018195815)) as c2, (ctinyint / ctinyint) as c3, 
(cint % (cint - 528534767)) as c4, (cbigint % (cbigint - 1018195815)), 
(ctinyint % ctinyint) as c3
+from alltypesorc where cint > 500000000 or cdouble > 1000000000 or ctinyint = 
0 order by c1, c2 limit 100
+PREHOOK: type: QUERY
+PREHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+POSTHOOK: query: select cint, cbigint, ctinyint, (cint / (cint - 528534767)) 
as c1, (cbigint / (cbigint - 1018195815)) as c2, (ctinyint / ctinyint) as c3, 
(cint % (cint - 528534767)) as c4, (cbigint % (cbigint - 1018195815)), 
(ctinyint % ctinyint) as c3
+from alltypesorc where cint > 500000000 or cdouble > 1000000000 or ctinyint = 
0 order by c1, c2 limit 100
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+528534767      NULL    -50     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    33      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -28     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    31      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -34     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    29      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    31      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    NULL    NULL    NULL    NULL    NULL    NULL    NULL
+528534767      NULL    -11     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    61      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    16      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    62      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -23     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -51     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -11     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -48     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -62     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -45     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    40      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    39      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -32     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -56     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -7      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    24      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    36      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -23     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -55     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -11     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    51      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -24     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -44     NULL    NULL    1.0     NULL    NULL    0
+NULL   1018195815      0       NULL    NULL    NULL    NULL    NULL    NULL
+528534767      NULL    24      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    4       NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -57     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -22     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    28      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -16     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    46      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    29      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -56     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -16     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    38      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -54     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -23     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -19     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    40      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    53      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -34     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    5       NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    51      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -4      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    61      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    19      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -33     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    53      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    18      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    30      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -36     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    34      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -55     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -40     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    21      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    61      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -59     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    0       NULL    NULL    NULL    NULL    NULL    NULL
+528534767      NULL    -21     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -33     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -30     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -5      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -53     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    34      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -5      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    27      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    NULL    NULL    NULL    NULL    NULL    NULL    NULL
+528534767      NULL    -21     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    43      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    41      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -28     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -5      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    13      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -45     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    10      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -22     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    38      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -48     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    2       NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -37     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -43     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    36      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -1      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -12     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    0       NULL    NULL    NULL    NULL    NULL    NULL
+528534767      NULL    26      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -22     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    9       NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -13     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    38      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -4      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -47     NULL    NULL    1.0     NULL    NULL    0

http://git-wip-us.apache.org/repos/asf/hive/blob/33dc63bb/ql/src/test/results/clientpositive/spark/vectorization_div0.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/vectorization_div0.q.out b/ql/src/test/results/clientpositive/spark/vectorization_div0.q.out
index 81979d6..97c0e40 100644
--- a/ql/src/test/results/clientpositive/spark/vectorization_div0.q.out
+++ b/ql/src/test/results/clientpositive/spark/vectorization_div0.q.out
@@ -1,8 +1,8 @@
 PREHOOK: query: explain vectorization expression
-select cdouble / 0.0 from alltypesorc limit 100
+select cint / 0, ctinyint / 0, cbigint / 0, cdouble / 0.0 from alltypesorc 
limit 100
 PREHOOK: type: QUERY
 POSTHOOK: query: explain vectorization expression
-select cdouble / 0.0 from alltypesorc limit 100
+select cint / 0, ctinyint / 0, cbigint / 0, cdouble / 0.0 from alltypesorc 
limit 100
 POSTHOOK: type: QUERY
 PLAN VECTORIZATION:
   enabled: true
@@ -26,13 +26,13 @@ STAGE PLANS:
                       native: true
                       projectedOutputColumns: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 
10, 11]
                   Select Operator
-                    expressions: (cdouble / 0.0) (type: double)
-                    outputColumnNames: _col0
+                    expressions: (UDFToDouble(cint) / 0.0) (type: double), 
(UDFToDouble(ctinyint) / 0.0) (type: double), (UDFToDouble(cbigint) / 0.0) 
(type: double), (cdouble / 0.0) (type: double)
+                    outputColumnNames: _col0, _col1, _col2, _col3
                     Select Vectorization:
                         className: VectorSelectOperator
                         native: true
-                        projectedOutputColumns: [12]
-                        selectExpressions: DoubleColDivideDoubleScalar(col 5, 
val 0.0) -> 12:double
+                        projectedOutputColumns: [13, 14, 15, 12]
+                        selectExpressions: DoubleColDivideDoubleScalar(col 12, 
val 0.0)(children: CastLongToDouble(col 2) -> 12:double) -> 13:double, 
DoubleColDivideDoubleScalar(col 12, val 0.0)(children: CastLongToDouble(col 0) 
-> 12:double) -> 14:double, DoubleColDivideDoubleScalar(col 12, val 
0.0)(children: CastLongToDouble(col 3) -> 12:double) -> 15:double, 
DoubleColDivideDoubleScalar(col 5, val 0.0) -> 12:double
                     Statistics: Num rows: 12288 Data size: 377237 Basic stats: 
COMPLETE Column stats: NONE
                     Limit
                       Number of rows: 100
@@ -66,114 +66,114 @@ STAGE PLANS:
       Processor Tree:
         ListSink
 
-PREHOOK: query: select cdouble / 0.0 from alltypesorc limit 100
+PREHOOK: query: select cint / 0, ctinyint / 0, cbigint / 0, cdouble / 0.0 from 
alltypesorc limit 100
 PREHOOK: type: QUERY
 PREHOOK: Input: default@alltypesorc
 #### A masked pattern was here ####
-POSTHOOK: query: select cdouble / 0.0 from alltypesorc limit 100
+POSTHOOK: query: select cint / 0, ctinyint / 0, cbigint / 0, cdouble / 0.0 
from alltypesorc limit 100
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@alltypesorc
 #### A masked pattern was here ####
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
 PREHOOK: query: explain vectorization expression
 select (cbigint - 988888L) as s1, cdouble / (cbigint - 988888L) as s2, 1.2 / 
(cbigint - 988888L) 
 from alltypesorc where cbigint > 0 and cbigint < 100000000 order by s1, s2 
limit 100
@@ -610,3 +610,221 @@ POSTHOOK: Input: default@alltypesorc
 0.0    NULL    NULL    NULL    NULL    NULL
 0.0    NULL    NULL    NULL    NULL    NULL
 0.0    NULL    NULL    NULL    NULL    NULL
+PREHOOK: query: explain vectorization expression
+select cint, cbigint, ctinyint, (cint / (cint - 528534767)) as c1, (cbigint / 
(cbigint - 1018195815)) as c2, (ctinyint / ctinyint) as c3, (cint % (cint - 
528534767)) as c4, (cbigint % (cbigint - 1018195815)), (ctinyint % ctinyint) as 
c3
+from alltypesorc where cint > 500000000 or cdouble > 1000000000 or ctinyint = 
0 order by c1, c2 limit 100
+PREHOOK: type: QUERY
+POSTHOOK: query: explain vectorization expression
+select cint, cbigint, ctinyint, (cint / (cint - 528534767)) as c1, (cbigint / 
(cbigint - 1018195815)) as c2, (ctinyint / ctinyint) as c3, (cint % (cint - 
528534767)) as c4, (cbigint % (cbigint - 1018195815)), (ctinyint % ctinyint) as 
c3
+from alltypesorc where cint > 500000000 or cdouble > 1000000000 or ctinyint = 
0 order by c1, c2 limit 100
+POSTHOOK: type: QUERY
+PLAN VECTORIZATION:
+  enabled: true
+  enabledConditionsMet: [hive.vectorized.execution.enabled IS true]
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Spark
+      Edges:
+        Reducer 2 <- Map 1 (SORT, 1)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: alltypesorc
+                  Statistics: Num rows: 12288 Data size: 377237 Basic stats: 
COMPLETE Column stats: NONE
+                  TableScan Vectorization:
+                      native: true
+                      projectedOutputColumns: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 
10, 11]
+                  Filter Operator
+                    Filter Vectorization:
+                        className: VectorFilterOperator
+                        native: true
+                        predicateExpression: FilterExprOrExpr(children: 
FilterLongColGreaterLongScalar(col 2, val 500000000) -> boolean, 
FilterDoubleColGreaterDoubleScalar(col 5, val 1.0E9) -> boolean, 
FilterLongColEqualLongScalar(col 0, val 0) -> boolean) -> boolean
+                    predicate: ((cint > 500000000) or (cdouble > 1.0E9) or 
(ctinyint = 0)) (type: boolean)
+                    Statistics: Num rows: 12288 Data size: 377237 Basic stats: 
COMPLETE Column stats: NONE
+                    Select Operator
+                      expressions: cint (type: int), cbigint (type: bigint), 
ctinyint (type: tinyint), (cint / (cint - 528534767)) (type: double), (cbigint 
/ (cbigint - 1018195815)) (type: double), (ctinyint / ctinyint) (type: double), 
(cint % (cint - 528534767)) (type: int), (cbigint % (cbigint - 1018195815)) 
(type: bigint), (ctinyint % ctinyint) (type: tinyint)
+                      outputColumnNames: _col0, _col1, _col2, _col3, _col4, 
_col5, _col6, _col7, _col8
+                      Select Vectorization:
+                          className: VectorSelectOperator
+                          native: true
+                          projectedOutputColumns: [2, 3, 0, 13, 14, 15, 16, 
17, 12]
+                          selectExpressions: LongColDivideLongColumn(col 2, 
col 12)(children: LongColSubtractLongScalar(col 2, val 528534767) -> 12:long) 
-> 13:double, LongColDivideLongColumn(col 3, col 12)(children: 
LongColSubtractLongScalar(col 3, val 1018195815) -> 12:long) -> 14:double, 
LongColDivideLongColumn(col 0, col 0) -> 15:double, LongColModuloLongColumn(col 
2, col 12)(children: LongColSubtractLongScalar(col 2, val 528534767) -> 
12:long) -> 16:long, LongColModuloLongColumn(col 3, col 12)(children: 
LongColSubtractLongScalar(col 3, val 1018195815) -> 12:long) -> 17:long, 
LongColModuloLongColumn(col 0, col 0) -> 12:long
+                      Statistics: Num rows: 12288 Data size: 377237 Basic 
stats: COMPLETE Column stats: NONE
+                      Reduce Output Operator
+                        key expressions: _col3 (type: double), _col4 (type: 
double)
+                        sort order: ++
+                        Reduce Sink Vectorization:
+                            className: VectorReduceSinkOperator
+                            native: false
+                            nativeConditionsMet: 
hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine 
spark IN [tez, spark] IS true, No DISTINCT columns IS true, BinarySortableSerDe 
for keys IS true, LazyBinarySerDe for values IS true
+                            nativeConditionsNotMet: No TopN IS false
+                        Statistics: Num rows: 12288 Data size: 377237 Basic 
stats: COMPLETE Column stats: NONE
+                        TopN Hash Memory Usage: 0.1
+                        value expressions: _col0 (type: int), _col1 (type: 
bigint), _col2 (type: tinyint), _col5 (type: double), _col6 (type: int), _col7 
(type: bigint), _col8 (type: tinyint)
+            Execution mode: vectorized
+            Map Vectorization:
+                enabled: true
+                enabledConditionsMet: 
hive.vectorized.use.vectorized.input.format IS true
+                groupByVectorOutput: true
+                inputFileFormats: 
org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                allNative: false
+                usesVectorUDFAdaptor: false
+                vectorized: true
+        Reducer 2 
+            Execution mode: vectorized
+            Reduce Vectorization:
+                enabled: true
+                enableConditionsMet: hive.vectorized.execution.reduce.enabled 
IS true, hive.execution.engine spark IN [tez, spark] IS true
+                groupByVectorOutput: true
+                allNative: false
+                usesVectorUDFAdaptor: false
+                vectorized: true
+            Reduce Operator Tree:
+              Select Operator
+                expressions: VALUE._col0 (type: int), VALUE._col1 (type: 
bigint), VALUE._col2 (type: tinyint), KEY.reducesinkkey0 (type: double), 
KEY.reducesinkkey1 (type: double), VALUE._col3 (type: double), VALUE._col4 
(type: int), VALUE._col5 (type: bigint), VALUE._col6 (type: tinyint)
+                outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, 
_col6, _col7, _col8
+                Select Vectorization:
+                    className: VectorSelectOperator
+                    native: true
+                    projectedOutputColumns: [2, 3, 4, 0, 1, 5, 6, 7, 8]
+                Statistics: Num rows: 12288 Data size: 377237 Basic stats: 
COMPLETE Column stats: NONE
+                Limit
+                  Number of rows: 100
+                  Limit Vectorization:
+                      className: VectorLimitOperator
+                      native: true
+                  Statistics: Num rows: 100 Data size: 3000 Basic stats: 
COMPLETE Column stats: NONE
+                  File Output Operator
+                    compressed: false
+                    File Sink Vectorization:
+                        className: VectorFileSinkOperator
+                        native: false
+                    Statistics: Num rows: 100 Data size: 3000 Basic stats: 
COMPLETE Column stats: NONE
+                    table:
+                        input format: 
org.apache.hadoop.mapred.SequenceFileInputFormat
+                        output format: 
org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                        serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: 100
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select cint, cbigint, ctinyint, (cint / (cint - 528534767)) as 
c1, (cbigint / (cbigint - 1018195815)) as c2, (ctinyint / ctinyint) as c3, 
(cint % (cint - 528534767)) as c4, (cbigint % (cbigint - 1018195815)), 
(ctinyint % ctinyint) as c3
+from alltypesorc where cint > 500000000 or cdouble > 1000000000 or ctinyint = 
0 order by c1, c2 limit 100
+PREHOOK: type: QUERY
+PREHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+POSTHOOK: query: select cint, cbigint, ctinyint, (cint / (cint - 528534767)) 
as c1, (cbigint / (cbigint - 1018195815)) as c2, (ctinyint / ctinyint) as c3, 
(cint % (cint - 528534767)) as c4, (cbigint % (cbigint - 1018195815)), 
(ctinyint % ctinyint) as c3
+from alltypesorc where cint > 500000000 or cdouble > 1000000000 or ctinyint = 
0 order by c1, c2 limit 100
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+528534767      NULL    -50     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    33      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -28     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    31      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -34     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    29      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    31      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    NULL    NULL    NULL    NULL    NULL    NULL    NULL
+528534767      NULL    -11     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    61      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    16      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    62      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -23     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -51     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -11     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -48     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -62     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -45     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    40      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    39      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -32     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -56     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -7      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    24      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    36      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -23     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -55     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -11     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    51      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -24     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -44     NULL    NULL    1.0     NULL    NULL    0
+NULL   1018195815      0       NULL    NULL    NULL    NULL    NULL    NULL
+528534767      NULL    24      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    4       NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -57     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -22     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    28      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -16     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    46      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    29      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -56     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -16     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    38      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -54     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -23     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -19     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    40      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    53      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -34     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    5       NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    51      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -4      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    61      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    19      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -33     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    53      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    18      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    30      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -36     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    34      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -55     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -40     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    21      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    61      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -59     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    0       NULL    NULL    NULL    NULL    NULL    NULL
+528534767      NULL    -21     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -33     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -30     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -5      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -53     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    34      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -5      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    27      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    NULL    NULL    NULL    NULL    NULL    NULL    NULL
+528534767      NULL    -21     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    43      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    41      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -28     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -5      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    13      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -45     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    10      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -22     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    38      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -48     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    2       NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -37     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -43     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    36      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -1      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -12     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    0       NULL    NULL    NULL    NULL    NULL    NULL
+528534767      NULL    26      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -22     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    9       NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -13     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    38      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -4      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -47     NULL    NULL    1.0     NULL    NULL    0
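[For readers skimming the q.out changes above: the rows where the right-hand operand is zero (for example ctinyint = 0) now come back as NULL for the modulo columns, instead of the whole query failing. The sketch below is a minimal, self-contained illustration of that kind of zero-divisor guard in a long-modulo kernel; the class and method names (SimpleLongModulo, apply) are hypothetical and this is not the Hive LongColModuloLongColumn implementation, only the idea it relies on.]

import java.util.Arrays;

public class SimpleLongModulo {
  /** Computes out[i] = left[i] % right[i]; isNull[i] is set where right[i] == 0. */
  public static void apply(long[] left, long[] right, long[] out, boolean[] isNull) {
    Arrays.fill(isNull, false);
    for (int i = 0; i < out.length; i++) {
      if (right[i] == 0) {
        isNull[i] = true;   // SQL semantics: modulo by zero yields NULL
        out[i] = 1;         // placeholder value; consumers must check isNull first
      } else {
        out[i] = left[i] % right[i];
      }
    }
  }

  public static void main(String[] args) {
    long[] a = {10, 7, 5};
    long[] b = {3, 0, 5};
    long[] r = new long[3];
    boolean[] isNull = new boolean[3];
    apply(a, b, r, isNull);
    for (int i = 0; i < r.length; i++) {
      System.out.println(isNull[i] ? "NULL" : Long.toString(r[i]));  // prints 1, NULL, 0
    }
  }
}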

http://git-wip-us.apache.org/repos/asf/hive/blob/33dc63bb/ql/src/test/results/clientpositive/tez/vectorization_div0.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/tez/vectorization_div0.q.out 
b/ql/src/test/results/clientpositive/tez/vectorization_div0.q.out
index cb7dc90..960ecb9 100644
--- a/ql/src/test/results/clientpositive/tez/vectorization_div0.q.out
+++ b/ql/src/test/results/clientpositive/tez/vectorization_div0.q.out
@@ -1,8 +1,8 @@
 PREHOOK: query: explain vectorization expression
-select cdouble / 0.0 from alltypesorc limit 100
+select cint / 0, ctinyint / 0, cbigint / 0, cdouble / 0.0 from alltypesorc 
limit 100
 PREHOOK: type: QUERY
 POSTHOOK: query: explain vectorization expression
-select cdouble / 0.0 from alltypesorc limit 100
+select cint / 0, ctinyint / 0, cbigint / 0, cdouble / 0.0 from alltypesorc 
limit 100
 POSTHOOK: type: QUERY
 PLAN VECTORIZATION:
   enabled: true
@@ -21,31 +21,31 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: alltypesorc
-                  Statistics: Num rows: 12288 Data size: 73400 Basic stats: 
COMPLETE Column stats: COMPLETE
+                  Statistics: Num rows: 12288 Data size: 220184 Basic stats: 
COMPLETE Column stats: COMPLETE
                   TableScan Vectorization:
                       native: true
                       projectedOutputColumns: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 
10, 11]
                   Select Operator
-                    expressions: (cdouble / 0.0) (type: double)
-                    outputColumnNames: _col0
+                    expressions: (UDFToDouble(cint) / 0.0) (type: double), 
(UDFToDouble(ctinyint) / 0.0) (type: double), (UDFToDouble(cbigint) / 0.0) 
(type: double), (cdouble / 0.0) (type: double)
+                    outputColumnNames: _col0, _col1, _col2, _col3
                     Select Vectorization:
                         className: VectorSelectOperator
                         native: true
-                        projectedOutputColumns: [12]
-                        selectExpressions: DoubleColDivideDoubleScalar(col 5, 
val 0.0) -> 12:double
-                    Statistics: Num rows: 12288 Data size: 98304 Basic stats: 
COMPLETE Column stats: COMPLETE
+                        projectedOutputColumns: [13, 14, 15, 12]
+                        selectExpressions: DoubleColDivideDoubleScalar(col 12, 
val 0.0)(children: CastLongToDouble(col 2) -> 12:double) -> 13:double, 
DoubleColDivideDoubleScalar(col 12, val 0.0)(children: CastLongToDouble(col 0) 
-> 12:double) -> 14:double, DoubleColDivideDoubleScalar(col 12, val 
0.0)(children: CastLongToDouble(col 3) -> 12:double) -> 15:double, 
DoubleColDivideDoubleScalar(col 5, val 0.0) -> 12:double
+                    Statistics: Num rows: 12288 Data size: 393216 Basic stats: 
COMPLETE Column stats: COMPLETE
                     Limit
                       Number of rows: 100
                       Limit Vectorization:
                           className: VectorLimitOperator
                           native: true
-                      Statistics: Num rows: 100 Data size: 800 Basic stats: 
COMPLETE Column stats: COMPLETE
+                      Statistics: Num rows: 100 Data size: 3200 Basic stats: 
COMPLETE Column stats: COMPLETE
                       File Output Operator
                         compressed: false
                         File Sink Vectorization:
                             className: VectorFileSinkOperator
                             native: false
-                        Statistics: Num rows: 100 Data size: 800 Basic stats: 
COMPLETE Column stats: COMPLETE
+                        Statistics: Num rows: 100 Data size: 3200 Basic stats: 
COMPLETE Column stats: COMPLETE
                         table:
                             input format: 
org.apache.hadoop.mapred.SequenceFileInputFormat
                             output format: 
org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -66,114 +66,114 @@ STAGE PLANS:
       Processor Tree:
         ListSink
 
-PREHOOK: query: select cdouble / 0.0 from alltypesorc limit 100
+PREHOOK: query: select cint / 0, ctinyint / 0, cbigint / 0, cdouble / 0.0 from 
alltypesorc limit 100
 PREHOOK: type: QUERY
 PREHOOK: Input: default@alltypesorc
 #### A masked pattern was here ####
-POSTHOOK: query: select cdouble / 0.0 from alltypesorc limit 100
+POSTHOOK: query: select cint / 0, ctinyint / 0, cbigint / 0, cdouble / 0.0 
from alltypesorc limit 100
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@alltypesorc
 #### A masked pattern was here ####
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
-NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
+NULL   NULL    NULL    NULL
 PREHOOK: query: explain vectorization expression
 select (cbigint - 988888L) as s1, cdouble / (cbigint - 988888L) as s2, 1.2 / 
(cbigint - 988888L) 
 from alltypesorc where cbigint > 0 and cbigint < 100000000 order by s1, s2 
limit 100
@@ -612,3 +612,222 @@ POSTHOOK: Input: default@alltypesorc
 0.0    NULL    NULL    NULL    NULL    NULL
 0.0    NULL    NULL    NULL    NULL    NULL
 0.0    NULL    NULL    NULL    NULL    NULL
+PREHOOK: query: explain vectorization expression
+select cint, cbigint, ctinyint, (cint / (cint - 528534767)) as c1, (cbigint / 
(cbigint - 1018195815)) as c2, (ctinyint / ctinyint) as c3, (cint % (cint - 
528534767)) as c4, (cbigint % (cbigint - 1018195815)), (ctinyint % ctinyint) as 
c3
+from alltypesorc where cint > 500000000 or cdouble > 1000000000 or ctinyint = 
0 order by c1, c2 limit 100
+PREHOOK: type: QUERY
+POSTHOOK: query: explain vectorization expression
+select cint, cbigint, ctinyint, (cint / (cint - 528534767)) as c1, (cbigint / 
(cbigint - 1018195815)) as c2, (ctinyint / ctinyint) as c3, (cint % (cint - 
528534767)) as c4, (cbigint % (cbigint - 1018195815)), (ctinyint % ctinyint) as 
c3
+from alltypesorc where cint > 500000000 or cdouble > 1000000000 or ctinyint = 
0 order by c1, c2 limit 100
+POSTHOOK: type: QUERY
+PLAN VECTORIZATION:
+  enabled: true
+  enabledConditionsMet: [hive.vectorized.execution.enabled IS true]
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: alltypesorc
+                  Statistics: Num rows: 12288 Data size: 220184 Basic stats: 
COMPLETE Column stats: COMPLETE
+                  TableScan Vectorization:
+                      native: true
+                      projectedOutputColumns: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 
10, 11]
+                  Filter Operator
+                    Filter Vectorization:
+                        className: VectorFilterOperator
+                        native: true
+                        predicateExpression: FilterExprOrExpr(children: 
FilterLongColGreaterLongScalar(col 2, val 500000000) -> boolean, 
FilterDoubleColGreaterDoubleScalar(col 5, val 1.0E9) -> boolean, 
FilterLongColEqualLongScalar(col 0, val 0) -> boolean) -> boolean
+                    predicate: ((cint > 500000000) or (cdouble > 1.0E9) or 
(ctinyint = 0)) (type: boolean)
+                    Statistics: Num rows: 4227 Data size: 75752 Basic stats: 
COMPLETE Column stats: COMPLETE
+                    Select Operator
+                      expressions: cint (type: int), cbigint (type: bigint), 
ctinyint (type: tinyint), (cint / (cint - 528534767)) (type: double), (cbigint 
/ (cbigint - 1018195815)) (type: double), (ctinyint / ctinyint) (type: double), 
(cint % (cint - 528534767)) (type: int), (cbigint % (cbigint - 1018195815)) 
(type: bigint), (ctinyint % ctinyint) (type: tinyint)
+                      outputColumnNames: _col0, _col1, _col2, _col3, _col4, 
_col5, _col6, _col7, _col8
+                      Select Vectorization:
+                          className: VectorSelectOperator
+                          native: true
+                          projectedOutputColumns: [2, 3, 0, 13, 14, 15, 16, 
17, 12]
+                          selectExpressions: LongColDivideLongColumn(col 2, 
col 12)(children: LongColSubtractLongScalar(col 2, val 528534767) -> 12:long) 
-> 13:double, LongColDivideLongColumn(col 3, col 12)(children: 
LongColSubtractLongScalar(col 3, val 1018195815) -> 12:long) -> 14:double, 
LongColDivideLongColumn(col 0, col 0) -> 15:double, LongColModuloLongColumn(col 
2, col 12)(children: LongColSubtractLongScalar(col 2, val 528534767) -> 
12:long) -> 16:long, LongColModuloLongColumn(col 3, col 12)(children: 
LongColSubtractLongScalar(col 3, val 1018195815) -> 12:long) -> 17:long, 
LongColModuloLongColumn(col 0, col 0) -> 12:long
+                      Statistics: Num rows: 4227 Data size: 219576 Basic 
stats: COMPLETE Column stats: COMPLETE
+                      Reduce Output Operator
+                        key expressions: _col3 (type: double), _col4 (type: 
double)
+                        sort order: ++
+                        Reduce Sink Vectorization:
+                            className: VectorReduceSinkOperator
+                            native: false
+                            nativeConditionsMet: 
hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine 
tez IN [tez, spark] IS true, No DISTINCT columns IS true, BinarySortableSerDe 
for keys IS true, LazyBinarySerDe for values IS true
+                            nativeConditionsNotMet: No TopN IS false
+                        Statistics: Num rows: 4227 Data size: 219576 Basic 
stats: COMPLETE Column stats: COMPLETE
+                        TopN Hash Memory Usage: 0.1
+                        value expressions: _col0 (type: int), _col1 (type: 
bigint), _col2 (type: tinyint), _col5 (type: double), _col6 (type: int), _col7 
(type: bigint), _col8 (type: tinyint)
+            Execution mode: vectorized
+            Map Vectorization:
+                enabled: true
+                enabledConditionsMet: 
hive.vectorized.use.vectorized.input.format IS true
+                groupByVectorOutput: true
+                inputFileFormats: 
org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                allNative: false
+                usesVectorUDFAdaptor: false
+                vectorized: true
+        Reducer 2 
+            Execution mode: vectorized
+            Reduce Vectorization:
+                enabled: true
+                enableConditionsMet: hive.vectorized.execution.reduce.enabled 
IS true, hive.execution.engine tez IN [tez, spark] IS true
+                groupByVectorOutput: true
+                allNative: false
+                usesVectorUDFAdaptor: false
+                vectorized: true
+            Reduce Operator Tree:
+              Select Operator
+                expressions: VALUE._col0 (type: int), VALUE._col1 (type: 
bigint), VALUE._col2 (type: tinyint), KEY.reducesinkkey0 (type: double), 
KEY.reducesinkkey1 (type: double), VALUE._col3 (type: double), VALUE._col4 
(type: int), VALUE._col5 (type: bigint), VALUE._col6 (type: tinyint)
+                outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, 
_col6, _col7, _col8
+                Select Vectorization:
+                    className: VectorSelectOperator
+                    native: true
+                    projectedOutputColumns: [2, 3, 4, 0, 1, 5, 6, 7, 8]
+                Statistics: Num rows: 4227 Data size: 219576 Basic stats: 
COMPLETE Column stats: COMPLETE
+                Limit
+                  Number of rows: 100
+                  Limit Vectorization:
+                      className: VectorLimitOperator
+                      native: true
+                  Statistics: Num rows: 100 Data size: 5216 Basic stats: 
COMPLETE Column stats: COMPLETE
+                  File Output Operator
+                    compressed: false
+                    File Sink Vectorization:
+                        className: VectorFileSinkOperator
+                        native: false
+                    Statistics: Num rows: 100 Data size: 5216 Basic stats: 
COMPLETE Column stats: COMPLETE
+                    table:
+                        input format: 
org.apache.hadoop.mapred.SequenceFileInputFormat
+                        output format: 
org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                        serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: 100
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select cint, cbigint, ctinyint, (cint / (cint - 528534767)) as 
c1, (cbigint / (cbigint - 1018195815)) as c2, (ctinyint / ctinyint) as c3, 
(cint % (cint - 528534767)) as c4, (cbigint % (cbigint - 1018195815)), 
(ctinyint % ctinyint) as c3
+from alltypesorc where cint > 500000000 or cdouble > 1000000000 or ctinyint = 
0 order by c1, c2 limit 100
+PREHOOK: type: QUERY
+PREHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+POSTHOOK: query: select cint, cbigint, ctinyint, (cint / (cint - 528534767)) 
as c1, (cbigint / (cbigint - 1018195815)) as c2, (ctinyint / ctinyint) as c3, 
(cint % (cint - 528534767)) as c4, (cbigint % (cbigint - 1018195815)), 
(ctinyint % ctinyint) as c3
+from alltypesorc where cint > 500000000 or cdouble > 1000000000 or ctinyint = 
0 order by c1, c2 limit 100
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+528534767      NULL    -50     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    33      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -28     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    31      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -34     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    29      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    31      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    NULL    NULL    NULL    NULL    NULL    NULL    NULL
+528534767      NULL    -11     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    61      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    16      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    62      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -23     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -51     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -11     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -48     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -62     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -45     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    40      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    39      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -32     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -56     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -7      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    24      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    36      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -23     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -55     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -11     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    51      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -24     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -44     NULL    NULL    1.0     NULL    NULL    0
+NULL   1018195815      0       NULL    NULL    NULL    NULL    NULL    NULL
+528534767      NULL    24      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    4       NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -57     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -22     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    28      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -16     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    46      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    29      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -56     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -16     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    38      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -54     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -23     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -19     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    40      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    53      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -34     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    5       NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    51      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -4      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    61      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    19      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -33     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    53      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    18      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    30      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -36     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    34      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -55     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -40     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    21      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    61      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -59     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    0       NULL    NULL    NULL    NULL    NULL    NULL
+528534767      NULL    -21     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -33     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -30     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -5      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -53     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    34      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -5      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    27      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    NULL    NULL    NULL    NULL    NULL    NULL    NULL
+528534767      NULL    -21     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    43      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    41      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -28     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -5      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    13      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -45     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    10      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -22     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    38      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -48     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    2       NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -37     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -43     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    36      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -1      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -12     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    0       NULL    NULL    NULL    NULL    NULL    NULL
+528534767      NULL    26      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -22     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    9       NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -13     NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    38      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -4      NULL    NULL    1.0     NULL    NULL    0
+528534767      NULL    -47     NULL    NULL    1.0     NULL    NULL    0
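[A small illustrative reproduction of the underlying Java behavior, assuming the failure mode being guarded against here: a long modulo with a zero divisor throws, so a vectorized kernel that evaluates left[i] % right[i] over a whole batch fails as soon as any divisor in the batch is zero, whereas the expected SQL result, as in the output above, is NULL for that row. The class name ModuloByZeroRepro is hypothetical.]

public class ModuloByZeroRepro {
  public static void main(String[] args) {
    long dividend = 528534767L;
    long divisor = 0L;
    try {
      long r = dividend % divisor;          // throws before assignment completes
      System.out.println(r);
    } catch (ArithmeticException e) {
      // Prints "/ by zero"; a guarded vectorized path would instead mark the row NULL.
      System.out.println(e.getMessage());
    }
  }
}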
