Modified: hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorGroupByOperator.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorGroupByOperator.java?rev=1569850&r1=1569849&r2=1569850&view=diff
==============================================================================
--- hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorGroupByOperator.java (original)
+++ hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorGroupByOperator.java Wed Feb 19 17:43:20 2014
@@ -26,6 +26,8 @@ import static org.junit.Assert.assertTru
 import java.lang.management.ManagementFactory;
 import java.lang.management.MemoryMXBean;
 import java.lang.reflect.Constructor;
+import java.math.BigDecimal;
+import java.math.BigInteger;
 import java.sql.Timestamp;
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -36,6 +38,8 @@ import java.util.List;
 import java.util.Map;
 import java.util.Set;
 
+import org.apache.hadoop.hive.common.type.Decimal128;
+import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.ql.exec.vector.util.FakeCaptureOutputOperator;
 import org.apache.hadoop.hive.ql.exec.vector.util.FakeVectorRowBatchFromConcat;
 import org.apache.hadoop.hive.ql.exec.vector.util.FakeVectorRowBatchFromLongIterables;
@@ -48,6 +52,7 @@ import org.apache.hadoop.hive.ql.plan.Ex
 import org.apache.hadoop.hive.ql.plan.GroupByDesc;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
 import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
@@ -158,9 +163,9 @@ public class TestVectorGroupByOperator {
 
     return desc;
   }
-  
+
   long outputRowCount = 0;
-  
+
   @Test
   public void testMemoryPressureFlush() throws HiveException {
 
@@ -169,22 +174,22 @@ public class TestVectorGroupByOperator {
     mapColumnNames.put("Value", 1);
     VectorizationContext ctx = new VectorizationContext(mapColumnNames, 2);
 
-    GroupByDesc desc = buildKeyGroupByDesc (ctx, "max", 
-        "Value", TypeInfoFactory.longTypeInfo, 
+    GroupByDesc desc = buildKeyGroupByDesc (ctx, "max",
+        "Value", TypeInfoFactory.longTypeInfo,
         "Key", TypeInfoFactory.longTypeInfo);
-    
+
    // Set the memory threshold so that we get 100KB before we need to flush.
     MemoryMXBean memoryMXBean = ManagementFactory.getMemoryMXBean();
     long maxMemory = memoryMXBean.getHeapMemoryUsage().getMax();
-    
+
    float threshold = 100.0f*1024.0f/maxMemory;
    desc.setMemoryThreshold(threshold);
 
     VectorGroupByOperator vgo = new VectorGroupByOperator(ctx, desc);
-    
+
    FakeCaptureOutputOperator out = FakeCaptureOutputOperator.addCaptureOutputChild(vgo);
     vgo.initialize(null, null);
-    
+
     this.outputRowCount = 0;
     out.setOutputInspector(new FakeCaptureOutputOperator.OutputInspector() {
       @Override
@@ -192,7 +197,7 @@ public class TestVectorGroupByOperator {
         ++outputRowCount;
       }
     });
-          
+
     Iterable<Object> it = new Iterable<Object>() {
       @Override
       public Iterator<Object> iterator() {
@@ -215,7 +220,7 @@ public class TestVectorGroupByOperator {
         };
       }
     };
-    
+
    FakeVectorRowBatchFromObjectIterables data = new FakeVectorRowBatchFromObjectIterables(
         100,
         new String[] {"long", "long"},
@@ -223,7 +228,7 @@ public class TestVectorGroupByOperator {
         it);
 
     // The 'it' data source will produce data w/o ever ending
-    // We want to see that memory pressure kicks in and some 
+    // We want to see that memory pressure kicks in and some
     // entries in the VGBY are flushed.
     long countRowsProduced = 0;
     for (VectorizedRowBatch unit: data) {
@@ -237,7 +242,7 @@ public class TestVectorGroupByOperator {
       // It should not go beyond 100k/16 (key+data)
       assertTrue(countRowsProduced < 100*1024/16);
     }
-    
+
     assertTrue(0 < outputRowCount);
   }
 
@@ -596,6 +601,178 @@ public class TestVectorGroupByOperator {
   }
 
   @Test
+  public void testCountDecimal() throws HiveException {
+    testAggregateDecimal(
+        "count",
+        2,
+        Arrays.asList(new Object[]{
+                new Decimal128(1),
+                new Decimal128(2),
+                new Decimal128(3)}),
+       3L);
+  }
+
+  @Test
+  public void testMaxDecimal() throws HiveException {
+    testAggregateDecimal(
+        "max",
+        2,
+        Arrays.asList(new Object[]{
+                new Decimal128(1),
+                new Decimal128(2),
+                new Decimal128(3)}),
+       new Decimal128(3));
+    testAggregateDecimal(
+            "max",
+            2,
+            Arrays.asList(new Object[]{
+                    new Decimal128(3),
+                    new Decimal128(2),
+                    new Decimal128(1)}),
+           new Decimal128(3));
+    testAggregateDecimal(
+            "max",
+            2,
+            Arrays.asList(new Object[]{
+                    new Decimal128(2),
+                    new Decimal128(3),
+                    new Decimal128(1)}),
+           new Decimal128(3));
+  }
+
+  @Test
+  public void testMinDecimal() throws HiveException {
+    testAggregateDecimal(
+        "min",
+        2,
+        Arrays.asList(new Object[]{
+                new Decimal128(1),
+                new Decimal128(2),
+                new Decimal128(3)}),
+       new Decimal128(1));
+    testAggregateDecimal(
+            "min",
+            2,
+            Arrays.asList(new Object[]{
+                    new Decimal128(3),
+                    new Decimal128(2),
+                    new Decimal128(1)}),
+           new Decimal128(1));
+
+    testAggregateDecimal(
+          "min",
+          2,
+          Arrays.asList(new Object[]{
+                  new Decimal128(2),
+                  new Decimal128(1),
+                  new Decimal128(3)}),
+         new Decimal128(1));
+  }
+
+  @Test
+  public void testSumDecimal() throws HiveException {
+    testAggregateDecimal(
+        "sum",
+        2,
+        Arrays.asList(new Object[]{
+                new Decimal128(1),
+                new Decimal128(2),
+                new Decimal128(3)}),
+       new Decimal128(1+2+3));
+  }
+
+  @Test
+  public void testAvgDecimal() throws HiveException {
+    testAggregateDecimal(
+        "avg",
+        2,
+        Arrays.asList(new Object[]{
+                new Decimal128(1),
+                new Decimal128(2),
+                new Decimal128(3)}),
+       HiveDecimal.create((1+2+3)/3));
+  }
+
+  @Test
+  public void testAvgDecimalNegative() throws HiveException {
+    testAggregateDecimal(
+            "avg",
+            2,
+            Arrays.asList(new Object[]{
+                    new Decimal128(-1),
+                    new Decimal128(-2),
+                    new Decimal128(-3)}),
+           HiveDecimal.create((-1-2-3)/3));
+  }
+
+  @Test
+  public void testVarianceDecimal () throws HiveException {
+      testAggregateDecimal(
+        "variance",
+        2,
+        Arrays.asList(new Object[]{
+                new Decimal128(13),
+                new Decimal128(5),
+                new Decimal128(7),
+                new Decimal128(19)}),
+        (double) 30);
+  }
+
+  @Test
+  public void testVarSampDecimal () throws HiveException {
+      testAggregateDecimal(
+        "var_samp",
+        2,
+        Arrays.asList(new Object[]{
+                new Decimal128(13),
+                new Decimal128(5),
+                new Decimal128(7),
+                new Decimal128(19)}),
+        (double) 40);
+  }
+
+  @Test
+  public void testStdPopDecimal () throws HiveException {
+      testAggregateDecimal(
+        "stddev_pop",
+        2,
+        Arrays.asList(new Object[]{
+                new Decimal128(13),
+                new Decimal128(5),
+                new Decimal128(7),
+                new Decimal128(19)}),
+        (double) Math.sqrt(30));
+  }
+
+  @Test
+  public void testStdSampDecimal () throws HiveException {
+      testAggregateDecimal(
+        "stddev_samp",
+        2,
+        Arrays.asList(new Object[]{
+                new Decimal128(13),
+                new Decimal128(5),
+                new Decimal128(7),
+                new Decimal128(19)}),
+        (double) Math.sqrt(40));
+  }
+
+  @Test
+  public void testDecimalKeyTypeAggregate() throws HiveException {
+    testKeyTypeAggregate(
+        "sum",
+        new FakeVectorRowBatchFromObjectIterables(
+            2,
+            new String[] {"decimal(38,0)", "bigint"},
+            Arrays.asList(new Object[]{
+                    new Decimal128(1),null,
+                    new Decimal128(1), null}),
+            Arrays.asList(new Object[]{13L,null,7L, 19L})),
+        buildHashMap(HiveDecimal.create(1), 20L, null, 19L));
+  }
+
+
+  @Test
   public void testCountString() throws HiveException {
     testAggregateString(
         "count",
@@ -1655,6 +1832,9 @@ public class TestVectorGroupByOperator {
         } else if (key instanceof BooleanWritable) {
           BooleanWritable bwKey = (BooleanWritable)key;
           keyValue = bwKey.get();
+        } else if (key instanceof HiveDecimalWritable) {
+            HiveDecimalWritable hdwKey = (HiveDecimalWritable)key;
+            keyValue = hdwKey.getHiveDecimal();
         } else {
           Assert.fail(String.format("Not implemented key output type %s: %s",
               key.getClass().getName(), key));
@@ -1755,6 +1935,19 @@ public class TestVectorGroupByOperator {
     testAggregateLongKeyIterable (aggregateName, fdr, expected);
   }
 
+  public void testAggregateDecimal (
+          String aggregateName,
+          int batchSize,
+          Iterable<Object> values,
+          Object expected) throws HiveException {
+
+        @SuppressWarnings("unchecked")
+        FakeVectorRowBatchFromObjectIterables fdr = new FakeVectorRowBatchFromObjectIterables(
+            batchSize, new String[] {"Decimal"}, values);
+        testAggregateDecimalIterable (aggregateName, fdr, expected);
+      }
+
+
   public void testAggregateString (
       String aggregateName,
       int batchSize,
@@ -1832,6 +2025,15 @@ public class TestVectorGroupByOperator {
         assertEquals (key, (Double) expected, (Double) arr[0]);
       } else if (arr[0] instanceof Long) {
         assertEquals (key, (Long) expected, (Long) arr[0]);
+      } else if (arr[0] instanceof HiveDecimalWritable) {
+        HiveDecimalWritable hdw = (HiveDecimalWritable) arr[0];
+        HiveDecimal hd = hdw.getHiveDecimal();
+        Decimal128 d128 = (Decimal128)expected;
+        assertEquals (key, d128.toBigDecimal(), hd.bigDecimalValue());
+      } else if (arr[0] instanceof HiveDecimal) {
+          HiveDecimal hd = (HiveDecimal) arr[0];
+          Decimal128 d128 = (Decimal128)expected;
+          assertEquals (key, d128.toBigDecimal(), hd.bigDecimalValue());
       } else {
         Assert.fail("Unsupported result type: " + arr[0].getClass().getName());
       }
@@ -1853,11 +2055,16 @@ public class TestVectorGroupByOperator {
         assertEquals (2, vals.length);
 
         assertEquals (true, vals[0] instanceof LongWritable);
-        assertEquals (true, vals[1] instanceof DoubleWritable);
         LongWritable lw = (LongWritable) vals[0];
-        DoubleWritable dw = (DoubleWritable) vals[1];
         assertFalse (lw.get() == 0L);
-        assertEquals (key, (Double) expected, (Double) (dw.get() / lw.get()));
+
+        if (vals[1] instanceof DoubleWritable) {
+          DoubleWritable dw = (DoubleWritable) vals[1];
+          assertEquals (key, (Double) expected, (Double) (dw.get() / lw.get()));
+        } else if (vals[1] instanceof HiveDecimalWritable) {
+          HiveDecimalWritable hdw = (HiveDecimalWritable) vals[1];
+          assertEquals (key, (HiveDecimal) expected, hdw.getHiveDecimal().divide(HiveDecimal.create(lw.get())));
+        }
       }
     }
 
@@ -1935,6 +2142,7 @@ public class TestVectorGroupByOperator {
       {"var_samp", VarianceSampValidator.class},
       {"std", StdValidator.class},
       {"stddev", StdValidator.class},
+      {"stddev_pop", StdValidator.class},
       {"stddev_samp", StdSampValidator.class},
   };
 
@@ -2015,6 +2223,38 @@ public class TestVectorGroupByOperator {
     validator.validate("_total", expected, result);
   }
 
+  public void testAggregateDecimalIterable (
+          String aggregateName,
+          Iterable<VectorizedRowBatch> data,
+          Object expected) throws HiveException {
+        Map<String, Integer> mapColumnNames = new HashMap<String, Integer>();
+        mapColumnNames.put("A", 0);
+        VectorizationContext ctx = new VectorizationContext(mapColumnNames, 1);
+
+        GroupByDesc desc = buildGroupByDescType(ctx, aggregateName, "A",
+            TypeInfoFactory.getDecimalTypeInfo(30, 4));
+
+        VectorGroupByOperator vgo = new VectorGroupByOperator(ctx, desc);
+
+        FakeCaptureOutputOperator out = FakeCaptureOutputOperator.addCaptureOutputChild(vgo);
+        vgo.initialize(null, null);
+
+        for (VectorizedRowBatch unit: data) {
+          vgo.processOp(unit,  0);
+        }
+        vgo.close(false);
+
+        List<Object> outBatchList = out.getCapturedRows();
+        assertNotNull(outBatchList);
+        assertEquals(1, outBatchList.size());
+
+        Object result = outBatchList.get(0);
+
+        Validator validator = getValidator(aggregateName);
+        validator.validate("_total", expected, result);
+      }
+
+
   public void testAggregateDoubleIterable (
       String aggregateName,
       Iterable<VectorizedRowBatch> data,
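
As a quick sanity check on the expected values hard-coded in the variance/stddev tests above: for the input {13, 5, 7, 19} the mean is 11, the squared deviations sum to 4 + 36 + 16 + 64 = 120, so the population variance is 120/4 = 30, the sample variance is 120/3 = 40, and the stddev expectations are the corresponding square roots. A minimal standalone check (plain Java, no Hive classes involved):

    double[] v = {13, 5, 7, 19};
    double mean = (13 + 5 + 7 + 19) / 4.0;   // 11.0
    double ss = 0;
    for (double x : v) {
      ss += (x - mean) * (x - mean);         // accumulates 4 + 36 + 16 + 64 = 120
    }
    assert ss / v.length == 30.0;            // variance / stddev_pop^2
    assert ss / (v.length - 1) == 40.0;      // var_samp / stddev_samp^2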

Modified: hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/FakeVectorRowBatchFromObjectIterables.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/FakeVectorRowBatchFromObjectIterables.java?rev=1569850&r1=1569849&r2=1569850&view=diff
==============================================================================
--- hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/FakeVectorRowBatchFromObjectIterables.java (original)
+++ hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/FakeVectorRowBatchFromObjectIterables.java Wed Feb 19 17:43:20 2014
@@ -23,8 +23,10 @@ import java.util.ArrayList;
 import java.util.Iterator;
 import java.util.List;
 
+import org.apache.hadoop.hive.common.type.Decimal128;
 import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.TimestampUtils;
@@ -138,6 +140,18 @@ public class FakeVectorRowBatchFromObjec
             dcv.vector[row] = Double.valueOf(value.toString());
           }
         };
+      } else if (types[i].toLowerCase().startsWith("decimal")) {
+            batch.cols[i] = new DecimalColumnVector(batchSize, 38, 0);
+            columnAssign[i] = new ColumnVectorAssign() {
+                @Override
+                public void assign(
+                        ColumnVector columnVector,
+                        int row,
+                        Object value) {
+                    DecimalColumnVector dcv = (DecimalColumnVector) columnVector;
+                    dcv.vector[row] = (Decimal128)value;
+                }
+            };
       } else {
         throw new HiveException("Unimplemented type " + types[i]);
       }
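
The new branch matches any type string that starts with "decimal" (the tests above pass both "Decimal" and "decimal(38,0)") and always sizes the column vector as decimal(38,0), ignoring any declared precision and scale. A minimal sketch of how a test feeds it, mirroring testDecimalKeyTypeAggregate above:

    // Sketch only: a two-column fake batch source, as used by the group-by tests.
    FakeVectorRowBatchFromObjectIterables source =
        new FakeVectorRowBatchFromObjectIterables(
            2,                                          // rows per batch
            new String[] {"decimal(38,0)", "bigint"},   // per-column type strings
            Arrays.asList(new Object[] {new Decimal128(1), null}),
            Arrays.asList(new Object[] {13L, null}));
    for (VectorizedRowBatch batch : source) {
      // hand each batch to the operator under test, e.g. vgo.processOp(batch, 0);
    }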

Added: hive/trunk/ql/src/test/queries/clientpositive/vector_decimal_aggregate.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/vector_decimal_aggregate.q?rev=1569850&view=auto
==============================================================================
--- hive/trunk/ql/src/test/queries/clientpositive/vector_decimal_aggregate.q (added)
+++ hive/trunk/ql/src/test/queries/clientpositive/vector_decimal_aggregate.q Wed Feb 19 17:43:20 2014
@@ -0,0 +1,20 @@
+CREATE TABLE decimal_vgby STORED AS ORC AS 
+       SELECT cdouble, CAST (((cdouble*22.1)/37) AS DECIMAL(20,10)) AS cdecimal1, 
+       CAST (((cdouble*9.3)/13) AS DECIMAL(23,14)) AS cdecimal2,
+       cint
+       FROM alltypesorc;
+
+SET hive.vectorized.execution.enabled=true;
+
+EXPLAIN SELECT cint,
+       COUNT(cdecimal1), MAX(cdecimal1), MIN(cdecimal1), SUM(cdecimal1), AVG(cdecimal1), STDDEV_POP(cdecimal1), STDDEV_SAMP(cdecimal1),
+       COUNT(cdecimal2), MAX(cdecimal2), MIN(cdecimal2), SUM(cdecimal2), AVG(cdecimal2), STDDEV_POP(cdecimal2), STDDEV_SAMP(cdecimal2)
+       FROM decimal_vgby
+       GROUP BY cint
+       HAVING COUNT(*) > 1;
+SELECT cint,
+       COUNT(cdecimal1), MAX(cdecimal1), MIN(cdecimal1), SUM(cdecimal1), AVG(cdecimal1), STDDEV_POP(cdecimal1), STDDEV_SAMP(cdecimal1),
+       COUNT(cdecimal2), MAX(cdecimal2), MIN(cdecimal2), SUM(cdecimal2), AVG(cdecimal2), STDDEV_POP(cdecimal2), STDDEV_SAMP(cdecimal2)
+       FROM decimal_vgby
+       GROUP BY cint
+       HAVING COUNT(*) > 1;
\ No newline at end of file

Added: hive/trunk/ql/src/test/queries/clientpositive/vector_decimal_mapjoin.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/vector_decimal_mapjoin.q?rev=1569850&view=auto
==============================================================================
--- hive/trunk/ql/src/test/queries/clientpositive/vector_decimal_mapjoin.q (added)
+++ hive/trunk/ql/src/test/queries/clientpositive/vector_decimal_mapjoin.q Wed Feb 19 17:43:20 2014
@@ -0,0 +1,19 @@
+CREATE TABLE decimal_mapjoin STORED AS ORC AS 
+  SELECT cdouble, CAST (((cdouble*22.1)/37) AS DECIMAL(20,10)) AS cdecimal1, 
+  CAST (((cdouble*9.3)/13) AS DECIMAL(23,14)) AS cdecimal2,
+  cint
+  FROM alltypesorc;
+ 
+SET hive.auto.convert.join=true;
+SET hive.auto.convert.join.nonconditionaltask=true;
+SET hive.auto.convert.join.nonconditionaltask.size=1000000000;
+SET hive.vectorized.execution.enabled=true;
+
+EXPLAIN SELECT l.cint, r.cint, l.cdecimal1, r.cdecimal2
+  FROM decimal_mapjoin l
+  JOIN decimal_mapjoin r ON l.cint = r.cint
+  WHERE l.cint = 6981;
+SELECT l.cint, r.cint, l.cdecimal1, r.cdecimal2
+  FROM decimal_mapjoin l
+  JOIN decimal_mapjoin r ON l.cint = r.cint
+  WHERE l.cint = 6981;
\ No newline at end of file

Added: hive/trunk/ql/src/test/results/clientpositive/vector_decimal_aggregate.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/vector_decimal_aggregate.q.out?rev=1569850&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/vector_decimal_aggregate.q.out (added)
+++ hive/trunk/ql/src/test/results/clientpositive/vector_decimal_aggregate.q.out Wed Feb 19 17:43:20 2014
@@ -0,0 +1,109 @@
+PREHOOK: query: CREATE TABLE decimal_vgby STORED AS ORC AS 
+       SELECT cdouble, CAST (((cdouble*22.1)/37) AS DECIMAL(20,10)) AS cdecimal1, 
+       CAST (((cdouble*9.3)/13) AS DECIMAL(23,14)) AS cdecimal2,
+       cint
+       FROM alltypesorc
+PREHOOK: type: CREATETABLE_AS_SELECT
+PREHOOK: Input: default@alltypesorc
+POSTHOOK: query: CREATE TABLE decimal_vgby STORED AS ORC AS 
+       SELECT cdouble, CAST (((cdouble*22.1)/37) AS DECIMAL(20,10)) AS cdecimal1, 
+       CAST (((cdouble*9.3)/13) AS DECIMAL(23,14)) AS cdecimal2,
+       cint
+       FROM alltypesorc
+POSTHOOK: type: CREATETABLE_AS_SELECT
+POSTHOOK: Input: default@alltypesorc
+POSTHOOK: Output: default@decimal_vgby
+PREHOOK: query: EXPLAIN SELECT cint,
+       COUNT(cdecimal1), MAX(cdecimal1), MIN(cdecimal1), SUM(cdecimal1), AVG(cdecimal1), STDDEV_POP(cdecimal1), STDDEV_SAMP(cdecimal1),
+       COUNT(cdecimal2), MAX(cdecimal2), MIN(cdecimal2), SUM(cdecimal2), AVG(cdecimal2), STDDEV_POP(cdecimal2), STDDEV_SAMP(cdecimal2)
+       FROM decimal_vgby
+       GROUP BY cint
+       HAVING COUNT(*) > 1
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT cint,
+       COUNT(cdecimal1), MAX(cdecimal1), MIN(cdecimal1), SUM(cdecimal1), AVG(cdecimal1), STDDEV_POP(cdecimal1), STDDEV_SAMP(cdecimal1),
+       COUNT(cdecimal2), MAX(cdecimal2), MIN(cdecimal2), SUM(cdecimal2), AVG(cdecimal2), STDDEV_POP(cdecimal2), STDDEV_SAMP(cdecimal2)
+       FROM decimal_vgby
+       GROUP BY cint
+       HAVING COUNT(*) > 1
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: decimal_vgby
+            Statistics: Num rows: 12288 Data size: 2165060 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: cint (type: int), cdecimal1 (type: decimal(20,10)), cdecimal2 (type: decimal(23,14))
+              outputColumnNames: cint, cdecimal1, cdecimal2
+              Statistics: Num rows: 12288 Data size: 2165060 Basic stats: COMPLETE Column stats: NONE
+              Group By Operator
+                aggregations: count(cdecimal1), max(cdecimal1), min(cdecimal1), sum(cdecimal1), avg(cdecimal1), stddev_pop(cdecimal1), stddev_samp(cdecimal1), count(cdecimal2), max(cdecimal2), min(cdecimal2), sum(cdecimal2), avg(cdecimal2), stddev_pop(cdecimal2), stddev_samp(cdecimal2), count()
+                keys: cint (type: int)
+                mode: hash
+                outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15
+                Statistics: Num rows: 12288 Data size: 2165060 Basic stats: COMPLETE Column stats: NONE
+                Reduce Output Operator
+                  key expressions: _col0 (type: int)
+                  sort order: +
+                  Map-reduce partition columns: _col0 (type: int)
+                  Statistics: Num rows: 12288 Data size: 2165060 Basic stats: COMPLETE Column stats: NONE
+                  value expressions: _col1 (type: bigint), _col2 (type: decimal(20,10)), _col3 (type: decimal(20,10)), _col4 (type: decimal(30,10)), _col5 (type: struct<count:bigint,sum:decimal(24,14)>), _col6 (type: struct<count:bigint,sum:double,variance:double>), _col7 (type: struct<count:bigint,sum:double,variance:double>), _col8 (type: bigint), _col9 (type: decimal(23,14)), _col10 (type: decimal(23,14)), _col11 (type: decimal(33,14)), _col12 (type: struct<count:bigint,sum:decimal(27,18)>), _col13 (type: struct<count:bigint,sum:double,variance:double>), _col14 (type: struct<count:bigint,sum:double,variance:double>), _col15 (type: bigint)
+      Execution mode: vectorized
+      Reduce Operator Tree:
+        Group By Operator
+          aggregations: count(VALUE._col0), max(VALUE._col1), min(VALUE._col2), sum(VALUE._col3), avg(VALUE._col4), stddev_pop(VALUE._col5), stddev_samp(VALUE._col6), count(VALUE._col7), max(VALUE._col8), min(VALUE._col9), sum(VALUE._col10), avg(VALUE._col11), stddev_pop(VALUE._col12), stddev_samp(VALUE._col13), count(VALUE._col14)
+          keys: KEY._col0 (type: int)
+          mode: mergepartial
+          outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15
+          Statistics: Num rows: 6144 Data size: 1082530 Basic stats: COMPLETE Column stats: NONE
+          Filter Operator
+            predicate: (_col15 > 1) (type: boolean)
+            Statistics: Num rows: 2048 Data size: 360843 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: _col0 (type: int), _col1 (type: bigint), _col2 (type: decimal(20,10)), _col3 (type: decimal(20,10)), _col4 (type: decimal(30,10)), _col5 (type: decimal(24,14)), _col6 (type: double), _col7 (type: double), _col8 (type: bigint), _col9 (type: decimal(23,14)), _col10 (type: decimal(23,14)), _col11 (type: decimal(33,14)), _col12 (type: decimal(27,18)), _col13 (type: double), _col14 (type: double)
+              outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14
+              Statistics: Num rows: 2048 Data size: 360843 Basic stats: COMPLETE Column stats: NONE
+              File Output Operator
+                compressed: false
+                Statistics: Num rows: 2048 Data size: 360843 Basic stats: COMPLETE Column stats: NONE
+                table:
+                    input format: org.apache.hadoop.mapred.TextInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+
+PREHOOK: query: SELECT cint,
+       COUNT(cdecimal1), MAX(cdecimal1), MIN(cdecimal1), SUM(cdecimal1), AVG(cdecimal1), STDDEV_POP(cdecimal1), STDDEV_SAMP(cdecimal1),
+       COUNT(cdecimal2), MAX(cdecimal2), MIN(cdecimal2), SUM(cdecimal2), AVG(cdecimal2), STDDEV_POP(cdecimal2), STDDEV_SAMP(cdecimal2)
+       FROM decimal_vgby
+       GROUP BY cint
+       HAVING COUNT(*) > 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@decimal_vgby
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT cint,
+       COUNT(cdecimal1), MAX(cdecimal1), MIN(cdecimal1), SUM(cdecimal1), AVG(cdecimal1), STDDEV_POP(cdecimal1), STDDEV_SAMP(cdecimal1),
+       COUNT(cdecimal2), MAX(cdecimal2), MIN(cdecimal2), SUM(cdecimal2), AVG(cdecimal2), STDDEV_POP(cdecimal2), STDDEV_SAMP(cdecimal2)
+       FROM decimal_vgby
+       GROUP BY cint
+       HAVING COUNT(*) > 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@decimal_vgby
+#### A masked pattern was here ####
+NULL	3072	9318.4351351351	-4298.1513513514	5018444.1081079808	1633.60810810806667	5695.4830821353335	5696.410307714474	3072	11160.715384615385	-5147.907692307693	6010604.3076923073536	1956.576923076922966667	6821.495748565141	6822.606289190906
+-3728	6	5831542.269248378	-3367.6517567568	5817556.0411483778	16510.89638306946651	2174330.2092403853	2381859.406131774	6	6984454.211097692	-4033.445769230769	6967702.8672438458471	1161283.811207307641183333	2604201.2704476737	2852759.5602156054
+-563	2	-515.621072973	-3367.6517567568	-3883.2728297298	-1941.6364148649	1426.0153418919	2016.6902366556312	2	-617.5607769230769	-4033.445769230769	-4651.0065461538459	-2325.50327307692295	1707.9424961538462	2415.395441814127
+762	2	5831542.269248378	1531.2194054054	5833073.4886537834	2916536.7443268917	2915005.524921486	4122440.347736469	2	6984454.211097692	1833.9456923076925	6986288.1567899996925	3493144.07839499984625	3491310.132702692	4937458.140118757
+6981	3	5831542.269248378	-515.621072973	5830511.027102432	1943503.67570081066667	2749258.4550124914	3367140.192906513	3	6984454.211097692	-617.5607769230769	6983219.0895438458462	2327739.696514615282066667	3292794.4113115156	4032833.0678006653
+253665376	1024	9767.0054054054	-9779.5486486487	-347484.0818378374	-339.33992366976309	5708.9563478862	5711.745967572779	1024	11697.969230769231	-11712.99230769231	-416182.64030769233089	-406.428359675480791885	6837.632716002934	6840.973851172274
+528534767	1024	5831542.269248378	-9777.1594594595	11646372.8607481068	11373.41099682432305	257528.9298820665	257654.76860439766	1024	6984454.211097692	-11710.130769230771	13948892.79980307629003	13621.965624807691689482	308443.1074570801	308593.82484083984
+626923679	1024	9723.4027027027	-9778.9513513514	10541.0525297287	10.29399661106318	5742.09145323734	5744.897264034267	1024	11645.746153846154	-11712.276923076923	12625.04759999997746	12.329148046874977988	6877.318722794877	6880.679250101604

Added: hive/trunk/ql/src/test/results/clientpositive/vector_decimal_mapjoin.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/vector_decimal_mapjoin.q.out?rev=1569850&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/vector_decimal_mapjoin.q.out (added)
+++ hive/trunk/ql/src/test/results/clientpositive/vector_decimal_mapjoin.q.out Wed Feb 19 17:43:20 2014
@@ -0,0 +1,206 @@
+PREHOOK: query: CREATE TABLE decimal_mapjoin STORED AS ORC AS 
+  SELECT cdouble, CAST (((cdouble*22.1)/37) AS DECIMAL(20,10)) AS cdecimal1, 
+  CAST (((cdouble*9.3)/13) AS DECIMAL(23,14)) AS cdecimal2,
+  cint
+  FROM alltypesorc
+PREHOOK: type: CREATETABLE_AS_SELECT
+PREHOOK: Input: default@alltypesorc
+POSTHOOK: query: CREATE TABLE decimal_mapjoin STORED AS ORC AS 
+  SELECT cdouble, CAST (((cdouble*22.1)/37) AS DECIMAL(20,10)) AS cdecimal1, 
+  CAST (((cdouble*9.3)/13) AS DECIMAL(23,14)) AS cdecimal2,
+  cint
+  FROM alltypesorc
+POSTHOOK: type: CREATETABLE_AS_SELECT
+POSTHOOK: Input: default@alltypesorc
+POSTHOOK: Output: default@decimal_mapjoin
+PREHOOK: query: EXPLAIN SELECT l.cint, r.cint, l.cdecimal1, r.cdecimal2
+  FROM decimal_mapjoin l
+  JOIN decimal_mapjoin r ON l.cint = r.cint
+  WHERE l.cint = 6981
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT l.cint, r.cint, l.cdecimal1, r.cdecimal2
+  FROM decimal_mapjoin l
+  JOIN decimal_mapjoin r ON l.cint = r.cint
+  WHERE l.cint = 6981
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-4 is a root stage
+  Stage-3 depends on stages: Stage-4
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-4
+    Map Reduce Local Work
+      Alias -> Map Local Tables:
+        l 
+          Fetch Operator
+            limit: -1
+      Alias -> Map Local Operator Tree:
+        l 
+          TableScan
+            alias: l
+            Statistics: Num rows: 12288 Data size: 2165060 Basic stats: COMPLETE Column stats: NONE
+            Filter Operator
+              predicate: (cint = 6981) (type: boolean)
+              Statistics: Num rows: 6144 Data size: 1082530 Basic stats: COMPLETE Column stats: NONE
+              HashTable Sink Operator
+                condition expressions:
+                  0 {cdecimal1} {cint}
+                  1 {cdecimal2} {cint}
+                keys:
+                  0 cint (type: int)
+                  1 cint (type: int)
+
+  Stage: Stage-3
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: r
+            Statistics: Num rows: 12288 Data size: 2165060 Basic stats: COMPLETE Column stats: NONE
+            Filter Operator
+              predicate: (cint = 6981) (type: boolean)
+              Statistics: Num rows: 6144 Data size: 1082530 Basic stats: COMPLETE Column stats: NONE
+              Map Join Operator
+                condition map:
+                     Inner Join 0 to 1
+                condition expressions:
+                  0 {cdecimal1} {cint}
+                  1 {cdecimal2} {cint}
+                keys:
+                  0 cint (type: int)
+                  1 cint (type: int)
+                outputColumnNames: _col1, _col3, _col8, _col9
+                Statistics: Num rows: 6758 Data size: 1190783 Basic stats: COMPLETE Column stats: NONE
+                Select Operator
+                  expressions: _col3 (type: int), _col9 (type: int), _col1 (type: decimal(20,10)), _col8 (type: decimal(23,14))
+                  outputColumnNames: _col0, _col1, _col2, _col3
+                  Statistics: Num rows: 6758 Data size: 1190783 Basic stats: COMPLETE Column stats: NONE
+                  File Output Operator
+                    compressed: false
+                    Statistics: Num rows: 6758 Data size: 1190783 Basic stats: COMPLETE Column stats: NONE
+                    table:
+                        input format: org.apache.hadoop.mapred.TextInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+      Local Work:
+        Map Reduce Local Work
+      Execution mode: vectorized
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+
+PREHOOK: query: SELECT l.cint, r.cint, l.cdecimal1, r.cdecimal2
+  FROM decimal_mapjoin l
+  JOIN decimal_mapjoin r ON l.cint = r.cint
+  WHERE l.cint = 6981
+PREHOOK: type: QUERY
+PREHOOK: Input: default@decimal_mapjoin
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT l.cint, r.cint, l.cdecimal1, r.cdecimal2
+  FROM decimal_mapjoin l
+  JOIN decimal_mapjoin r ON l.cint = r.cint
+  WHERE l.cint = 6981
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@decimal_mapjoin
+#### A masked pattern was here ####
+6981   6981    NULL    NULL
+6981   6981    NULL    NULL
+6981   6981    NULL    NULL
+6981   6981    NULL    NULL
+6981   6981    5831542.269248378       NULL
+6981   6981    NULL    NULL
+6981   6981    NULL    NULL
+6981   6981    NULL    NULL
+6981   6981    -515.621072973  NULL
+6981   6981    -515.621072973  NULL
+6981   6981    NULL    NULL
+6981   6981    NULL    NULL
+6981   6981    NULL    NULL
+6981   6981    NULL    NULL
+6981   6981    5831542.269248378       NULL
+6981   6981    NULL    NULL
+6981   6981    NULL    NULL
+6981   6981    NULL    NULL
+6981   6981    -515.621072973  NULL
+6981   6981    -515.621072973  NULL
+6981   6981    NULL    NULL
+6981   6981    NULL    NULL
+6981   6981    NULL    NULL
+6981   6981    NULL    NULL
+6981   6981    5831542.269248378       NULL
+6981   6981    NULL    NULL
+6981   6981    NULL    NULL
+6981   6981    NULL    NULL
+6981   6981    -515.621072973  NULL
+6981   6981    -515.621072973  NULL
+6981   6981    NULL    NULL
+6981   6981    NULL    NULL
+6981   6981    NULL    NULL
+6981   6981    NULL    NULL
+6981   6981    5831542.269248378       NULL
+6981   6981    NULL    NULL
+6981   6981    NULL    NULL
+6981   6981    NULL    NULL
+6981   6981    -515.621072973  NULL
+6981   6981    -515.621072973  NULL
+6981   6981    NULL    6984454.211097692
+6981   6981    NULL    6984454.211097692
+6981   6981    NULL    6984454.211097692
+6981   6981    NULL    6984454.211097692
+6981   6981    5831542.269248378       6984454.211097692
+6981   6981    NULL    6984454.211097692
+6981   6981    NULL    6984454.211097692
+6981   6981    NULL    6984454.211097692
+6981   6981    -515.621072973  6984454.211097692
+6981   6981    -515.621072973  6984454.211097692
+6981   6981    NULL    NULL
+6981   6981    NULL    NULL
+6981   6981    NULL    NULL
+6981   6981    NULL    NULL
+6981   6981    5831542.269248378       NULL
+6981   6981    NULL    NULL
+6981   6981    NULL    NULL
+6981   6981    NULL    NULL
+6981   6981    -515.621072973  NULL
+6981   6981    -515.621072973  NULL
+6981   6981    NULL    NULL
+6981   6981    NULL    NULL
+6981   6981    NULL    NULL
+6981   6981    NULL    NULL
+6981   6981    5831542.269248378       NULL
+6981   6981    NULL    NULL
+6981   6981    NULL    NULL
+6981   6981    NULL    NULL
+6981   6981    -515.621072973  NULL
+6981   6981    -515.621072973  NULL
+6981   6981    NULL    NULL
+6981   6981    NULL    NULL
+6981   6981    NULL    NULL
+6981   6981    NULL    NULL
+6981   6981    5831542.269248378       NULL
+6981   6981    NULL    NULL
+6981   6981    NULL    NULL
+6981   6981    NULL    NULL
+6981   6981    -515.621072973  NULL
+6981   6981    -515.621072973  NULL
+6981   6981    NULL    -617.5607769230769
+6981   6981    NULL    -617.5607769230769
+6981   6981    NULL    -617.5607769230769
+6981   6981    NULL    -617.5607769230769
+6981   6981    5831542.269248378       -617.5607769230769
+6981   6981    NULL    -617.5607769230769
+6981   6981    NULL    -617.5607769230769
+6981   6981    NULL    -617.5607769230769
+6981   6981    -515.621072973  -617.5607769230769
+6981   6981    -515.621072973  -617.5607769230769
+6981   6981    NULL    -617.5607769230769
+6981   6981    NULL    -617.5607769230769
+6981   6981    NULL    -617.5607769230769
+6981   6981    NULL    -617.5607769230769
+6981   6981    5831542.269248378       -617.5607769230769
+6981   6981    NULL    -617.5607769230769
+6981   6981    NULL    -617.5607769230769
+6981   6981    NULL    -617.5607769230769
+6981   6981    -515.621072973  -617.5607769230769
+6981   6981    -515.621072973  -617.5607769230769

Modified: hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/io/HiveDecimalWritable.java
URL: http://svn.apache.org/viewvc/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/io/HiveDecimalWritable.java?rev=1569850&r1=1569849&r2=1569850&view=diff
==============================================================================
--- hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/io/HiveDecimalWritable.java (original)
+++ hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/io/HiveDecimalWritable.java Wed Feb 19 17:43:20 2014
@@ -148,4 +148,21 @@ public class HiveDecimalWritable impleme
   public int hashCode() {
     return getHiveDecimal().hashCode();
   }
+
+  /**
+   * In order to update a Decimal128 fast (w/o allocation) we need to expose access to the
+   * internal storage bytes and scale.
+   */
+  public byte[] getInternalStorage() {
+    return internalStorage;
+  }
+  
+  /**
+   * In order to update a Decimal128 fast (w/o allocation) we need to expose access to the
+   * internal storage bytes and scale.
+   */
+  public int getScale() {
+    return scale;
+  }
 }
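
These two accessors exist so that vectorized code can rebuild a Decimal128 from a HiveDecimalWritable without round-tripping through BigDecimal; that round trip is exactly what the new TestHiveDecimalWritable below exercises. A minimal sketch of the intended use (illustrative values only):

    // Allocation-free rebuild, as exercised by TestHiveDecimalWritable below.
    HiveDecimalWritable hdw = new HiveDecimalWritable();
    hdw.set(HiveDecimal.create("123.45"));

    // Reconstruct a Decimal128 directly from the writable's internal bytes
    // and scale, without materializing a BigDecimal in between.
    Decimal128 dec = new Decimal128().fastUpdateFromInternalStorage(
        hdw.getInternalStorage(), (short) hdw.getScale());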

Added: hive/trunk/serde/src/test/org/apache/hadoop/hive/serde2/io/TestHiveDecimalWritable.java
URL: http://svn.apache.org/viewvc/hive/trunk/serde/src/test/org/apache/hadoop/hive/serde2/io/TestHiveDecimalWritable.java?rev=1569850&view=auto
==============================================================================
--- hive/trunk/serde/src/test/org/apache/hadoop/hive/serde2/io/TestHiveDecimalWritable.java (added)
+++ hive/trunk/serde/src/test/org/apache/hadoop/hive/serde2/io/TestHiveDecimalWritable.java Wed Feb 19 17:43:20 2014
@@ -0,0 +1,220 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.serde2.io;
+
+import junit.framework.Assert;
+
+import java.math.BigDecimal;
+import java.math.BigInteger;
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.util.ArrayList;
+
+import org.apache.hadoop.hive.common.type.Decimal128;
+import org.apache.hadoop.hive.common.type.HiveDecimal;
+import org.apache.hive.common.util.Decimal128FastBuffer;
+import org.junit.Before;
+import org.junit.Test;
+
+/**
+ * Unit tests for testing the fast allocation-free conversion
+ * between HiveDecimalWritable and Decimal128.
+ */
+public class TestHiveDecimalWritable {
+
+    private Decimal128FastBuffer scratch;
+
+    @Before
+    public void setUp() throws Exception {
+      scratch = new Decimal128FastBuffer();
+    }
+
+    private void doTestFastStreamForHiveDecimal(String valueString) {
+      BigDecimal value = new BigDecimal(valueString);
+      Decimal128 dec = new Decimal128();
+      dec.update(value);
+
+      HiveDecimalWritable witness = new HiveDecimalWritable();
+      witness.set(HiveDecimal.create(value));
+
+      int bufferUsed = dec.fastSerializeForHiveDecimal(scratch);
+      HiveDecimalWritable hdw = new HiveDecimalWritable();
+      hdw.set(scratch.getBytes(bufferUsed), dec.getScale());
+
+      HiveDecimal hd = hdw.getHiveDecimal();
+
+      BigDecimal readValue = hd.bigDecimalValue();
+
+      Assert.assertEquals(value, readValue);
+
+      // Now test fastUpdate from the same serialized HiveDecimal
+      Decimal128 decRead = new Decimal128().fastUpdateFromInternalStorage(
+              witness.getInternalStorage(), (short) witness.getScale());
+
+      Assert.assertEquals(dec, decRead);
+
+      // Test fastUpdate from its own (not fully compacted) serialized output
+      Decimal128 decReadSelf = new Decimal128().fastUpdateFromInternalStorage(
+              hdw.getInternalStorage(), (short) hdw.getScale());
+      Assert.assertEquals(dec, decReadSelf);
+    }
+
+    @Test
+    public void testFastStreamForHiveDecimal() {
+
+      doTestFastStreamForHiveDecimal("0");
+      doTestFastStreamForHiveDecimal("-0");
+      doTestFastStreamForHiveDecimal("1");
+      doTestFastStreamForHiveDecimal("-1");
+      doTestFastStreamForHiveDecimal("2");
+      doTestFastStreamForHiveDecimal("-2");
+      doTestFastStreamForHiveDecimal("127");
+      doTestFastStreamForHiveDecimal("-127");
+      doTestFastStreamForHiveDecimal("128");
+      doTestFastStreamForHiveDecimal("-128");
+      doTestFastStreamForHiveDecimal("255");
+      doTestFastStreamForHiveDecimal("-255");
+      doTestFastStreamForHiveDecimal("256");
+      doTestFastStreamForHiveDecimal("-256");
+      doTestFastStreamForHiveDecimal("65535");
+      doTestFastStreamForHiveDecimal("-65535");
+      doTestFastStreamForHiveDecimal("65536");
+      doTestFastStreamForHiveDecimal("-65536");
+
+      doTestFastStreamForHiveDecimal("10");
+      doTestFastStreamForHiveDecimal("1000");
+      doTestFastStreamForHiveDecimal("1000000");
+      doTestFastStreamForHiveDecimal("1000000000");
+      doTestFastStreamForHiveDecimal("1000000000000");
+      doTestFastStreamForHiveDecimal("1000000000000000");
+      doTestFastStreamForHiveDecimal("1000000000000000000");
+      doTestFastStreamForHiveDecimal("1000000000000000000000");
+      doTestFastStreamForHiveDecimal("1000000000000000000000000");
+      doTestFastStreamForHiveDecimal("1000000000000000000000000000");
+      doTestFastStreamForHiveDecimal("1000000000000000000000000000000");
+
+      doTestFastStreamForHiveDecimal("-10");
+      doTestFastStreamForHiveDecimal("-1000");
+      doTestFastStreamForHiveDecimal("-1000000");
+      doTestFastStreamForHiveDecimal("-1000000000");
+      doTestFastStreamForHiveDecimal("-1000000000000");
+      doTestFastStreamForHiveDecimal("-1000000000000000000");
+      doTestFastStreamForHiveDecimal("-1000000000000000000000");
+      doTestFastStreamForHiveDecimal("-1000000000000000000000000");
+      doTestFastStreamForHiveDecimal("-1000000000000000000000000000");
+      doTestFastStreamForHiveDecimal("-1000000000000000000000000000000");
+
+
+      doTestFastStreamForHiveDecimal("0.01");
+      doTestFastStreamForHiveDecimal("-0.01");
+      doTestFastStreamForHiveDecimal("0.02");
+      doTestFastStreamForHiveDecimal("-0.02");
+      doTestFastStreamForHiveDecimal("0.0127");
+      doTestFastStreamForHiveDecimal("-0.0127");
+      doTestFastStreamForHiveDecimal("0.0128");
+      doTestFastStreamForHiveDecimal("-0.0128");
+      doTestFastStreamForHiveDecimal("0.0255");
+      doTestFastStreamForHiveDecimal("-0.0255");
+      doTestFastStreamForHiveDecimal("0.0256");
+      doTestFastStreamForHiveDecimal("-0.0256");
+      doTestFastStreamForHiveDecimal("0.065535");
+      doTestFastStreamForHiveDecimal("-0.065535");
+      doTestFastStreamForHiveDecimal("0.065536");
+      doTestFastStreamForHiveDecimal("-0.065536");
+
+      doTestFastStreamForHiveDecimal("0.101");
+      doTestFastStreamForHiveDecimal("0.10001");
+      doTestFastStreamForHiveDecimal("0.10000001");
+      doTestFastStreamForHiveDecimal("0.10000000001");
+      doTestFastStreamForHiveDecimal("0.10000000000001");
+      doTestFastStreamForHiveDecimal("0.10000000000000001");
+      doTestFastStreamForHiveDecimal("0.10000000000000000001");
+      doTestFastStreamForHiveDecimal("0.10000000000000000000001");
+      doTestFastStreamForHiveDecimal("0.10000000000000000000000001");
+      doTestFastStreamForHiveDecimal("0.10000000000000000000000000001");
+      doTestFastStreamForHiveDecimal("0.10000000000000000000000000000001");
+
+      doTestFastStreamForHiveDecimal("-0.101");
+      doTestFastStreamForHiveDecimal("-0.10001");
+      doTestFastStreamForHiveDecimal("-0.10000001");
+      doTestFastStreamForHiveDecimal("-0.10000000001");
+      doTestFastStreamForHiveDecimal("-0.10000000000001");
+      doTestFastStreamForHiveDecimal("-0.10000000000000000001");
+      doTestFastStreamForHiveDecimal("-0.10000000000000000000001");
+      doTestFastStreamForHiveDecimal("-0.10000000000000000000000001");
+      doTestFastStreamForHiveDecimal("-0.10000000000000000000000000001");
+      doTestFastStreamForHiveDecimal("-0.10000000000000000000000000000001");
+
+      doTestFastStreamForHiveDecimal(Integer.toString(Integer.MAX_VALUE));
+      doTestFastStreamForHiveDecimal(Integer.toString(Integer.MIN_VALUE));
+      doTestFastStreamForHiveDecimal(Long.toString(Long.MAX_VALUE));
+      doTestFastStreamForHiveDecimal(Long.toString(Long.MIN_VALUE));
+      doTestFastStreamForHiveDecimal(Decimal128.MAX_VALUE.toFormalString());
+      doTestFastStreamForHiveDecimal(Decimal128.MIN_VALUE.toFormalString());
+
+      // Test known serialization tricky values
+      int[] values = new int[] {
+              0x80,
+              0x8000,
+              0x800000,
+              0x80000000,
+              0x81,
+              0x8001,
+              0x800001,
+              0x80000001,
+              0x7f,
+              0x7fff,
+              0x7fffff,
+              0x7fffffff,
+              0xff,
+              0xffff,
+              0xffffff,
+              0xffffffff};
+
+
+      for(int value: values) {
+          for (int i = 0; i < 4; ++i) {
+              int[] pos = new int[] {1, 0, 0, 0, 0};
+              int[] neg = new int[] {0xff, 0, 0, 0, 0};
+
+              pos[i+1] = neg[i+1] = value;
+
+              doTestDecimalWithBoundsCheck(new Decimal128().update32(pos, 0));
+              doTestDecimalWithBoundsCheck(new Decimal128().update32(neg, 0));
+              doTestDecimalWithBoundsCheck(new Decimal128().update64(pos, 0));
+              doTestDecimalWithBoundsCheck(new Decimal128().update64(neg, 0));
+              doTestDecimalWithBoundsCheck(new Decimal128().update96(pos, 0));
+              doTestDecimalWithBoundsCheck(new Decimal128().update96(neg, 0));
+              doTestDecimalWithBoundsCheck(new Decimal128().update128(pos, 0));
+              doTestDecimalWithBoundsCheck(new Decimal128().update128(neg, 0));
+          }
+      }
+    }
+
+    void doTestDecimalWithBoundsCheck(Decimal128 value) {
+       if ((value.compareTo(Decimal128.MAX_VALUE)) > 0 ||
+           (value.compareTo(Decimal128.MIN_VALUE)) < 0) {
+             // Ignore this one, out of bounds and HiveDecimal will NPE
+             return;
+       }
+       doTestFastStreamForHiveDecimal(value.toFormalString());
+    }
+
+}
+

