[CARBONDATA-2791] Fix encoding for Double when scaled value exceeds Long.MAX_VALUE

If the scaling factor (10^decimalCount) * absMaxValue exceeds Long.MAX_VALUE, fall back to
direct compression instead of adaptive encoding, since the scaled values can no longer be
represented exactly as longs.
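
A minimal, standalone Java sketch (not part of this patch) of the overflow the new check guards against; the decimalCount and absMaxValue values below are hypothetical, chosen so the scaled maximum cannot fit in a long:

// Narrowing a double larger than Long.MAX_VALUE to long saturates at Long.MAX_VALUE,
// so such page values cannot be rescaled to exact longs and the adaptive floating
// codecs would lose precision; direct compression keeps the original doubles.
public class ScaledMaxOverflowSketch {
  public static void main(String[] args) {
    int decimalCount = 27;                  // hypothetical: a value with many decimal digits
    double absMaxValue = 4.945464565654656; // hypothetical page maximum (absolute value)
    double scaled = Math.pow(10, decimalCount) * absMaxValue;   // ~4.9e27, far above ~9.2e18
    System.out.println(scaled > Long.MAX_VALUE);                // true
    System.out.println((long) scaled == Long.MAX_VALUE);        // true: the cast saturated
  }
}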

This closes #2569


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/91837a6f
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/91837a6f
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/91837a6f

Branch: refs/heads/external-format
Commit: 91837a6fb0353a80c7be56640045ae821c7d6477
Parents: fc8510a
Author: Indhumathi27 <indhumathi...@gmail.com>
Authored: Fri Jul 27 12:22:25 2018 +0530
Committer: ravipesala <ravi.pes...@gmail.com>
Committed: Sun Jul 29 21:26:54 2018 +0530

----------------------------------------------------------------------
 .../page/encoding/DefaultEncodingFactory.java   | 27 ++++++++++++--------
 .../complexType/TestAdaptiveComplexType.scala   | 17 ++++++++++++
 2 files changed, 33 insertions(+), 11 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/91837a6f/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/DefaultEncodingFactory.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/DefaultEncodingFactory.java b/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/DefaultEncodingFactory.java
index fa8bfad..1cc2ba8 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/DefaultEncodingFactory.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/DefaultEncodingFactory.java
@@ -300,18 +300,23 @@ public class DefaultEncodingFactory extends EncodingFactory {
       return new DirectCompressCodec(DataTypes.DOUBLE);
     } else {
       // double
-      long max = (long) (Math.pow(10, decimalCount) * absMaxValue);
-      DataType adaptiveDataType = fitLongMinMax(max, 0);
-      DataType deltaDataType = compareMinMaxAndSelectDataType(
-          (long) (Math.pow(10, decimalCount) * (maxValue - minValue)));
-      if (adaptiveDataType.getSizeInBytes() > deltaDataType.getSizeInBytes()) {
-        return new AdaptiveDeltaFloatingCodec(srcDataType, deltaDataType, stats);
-      } else if (adaptiveDataType.getSizeInBytes() < DataTypes.DOUBLE.getSizeInBytes() || (
-          (isComplexPrimitive) && (adaptiveDataType.getSizeInBytes() == DataTypes.DOUBLE
-              .getSizeInBytes()))) {
-        return new AdaptiveFloatingCodec(srcDataType, adaptiveDataType, stats);
-      } else {
+      // If absMaxValue exceeds LONG.MAX_VALUE, then go for direct compression
+      if ((Math.pow(10, decimalCount) * absMaxValue) > Long.MAX_VALUE) {
         return new DirectCompressCodec(DataTypes.DOUBLE);
+      } else {
+        long max = (long) (Math.pow(10, decimalCount) * absMaxValue);
+        DataType adaptiveDataType = fitLongMinMax(max, 0);
+        DataType deltaDataType = compareMinMaxAndSelectDataType(
+            (long) (Math.pow(10, decimalCount) * (maxValue - minValue)));
+        if (adaptiveDataType.getSizeInBytes() > deltaDataType.getSizeInBytes()) {
+          return new AdaptiveDeltaFloatingCodec(srcDataType, deltaDataType, stats);
+        } else if (adaptiveDataType.getSizeInBytes() < DataTypes.DOUBLE.getSizeInBytes() || (
+            (isComplexPrimitive) && (adaptiveDataType.getSizeInBytes() == DataTypes.DOUBLE
+                .getSizeInBytes()))) {
+          return new AdaptiveFloatingCodec(srcDataType, adaptiveDataType, stats);
+        } else {
+          return new DirectCompressCodec(DataTypes.DOUBLE);
+        }
       }
     }
   }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/91837a6f/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/complexType/TestAdaptiveComplexType.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/complexType/TestAdaptiveComplexType.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/complexType/TestAdaptiveComplexType.scala
index 6b0a13f..7fff15d 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/complexType/TestAdaptiveComplexType.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/complexType/TestAdaptiveComplexType.scala
@@ -551,4 +551,21 @@ trait TestAdaptiveComplexType extends QueryTest {
      Seq(Row(1, Row(true, "abc", mutable.WrappedArray.make(Array(false, true, false))))))
   }
 
+  test("test Double with large decimalcount") {
+    sql("Drop table if exists adaptive")
+    sql(
+      "create table adaptive(array1 
array<struct<double1:double,double2:double,double3:double>>) " +
+      "stored by 'carbondata'")
+    sql(
+      "insert into adaptive 
values('10.35:40000.35:1.7976931348623157$67890985.888:65.5656:200')," +
+      "('20.25:50000.25:4.945464565654656546546546324$10000000:300000:3000')")
+    checkExistence(sql("select * from adaptive"), true, 
"1.0E7,300000.0,3000.0")
+    sql("Drop table if exists adaptive")
+    sql("create table adaptive(struct_arr struct<array_db1:array<double>>) 
stored by 'carbondata'")
+    sql("insert into adaptive 
values('5555555.9559:12345678991234567:3444.999')")
+    checkExistence(sql("select * from adaptive"),
+      true,
+      "5555555.9559, 1.2345678991234568E16, 3444.999")
+  }
+
 }
