null value handled during compaction

Project: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/commit/8ed7931b
Tree: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/tree/8ed7931b
Diff: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/diff/8ed7931b

Branch: refs/heads/branch-1.1
Commit: 8ed7931bae6858a47c4989dde7223b20eecc7803
Parents: 4397d05
Author: rahulforallp <[email protected]>
Authored: Tue Apr 18 12:26:51 2017 +0530
Committer: rahulforallp <[email protected]>
Committed: Tue Apr 18 12:26:51 2017 +0530

----------------------------------------------------------------------
 .../rowreader/AddColumnTestCases.scala          | 22 ++++++++++++++++++++
 .../merger/CompactionResultSortProcessor.java   |  7 ++++---
 2 files changed, 26 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/8ed7931b/integration/spark2/src/test/scala/org/apache/spark/carbondata/restructure/rowreader/AddColumnTestCases.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/test/scala/org/apache/spark/carbondata/restructure/rowreader/AddColumnTestCases.scala b/integration/spark2/src/test/scala/org/apache/spark/carbondata/restructure/rowreader/AddColumnTestCases.scala
index e6fb265..06f480b 100644
--- a/integration/spark2/src/test/scala/org/apache/spark/carbondata/restructure/rowreader/AddColumnTestCases.scala
+++ b/integration/spark2/src/test/scala/org/apache/spark/carbondata/restructure/rowreader/AddColumnTestCases.scala
@@ -131,6 +131,28 @@ class AddColumnTestCases extends QueryTest with BeforeAndAfterAll {
     sql("DROP TABLE IF EXISTS carbon_table")
   }
 
+
+  test("test add column compaction") {
+    sql("DROP TABLE IF EXISTS carbon_table")
+    sql(
+      "CREATE TABLE carbon_table(intField int,stringField string,charField 
string,timestampField " +
+      "timestamp)STORED BY 'carbondata' TBLPROPERTIES" +
+      "('DICTIONARY_EXCLUDE'='charField')")
+    sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/restructure/data1.csv' INTO 
TABLE carbon_table " +
+        
s"options('FILEHEADER'='intField,stringField,charField,timestampField,decimalField')")
+    sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/restructure/data1.csv' INTO 
TABLE carbon_table " +
+        
s"options('FILEHEADER'='intField,stringField,charField,timestampField,decimalField')")
+    sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/restructure/data1.csv' INTO 
TABLE carbon_table " +
+        
s"options('FILEHEADER'='intField,stringField,charField,timestampField,decimalField')")
+    sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/restructure/data1.csv' INTO 
TABLE carbon_table " +
+        
s"options('FILEHEADER'='intField,stringField,charField,timestampField,decimalField')")
+    sql("Alter table carbon_table add columns(decimalField decimal(6,2))")
+
+    sql("Alter table carbon_table compact 'minor'")
+
+    sql("DROP TABLE IF EXISTS carbon_table")
+  }
+
   override def afterAll {
     sql("DROP TABLE IF EXISTS addcolumntest")
     sql("drop table if exists hivetable")

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/8ed7931b/processing/src/main/java/org/apache/carbondata/processing/merger/CompactionResultSortProcessor.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/merger/CompactionResultSortProcessor.java b/processing/src/main/java/org/apache/carbondata/processing/merger/CompactionResultSortProcessor.java
index ebf3683..8c1f577 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/merger/CompactionResultSortProcessor.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/merger/CompactionResultSortProcessor.java
@@ -184,8 +184,7 @@ public class CompactionResultSortProcessor extends AbstractResultProcessor {
    *
    * @param resultIteratorList
    */
-  private void processResult(List<RawResultIterator> resultIteratorList)
-      throws Exception {
+  private void processResult(List<RawResultIterator> resultIteratorList) throws Exception {
     for (RawResultIterator resultIterator : resultIteratorList) {
       while (resultIterator.hasNext()) {
         addRowForSorting(prepareRowObjectForSorting(resultIterator.next()));
@@ -250,7 +249,9 @@ public class CompactionResultSortProcessor extends AbstractResultProcessor {
   private Object getConvertedMeasureValue(Object value, char aggType) {
     switch (aggType) {
       case CarbonCommonConstants.BIG_DECIMAL_MEASURE:
-        value = ((org.apache.spark.sql.types.Decimal) value).toJavaBigDecimal();
+        if (value != null) {
+          value = ((org.apache.spark.sql.types.Decimal) value).toJavaBigDecimal();
+        }
         return value;
       default:
         return value;

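For readers skimming the patch, a minimal standalone sketch of the guarded conversion follows. The class and method names below are hypothetical and only for illustration; they are not part of the CarbonData code base. Rows loaded before the ALTER TABLE ... ADD COLUMNS carry no value for the new decimalField, so during compaction the measure value arrives as null, and calling toJavaBigDecimal() on that null reference would throw a NullPointerException; the null check added to getConvertedMeasureValue prevents exactly that.

import java.math.BigDecimal;

import org.apache.spark.sql.types.Decimal;

// Hypothetical, simplified illustration of the patched branch of
// getConvertedMeasureValue; not taken verbatim from the code base.
public class DecimalConversionSketch {

  // Convert a Spark Decimal measure to java.math.BigDecimal only when a value
  // is present; a null (e.g. from a row written before the column existed) is
  // passed through unchanged instead of causing a NullPointerException.
  static Object toJavaBigDecimalOrNull(Object value) {
    if (value != null) {
      value = ((Decimal) value).toJavaBigDecimal();
    }
    return value;
  }

  public static void main(String[] args) {
    // Row loaded before ALTER TABLE ... ADD COLUMNS: the measure is null.
    System.out.println(toJavaBigDecimalOrNull(null));
    // Row with an actual decimal value: converted to java.math.BigDecimal.
    // Decimal.apply is used here only to build a sample value.
    System.out.println(toJavaBigDecimalOrNull(Decimal.apply(new BigDecimal("123.45"))));
  }
}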