This is an automated email from the ASF dual-hosted git repository.

kunalkapoor pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/carbondata.git


The following commit(s) were added to refs/heads/master by this push:
     new 3b29bcb  [CARBONDATA-4284] Load/insert after alter add column on partition table with complex column fails
3b29bcb is described below

commit 3b29bcb1057166c85e05b67a061ec616e809831d
Author: ShreelekhyaG <[email protected]>
AuthorDate: Mon Sep 13 18:44:19 2021 +0530

    [CARBONDATA-4284] Load/insert after alter add column on partition table with complex column fails
    
    Why is this PR needed?
    Insert after an alter add column on a partition table with a complex column fails
    with a BufferUnderflowException.
    The order of columns in the TableSchema changes after alter add column.
    Ex: If the partition column is of dimension type, the schema column order at table
    creation is: dimension columns (including the partition column) + complex column.
    After alter add, the schema columns were reordered by moving the partition column
    to the end: complex column + partition column.
    Because of this reordering, fillDimensionAndMeasureDetails computes wrong indexes,
    as it expects the complex column to always be last, which causes the
    BufferUnderflowException while flattening the complex row.
    (A simplified sketch of this ordering assumption follows the commit message below.)
    
    What changes were proposed in this PR?
    After alter add, removed the code that moved the partition column to the end of the schema.
    
    This closes #4215
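
To make the ordering problem concrete, here is a minimal, self-contained Scala sketch of the
assumption described in the commit message. It is an illustration only: the object, the column
names, and the assumedComplexTail helper are hypothetical and do not reflect CarbonData
internals such as fillDimensionAndMeasureDetails.

    // Hypothetical sketch; names and the ordering helper are made up for
    // illustration and are not CarbonData code.
    object ColumnOrderSketch {
      final case class Col(name: String, isComplex: Boolean)

      // Order at table creation: primitive dimensions (partition column included)
      // first, complex columns last.
      val atCreate: Seq[Col] = Seq(
        Col("id", isComplex = false),
        Col("name", isComplex = false),
        Col("map1", isComplex = true))

      // Order produced by the removed re-append logic after ALTER ADD COLUMNS(intF int):
      // the partition column ends up behind the complex column.
      val afterAlter: Seq[Col] = Seq(
        Col("id", isComplex = false),
        Col("intF", isComplex = false),
        Col("map1", isComplex = true),
        Col("name", isComplex = false))

      // Code that assumes "everything from the first complex column onwards is complex"
      // computes the wrong tail for afterAlter and then reads past the primitive data,
      // which is what surfaces as the BufferUnderflowException while flattening the row.
      private def assumedComplexTail(cols: Seq[Col]): Seq[Col] =
        cols.drop(cols.indexWhere(_.isComplex))

      def main(args: Array[String]): Unit = {
        println(assumedComplexTail(atCreate))   // List(Col(map1,true))                  -> correct
        println(assumedComplexTail(afterAlter)) // List(Col(map1,true), Col(name,false)) -> wrong
      }
    }

The re-append logic removed in the diff below is what produced the afterAlter layout; with it
gone, the partition column keeps its original position and the "complex columns last" invariant
continues to hold.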
---
 .../spark/sql/execution/command/carbonTableSchemaCommon.scala |  5 -----
 .../spark/testsuite/alterTable/TestAlterTableAddColumns.scala | 11 +++++++++++
 2 files changed, 11 insertions(+), 5 deletions(-)

diff --git a/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchemaCommon.scala b/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchemaCommon.scala
index 2096e93..dff6ac1 100644
--- a/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchemaCommon.scala
+++ b/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchemaCommon.scala
@@ -387,11 +387,6 @@ class AlterTableColumnSchemaGenerator(
 
     allColumns = CarbonScalaUtil.reArrangeColumnSchema(allColumns)
 
-    if (tableInfo.getFactTable.getPartitionInfo != null) {
-      val par = tableInfo.getFactTable.getPartitionInfo.getColumnSchemaList
-      allColumns = allColumns.filterNot(b => par.contains(b)) ++= par.asScala
-    }
-
     def getLocalDictColumnList(tableProperties: mutable.Map[String, String],
         columns: mutable.ListBuffer[ColumnSchema]): (mutable.ListBuffer[ColumnSchema],
       mutable.ListBuffer[ColumnSchema]) = {
diff --git a/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/alterTable/TestAlterTableAddColumns.scala b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/alterTable/TestAlterTableAddColumns.scala
index f5bfb32..bd4a112 100644
--- a/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/alterTable/TestAlterTableAddColumns.scala
+++ b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/alterTable/TestAlterTableAddColumns.scala
@@ -481,6 +481,17 @@ class TestAlterTableAddColumns extends QueryTest with BeforeAndAfterAll {
     sql("DROP TABLE IF EXISTS alter_com")
   }
 
+  test("test add column to partition table with complex column") {
+    sql("drop table if exists alter_com")
+    sql("create table alter_com(id int, map1 map<int,int>) " +
+        "partitioned by(name string) stored as carbondata")
+    sql("insert into alter_com values( 1,map(1,2),'sh')")
+    sql("ALTER TABLE alter_com ADD COLUMNS(intF int)")
+    sql("insert into alter_com values(1,map(1,2),1,'df')")
+    checkAnswer(sql("select * from alter_com"),
+      Seq(Row(1, Map(1 -> 2), null, "sh"), Row(1, Map(1 -> 2), 1, "df")))
+  }
+
   test("Validate default values of complex columns added by alter command") {
     sql("DROP TABLE IF EXISTS alter_com")
     sql("CREATE TABLE alter_com(doubleField double, arr1 array<long> ) STORED 
AS carbondata")
