carbondata git commit: [CARBONDATA-2142] Fixed Pre-Aggregate datamap creation issue

2018-02-08 Thread ravipesala
Repository: carbondata
Updated Branches:
  refs/heads/branch-1.3 449668ad9 -> a3b97f384


[CARBONDATA-2142] Fixed Pre-Aggregate datamap creation issue

Fixed an issue where changes were not reverted correctly when pre-aggregate
data map creation fails
Removed look-up while creating the pre-aggregate data map
Removed unused code

This closes #1943


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/a3b97f38
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/a3b97f38
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/a3b97f38

Branch: refs/heads/branch-1.3
Commit: a3b97f38412cf96ee041b6ebfbd7c39af54e391d
Parents: 449668a
Author: kumarvishal 
Authored: Mon Feb 5 15:17:02 2018 +0530
Committer: ravipesala 
Committed: Thu Feb 8 20:21:56 2018 +0530

--
 .../CreatePreAggregateTableCommand.scala| 28 ++--
 .../spark/sql/hive/CarbonHiveMetaStore.scala|  2 +-
 2 files changed, 3 insertions(+), 27 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/carbondata/blob/a3b97f38/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/preaaggregate/CreatePreAggregateTableCommand.scala
--
diff --git 
a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/preaaggregate/CreatePreAggregateTableCommand.scala
 
b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/preaaggregate/CreatePreAggregateTableCommand.scala
index 231a001..17d6882 100644
--- 
a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/preaaggregate/CreatePreAggregateTableCommand.scala
+++ 
b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/preaaggregate/CreatePreAggregateTableCommand.scala
@@ -97,8 +97,6 @@ case class CreatePreAggregateTableCommand(
   None,
   isAlterFlow = false,
   None)
-
-
 // updating the relation identifier, this will be stored in child table
 // which can be used during dropping of pre-aggreate table as parent table 
will
 // also get updated
@@ -138,16 +136,8 @@ case class CreatePreAggregateTableCommand(
   parentTableIdentifier.table,
   childSchema,
   sparkSession)
-// After updating the parent carbon table with data map entry extract the 
latest table object
-// to be used in further create process.
-parentTable = CarbonEnv.getCarbonTable(parentTableIdentifier.database,
-  parentTableIdentifier.table)(sparkSession)
 val updatedLoadQuery = if (timeSeriesFunction.isDefined) {
-  val dataMap = parentTable.getTableInfo.getDataMapSchemaList.asScala
-.filter(p => p.getDataMapName
-  .equalsIgnoreCase(dataMapName)).head
-.asInstanceOf[AggregationDataMapSchema]
-  
PreAggregateUtil.createTimeSeriesSelectQueryFromMain(dataMap.getChildSchema,
+  
PreAggregateUtil.createTimeSeriesSelectQueryFromMain(childSchema.getChildSchema,
 parentTable.getTableName,
 parentTable.getDatabaseName)
 }
@@ -156,11 +146,8 @@ case class CreatePreAggregateTableCommand(
 }
 val dataFrame = sparkSession.sql(new 
CarbonSpark2SqlParser().addPreAggLoadFunction(
   updatedLoadQuery)).drop("preAggLoad")
-val dataMap = parentTable.getTableInfo.getDataMapSchemaList.asScala
-  .filter(dataMap => 
dataMap.getDataMapName.equalsIgnoreCase(dataMapName)).head
-  .asInstanceOf[AggregationDataMapSchema]
 loadCommand = PreAggregateUtil.createLoadCommandForChild(
-  dataMap.getChildSchema.getListOfColumns,
+  childSchema.getChildSchema.getListOfColumns,
   tableIdentifier,
   dataFrame,
   false,
@@ -191,17 +178,6 @@ case class CreatePreAggregateTableCommand(
   throw new UnsupportedOperationException(
 "Cannot create pre-aggregate table when insert is in progress on main 
table")
 } else if (loadAvailable.nonEmpty) {
-  val updatedQuery = if (timeSeriesFunction.isDefined) {
-val dataMap = parentTable.getTableInfo.getDataMapSchemaList.asScala
-  .filter(p => p.getDataMapName
-.equalsIgnoreCase(dataMapName)).head
-  .asInstanceOf[AggregationDataMapSchema]
-
PreAggregateUtil.createTimeSeriesSelectQueryFromMain(dataMap.getChildSchema,
-  parentTable.getTableName,
-  parentTable.getDatabaseName)
-  } else {
-queryString
-  }
   // Passing segmentToLoad as * because we want to load all the segments 
into the
   // pre-aggregate table even if the user has set some segments on the 
parent table.
   loadCommand.dataFrame = Some(PreAggregateUtil

http://git-wip-us.apache.org/repos/asf/carbondata/blob/a3b97f38/integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonHiveMetaStore.scala

carbondata git commit: [CARBONDATA-2142] Fixed Pre-Aggregate datamap creation issue

2018-02-08 Thread ravipesala
Repository: carbondata
Updated Branches:
  refs/heads/master f97db6877 -> 5a4fe1673


[CARBONDATA-2142] Fixed Pre-Aggregate datamap creation issue

Fixed an issue where changes were not reverted correctly when pre-aggregate
data map creation fails
Removed look-up while creating the pre-aggregate data map
Removed unused code

This closes #1943


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/5a4fe167
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/5a4fe167
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/5a4fe167

Branch: refs/heads/master
Commit: 5a4fe1673a6f49696e6e0145dc6f0decc0e25c5a
Parents: f97db68
Author: kumarvishal 
Authored: Mon Feb 5 15:17:02 2018 +0530
Committer: ravipesala 
Committed: Thu Feb 8 20:19:48 2018 +0530

--
 .../CreatePreAggregateTableCommand.scala| 28 ++--
 .../spark/sql/hive/CarbonHiveMetaStore.scala|  2 +-
 2 files changed, 3 insertions(+), 27 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/carbondata/blob/5a4fe167/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/preaaggregate/CreatePreAggregateTableCommand.scala
--
diff --git 
a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/preaaggregate/CreatePreAggregateTableCommand.scala
 
b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/preaaggregate/CreatePreAggregateTableCommand.scala
index 231a001..17d6882 100644
--- 
a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/preaaggregate/CreatePreAggregateTableCommand.scala
+++ 
b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/preaaggregate/CreatePreAggregateTableCommand.scala
@@ -97,8 +97,6 @@ case class CreatePreAggregateTableCommand(
   None,
   isAlterFlow = false,
   None)
-
-
 // updating the relation identifier, this will be stored in child table
 // which can be used during dropping of pre-aggreate table as parent table 
will
 // also get updated
@@ -138,16 +136,8 @@ case class CreatePreAggregateTableCommand(
   parentTableIdentifier.table,
   childSchema,
   sparkSession)
-// After updating the parent carbon table with data map entry extract the 
latest table object
-// to be used in further create process.
-parentTable = CarbonEnv.getCarbonTable(parentTableIdentifier.database,
-  parentTableIdentifier.table)(sparkSession)
 val updatedLoadQuery = if (timeSeriesFunction.isDefined) {
-  val dataMap = parentTable.getTableInfo.getDataMapSchemaList.asScala
-.filter(p => p.getDataMapName
-  .equalsIgnoreCase(dataMapName)).head
-.asInstanceOf[AggregationDataMapSchema]
-  
PreAggregateUtil.createTimeSeriesSelectQueryFromMain(dataMap.getChildSchema,
+  
PreAggregateUtil.createTimeSeriesSelectQueryFromMain(childSchema.getChildSchema,
 parentTable.getTableName,
 parentTable.getDatabaseName)
 }
@@ -156,11 +146,8 @@ case class CreatePreAggregateTableCommand(
 }
 val dataFrame = sparkSession.sql(new 
CarbonSpark2SqlParser().addPreAggLoadFunction(
   updatedLoadQuery)).drop("preAggLoad")
-val dataMap = parentTable.getTableInfo.getDataMapSchemaList.asScala
-  .filter(dataMap => 
dataMap.getDataMapName.equalsIgnoreCase(dataMapName)).head
-  .asInstanceOf[AggregationDataMapSchema]
 loadCommand = PreAggregateUtil.createLoadCommandForChild(
-  dataMap.getChildSchema.getListOfColumns,
+  childSchema.getChildSchema.getListOfColumns,
   tableIdentifier,
   dataFrame,
   false,
@@ -191,17 +178,6 @@ case class CreatePreAggregateTableCommand(
   throw new UnsupportedOperationException(
 "Cannot create pre-aggregate table when insert is in progress on main 
table")
 } else if (loadAvailable.nonEmpty) {
-  val updatedQuery = if (timeSeriesFunction.isDefined) {
-val dataMap = parentTable.getTableInfo.getDataMapSchemaList.asScala
-  .filter(p => p.getDataMapName
-.equalsIgnoreCase(dataMapName)).head
-  .asInstanceOf[AggregationDataMapSchema]
-
PreAggregateUtil.createTimeSeriesSelectQueryFromMain(dataMap.getChildSchema,
-  parentTable.getTableName,
-  parentTable.getDatabaseName)
-  } else {
-queryString
-  }
   // Passing segmentToLoad as * because we want to load all the segments 
into the
   // pre-aggregate table even if the user has set some segments on the 
parent table.
   loadCommand.dataFrame = Some(PreAggregateUtil

http://git-wip-us.apache.org/repos/asf/carbondata/blob/5a4fe167/integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonHiveMetaStore.scala