Repository: carbondata Updated Branches: refs/heads/master d19f01855 -> fef3384b8
[CARBONDATA-1592] Added new parameters for the create and load events; these will help with some specific validations on the table during create and load. This closes #1615 Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/fef3384b Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/fef3384b Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/fef3384b Branch: refs/heads/master Commit: fef3384b86bbfccf360d549f988bfb9aea1e5131 Parents: d19f018 Author: akashrn5 <[email protected]> Authored: Tue Dec 5 18:52:35 2017 +0530 Committer: Jacky Li <[email protected]> Committed: Thu Dec 7 17:14:22 2017 +0800 ---------------------------------------------------------------------- .../scala/org/apache/carbondata/events/CreateTableEvents.scala | 4 +++- .../main/scala/org/apache/carbondata/events/LoadEvents.scala | 6 ++++-- .../execution/command/management/CarbonLoadDataCommand.scala | 4 +++- .../sql/execution/command/table/CarbonCreateTableCommand.scala | 2 +- 4 files changed, 11 insertions(+), 5 deletions(-) ---------------------------------------------------------------------- http://git-wip-us.apache.org/repos/asf/carbondata/blob/fef3384b/integration/spark-common/src/main/scala/org/apache/carbondata/events/CreateTableEvents.scala ---------------------------------------------------------------------- diff --git a/integration/spark-common/src/main/scala/org/apache/carbondata/events/CreateTableEvents.scala b/integration/spark-common/src/main/scala/org/apache/carbondata/events/CreateTableEvents.scala index 44db43a..abb1ede 100644 --- a/integration/spark-common/src/main/scala/org/apache/carbondata/events/CreateTableEvents.scala +++ b/integration/spark-common/src/main/scala/org/apache/carbondata/events/CreateTableEvents.scala @@ -20,6 +20,7 @@ package org.apache.carbondata.events import org.apache.spark.sql._ import 
org.apache.carbondata.core.metadata.AbsoluteTableIdentifier +import org.apache.carbondata.core.metadata.schema.table.TableInfo /** * Class for handling operations before start of a load process. @@ -27,7 +28,8 @@ import org.apache.carbondata.core.metadata.AbsoluteTableIdentifier */ case class CreateTablePreExecutionEvent( sparkSession: SparkSession, - identifier: AbsoluteTableIdentifier) extends Event with TableEventInfo + identifier: AbsoluteTableIdentifier, + tableInfo: Option[TableInfo]) extends Event with TableEventInfo /** * Class for handling operations after data load completion and before final http://git-wip-us.apache.org/repos/asf/carbondata/blob/fef3384b/integration/spark-common/src/main/scala/org/apache/carbondata/events/LoadEvents.scala ---------------------------------------------------------------------- diff --git a/integration/spark-common/src/main/scala/org/apache/carbondata/events/LoadEvents.scala b/integration/spark-common/src/main/scala/org/apache/carbondata/events/LoadEvents.scala index d87a1af..12f2922 100644 --- a/integration/spark-common/src/main/scala/org/apache/carbondata/events/LoadEvents.scala +++ b/integration/spark-common/src/main/scala/org/apache/carbondata/events/LoadEvents.scala @@ -31,8 +31,10 @@ case class LoadTablePreExecutionEvent(sparkSession: SparkSession, carbonLoadModel: CarbonLoadModel, factPath: String, isDataFrameDefined: Boolean, - optionsFinal: scala.collection - .mutable.Map[String, String]) extends Event with LoadEventInfo + optionsFinal: scala.collection.mutable.Map[String, String], + // userProvidedOptions are needed if we need only the load options given by user + userProvidedOptions: Map[String, String], + isOverWriteTable: Boolean) extends Event with LoadEventInfo /** * Class for handling operations after data load completion and before final 
http://git-wip-us.apache.org/repos/asf/carbondata/blob/fef3384b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonLoadDataCommand.scala ---------------------------------------------------------------------- diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonLoadDataCommand.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonLoadDataCommand.scala index 47467df..63a625b 100644 --- a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonLoadDataCommand.scala +++ b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonLoadDataCommand.scala @@ -148,7 +148,9 @@ case class CarbonLoadDataCommand( carbonLoadModel, factPath, dataFrame.isDefined, - optionsFinal) + optionsFinal, + options, + isOverwriteTable) OperationListenerBus.getInstance.fireEvent(loadTablePreExecutionEvent, operationContext) // First system has to partition the data first and then call the load data LOGGER.info(s"Initiating Direct Load for the Table : ($dbName.$tableName)") http://git-wip-us.apache.org/repos/asf/carbondata/blob/fef3384b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonCreateTableCommand.scala ---------------------------------------------------------------------- diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonCreateTableCommand.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonCreateTableCommand.scala index d967bf2..3ab221c 100644 --- a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonCreateTableCommand.scala +++ b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonCreateTableCommand.scala @@ -75,7 +75,7 @@ case class CarbonCreateTableCommand( val operationContext = new OperationContext 
val createTablePreExecutionEvent: CreateTablePreExecutionEvent = - CreateTablePreExecutionEvent(sparkSession, tableIdentifier) + CreateTablePreExecutionEvent(sparkSession, tableIdentifier, Some(tableInfo)) OperationListenerBus.getInstance.fireEvent(createTablePreExecutionEvent, operationContext) val catalog = CarbonEnv.getInstance(sparkSession).carbonMetastore val carbonSchemaString = catalog.generateTableSchemaString(tableInfo, tableIdentifier)
