Github user jackylk commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/1489#discussion_r150504460
--- Diff:
integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/datamap/CarbonDropDataMapCommand.scala
---
@@ -0,0 +1,141 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql.execution.command.datamap
+
+import scala.collection.JavaConverters._
+import scala.collection.mutable.ListBuffer
+
+import org.apache.spark.sql.{CarbonEnv, GetDB, Row, SparkSession}
+import org.apache.spark.sql.catalyst.TableIdentifier
+import org.apache.spark.sql.catalyst.analysis.NoSuchTableException
+import org.apache.spark.sql.execution.command.{DataProcessCommand,
RunnableCommand, SchemaProcessCommand}
+import
org.apache.spark.sql.execution.command.preaaggregate.PreAggregateUtil
+import org.apache.spark.sql.hive.CarbonRelation
+
+import org.apache.carbondata.common.logging.{LogService, LogServiceFactory}
+import org.apache.carbondata.core.datamap.DataMapStoreManager
+import org.apache.carbondata.core.locks.{CarbonLockUtil, ICarbonLock,
LockUsage}
+import org.apache.carbondata.core.metadata.{AbsoluteTableIdentifier,
CarbonTableIdentifier}
+import
org.apache.carbondata.core.metadata.converter.ThriftWrapperSchemaConverterImpl
+import org.apache.carbondata.core.metadata.schema.table.CarbonTable
+import org.apache.carbondata.events._
+
+
+case class CarbonDropDataMapCommand(
+ dataMapName: String,
+ ifExistsSet: Boolean,
+ databaseNameOp: Option[String],
+ tableName: String)
+ extends RunnableCommand with SchemaProcessCommand with
DataProcessCommand {
+
+ override def run(sparkSession: SparkSession): Seq[Row] = {
+ processSchema(sparkSession)
+ processData(sparkSession)
+ }
+
+ override def processSchema(sparkSession: SparkSession): Seq[Row] = {
+ val LOGGER: LogService =
LogServiceFactory.getLogService(this.getClass.getCanonicalName)
+ val dbName = GetDB.getDatabaseName(databaseNameOp, sparkSession)
+ val identifier = TableIdentifier(tableName, Option(dbName))
+ val carbonTableIdentifier = new CarbonTableIdentifier(dbName,
tableName, "")
+ val locksToBeAcquired = List(LockUsage.METADATA_LOCK)
+ val carbonEnv = CarbonEnv.getInstance(sparkSession)
+ val catalog = carbonEnv.carbonMetastore
+ val tableIdentifier =
+
AbsoluteTableIdentifier.from(CarbonEnv.getInstance(sparkSession).storePath,
+ dbName.toLowerCase, tableName.toLowerCase)
+
catalog.checkSchemasModifiedTimeAndReloadTables(tableIdentifier.getStorePath)
+ val carbonLocks: scala.collection.mutable.ListBuffer[ICarbonLock] =
ListBuffer()
+ try {
+ locksToBeAcquired foreach {
+ lock => carbonLocks +=
CarbonLockUtil.getLockObject(carbonTableIdentifier, lock)
+ }
+ LOGGER.audit(s"Deleting datamap [$dataMapName] under table
[$tableName]")
+ val carbonTable: Option[CarbonTable] =
+ catalog.getTableFromMetadataCache(dbName, tableName) match {
+ case Some(tableMeta) => Some(tableMeta.carbonTable)
+ case None => try {
+ Some(catalog.lookupRelation(identifier)(sparkSession)
+ .asInstanceOf[CarbonRelation].metaData.carbonTable)
+ } catch {
+ case ex: NoSuchTableException =>
+ if (!ifExistsSet) {
+ throw ex
+ }
+ None
+ }
+ }
+ if (carbonTable.isDefined &&
carbonTable.get.getTableInfo.getDataMapSchemaList.size() > 0) {
+ val dataMapSchema =
carbonTable.get.getTableInfo.getDataMapSchemaList.asScala.zipWithIndex.
+ find(_._1.getDataMapName.equalsIgnoreCase(dataMapName))
+ if (dataMapSchema.isDefined) {
+
+ val operationContext = new OperationContext
+ val dropDataMapPreEvent =
+ DropDataMapPreEvent(
+ Some(dataMapSchema.get._1),
+ ifExistsSet,
+ sparkSession)
+ OperationListenerBus.getInstance.fireEvent(dropDataMapPreEvent,
operationContext)
+
+
carbonTable.get.getTableInfo.getDataMapSchemaList.remove(dataMapSchema.get._2)
+ val schemaConverter = new ThriftWrapperSchemaConverterImpl
+ PreAggregateUtil
+ .updateSchemaInfo(carbonTable.get,
--- End diff --
move the `.updateSchemaInfo(` call up to the previous line (i.e. keep it on the same line as `PreAggregateUtil`), and move the first parameter `carbonTable.get` down to the next line
---