Github user kumarvishal09 commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/1865#discussion_r164381676
--- Diff:
integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/datamap/CarbonCreateDataMapCommand.scala
---
@@ -79,32 +79,27 @@ case class CarbonCreateDataMapCommand(
}
createPreAggregateTableCommands.flatMap(_.processMetadata(sparkSession))
} else {
- val dataMapSchema = new DataMapSchema(dataMapName, dmClassName)
- dataMapSchema.setProperties(new java.util.HashMap[String,
String](dmproperties.asJava))
- val dbName =
CarbonEnv.getDatabaseName(tableIdentifier.database)(sparkSession)
- // updating the parent table about the data map schema
- PreAggregateUtil.updateMainTable(dbName, tableIdentifier.table,
dataMapSchema, sparkSession)
+ throw new UnsupportedDataMapException(dmClassName)
}
LOGGER.audit(s"DataMap $dataMapName successfully added to Table ${
tableIdentifier.table }")
Seq.empty
}
override def processData(sparkSession: SparkSession): Seq[Row] = {
- if
(dmClassName.equals("org.apache.carbondata.datamap.AggregateDataMapHandler") ||
- dmClassName.equalsIgnoreCase("preaggregate")) {
+ if (dmClassName.equalsIgnoreCase(PREAGGREGATE.getName) ||
+ dmClassName.equalsIgnoreCase(TIMESERIES.getName)) {
createPreAggregateTableCommands.flatMap(_.processData(sparkSession))
} else {
- Seq.empty
+ throw new UnsupportedDataMapException(dmClassName)
}
}
override def undoMetadata(sparkSession: SparkSession, exception:
Exception): Seq[Row] = {
- if
(dmClassName.equals("org.apache.carbondata.datamap.AggregateDataMapHandler") ||
- dmClassName.equalsIgnoreCase("preaggregate")) {
- val timeHierarchyString =
dmproperties.get(CarbonCommonConstants.TIMESERIES_HIERARCHY)
+ if (dmClassName.equalsIgnoreCase(PREAGGREGATE.getName) ||
--- End diff --
Same as above comment
---