Github user ravipesala commented on a diff in the pull request:

    https://github.com/apache/carbondata/pull/1489#discussion_r150514307
  
    --- Diff: 
integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/preaaggregate/CreatePreAggregateTableCommand.scala
 ---
    @@ -79,24 +79,26 @@ case class CreatePreAggregateTableCommand(
         tableModel.parentTable = Some(parentTable)
         tableModel.dataMapRelation = Some(fieldRelationMap)
         CarbonCreateTableCommand(tableModel, createDSTable = true).run 
(sparkSession)
    -      try {
    +    try {
           val relation = CarbonEnv.getInstance(sparkSession).carbonMetastore.
    -      lookupRelation( 
tableIdentifier)(sparkSession).asInstanceOf[CarbonRelation]
    -      val tableInfo= relation.tableMeta.carbonTable.getTableInfo
    -          // child schema object which will be updated on parent table 
about the
    -          val childSchema = tableInfo.getFactTable
    -            .buildChildSchema(dataMapName,"", tableInfo.getDatabaseName, 
queryString, "AGGREGATION")
    -      childSchema.setProperties(dmproperties.asJava)    // upadting the 
parent table about child table
    -          PreAggregateUtil.updateMainTable(parentDbName, parentTableName, 
childSchema, sparkSession)
    -        val loadAvailable = PreAggregateUtil
    -            .checkMainTableLoad(parentTable)
    -          if (loadAvailable) {
    -            sparkSession.sql(s"insert into ${ cm.databaseName }.${ 
cm.tableName } $queryString")
    -          }} catch {
    -          case e: Exception =>
    -            sparkSession.
    -              sql(s"""DROP TABLE IF EXISTS 
${tableModel.databaseName}.${tableModel.tableName}""")
    -            throw e
    +        
lookupRelation(tableIdentifier)(sparkSession).asInstanceOf[CarbonRelation]
    +      val tableInfo = relation.tableMeta.carbonTable.getTableInfo
    +      // child schema object which will be updated on parent table about 
the
    +      val childSchema = tableInfo.getFactTable
    +        .buildChildSchema(dataMapName, "", tableInfo.getDatabaseName, 
queryString, "AGGREGATION")
    +      dmproperties.foreach(f => childSchema.getProperties.put(f._1, f._2))
    +      // updating the parent table about child table
    +      PreAggregateUtil.updateMainTable(parentDbName, parentTableName, 
childSchema, sparkSession)
    +      val loadAvailable = PreAggregateUtil.checkMainTableLoad(parentTable)
    +      if (loadAvailable) {
    +        sparkSession
    +          .sql(s"insert into ${ tableModel.databaseName }.${ 
tableModel.tableName } $queryString")
    --- End diff --
    
    We could, but since using it programmatically would require adding some 
extra code, we used a SQL query instead. We can take up this refactor in a 
future PR, as we would also need to change other places, such as load.


---

Reply via email to