Github user ravipesala commented on a diff in the pull request:

    https://github.com/apache/carbondata/pull/1469#discussion_r153062720
  
    --- Diff: integration/spark2/src/main/scala/org/apache/spark/sql/parser/CarbonSparkSqlParser.scala ---
    @@ -184,10 +126,86 @@ class CarbonSqlAstBuilder(conf: SQLConf) extends SparkSqlAstBuilder(conf) {
         }
       }
     
    -  private def needToConvertToLowerCase(key: String): Boolean = {
    -    val noConvertList = Array("LIST_INFO", "RANGE_INFO")
    -    !noConvertList.exists(x => x.equalsIgnoreCase(key));
    +  def getPropertyKeyValues(ctx: TablePropertyListContext): Map[String, String]
    +  = {
    +    Option(ctx).map(visitPropertyKeyValues)
    +      .getOrElse(Map.empty)
       }
     
    +  def createCarbontable(tableHeader: CreateTableHeaderContext,
    +      skewSpecContext: SkewSpecContext,
    +      bucketSpecContext: BucketSpecContext,
    +      partitionColumns: ColTypeListContext,
    +      columns : ColTypeListContext,
    +      tablePropertyList : TablePropertyListContext) : LogicalPlan = {
    +    // val parser = new CarbonSpark2SqlParser
    +
    +    val (name, temp, ifNotExists, external) = visitCreateTableHeader(tableHeader)
    +    // TODO: implement temporary tables
    +    if (temp) {
    +      throw new ParseException(
    +        "CREATE TEMPORARY TABLE is not supported yet. " +
    +        "Please use CREATE TEMPORARY VIEW as an alternative.", tableHeader)
    +    }
    +    if (skewSpecContext != null) {
    +      operationNotAllowed("CREATE TABLE ... SKEWED BY", skewSpecContext)
    +    }
    +    if (bucketSpecContext != null) {
    +      operationNotAllowed("CREATE TABLE ... CLUSTERED BY", 
bucketSpecContext)
    +    }
    +    val partitionByStructFields = Option(partitionColumns).toSeq.flatMap(visitColTypeList)
    +    val partitionerFields = partitionByStructFields.map { structField =>
    +      PartitionerField(structField.name, Some(structField.dataType.toString), null)
    +    }
    +    val cols = Option(columns).toSeq.flatMap(visitColTypeList)
    +    val properties = getPropertyKeyValues(tablePropertyList)
    +
    +    // Ensuring whether no duplicate name is used in table definition
    +    val colNames = cols.map(_.name)
    +    if (colNames.length != colNames.distinct.length) {
    +      val duplicateColumns = colNames.groupBy(identity).collect {
    +        case (x, ys) if ys.length > 1 => "\"" + x + "\""
    +      }
    +      operationNotAllowed(s"Duplicated column names found in table 
definition of $name: " +
    --- End diff --
    
    Indentation is wrong
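    
    For reference, a minimal standalone sketch of the duplicate-column check with the continuation line indented two spaces past the statement start. This is only an illustration of the indentation style: it uses a plain exception in place of `operationNotAllowed`, and the `duplicateColumns.mkString(...)` argument is assumed, since the diff is cut off above.
    
        def checkDuplicateColumns(colNames: Seq[String], name: String): Unit = {
          if (colNames.length != colNames.distinct.length) {
            // collect each duplicated name, quoted, for the error message
            val duplicateColumns = colNames.groupBy(identity).collect {
              case (x, ys) if ys.length > 1 => "\"" + x + "\""
            }
            // the continuation of the concatenated message is indented two extra spaces
            throw new IllegalArgumentException(
              s"Duplicated column names found in table definition of $name: " +
              duplicateColumns.mkString("[", ",", "]"))
          }
        }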

