Github user kunal642 commented on a diff in the pull request:

    https://github.com/apache/carbondata/pull/1601#discussion_r155712770
  
    --- Diff: 
integration/spark2/src/main/scala/org/apache/spark/sql/parser/CarbonSparkSqlParser.scala
 ---
    @@ -232,6 +233,30 @@ class CarbonHelperSqlAstBuilder(conf: SQLConf, parser: 
CarbonSpark2SqlParser)
         CarbonCreateTableCommand(tableModel, tablePath)
       }
     
    +  private def validateTableProperties(properties: Map[String, String]): 
Map[String, String] = {
    +    var isSupported = true
    +    val invalidOptions = StringBuilder.newBuilder
    +    val tableProperties = Seq("DICTIONARY_INCLUDE", "DICTIONARY_EXCLUDE", 
"NO_INVERTED_INDEX",
    +      "SORT_COLUMNS", "TABLE_BLOCKSIZE", "STREAMING", "SORT_SCOPE", 
"COMMENT", "PARTITION_TYPE",
    +      "NUM_PARTITIONS", "RANGE_INFO", "LIST_INFO", "BUCKETNUMBER", 
"BUCKETCOLUMNS", "TABLENAME")
    +    val tblProperties: Map[String, String] = properties.filter { property 
=>
    +      if (!(tableProperties.exists(prop => 
prop.equalsIgnoreCase(property._1))
    --- End diff --
    
    We can just iterate over the map and check whether any of the properties 
are invalid; there is no need to create a new tblProperties map.
    Instead of filter, use collect to gather the invalid properties, then check 
whether any property is returned.


---

Reply via email to