Github user manishgupta88 commented on a diff in the pull request:

    https://github.com/apache/carbondata/pull/2980#discussion_r240147303
  
    --- Diff: 
integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestCreateDDLForComplexMapType.scala
 ---
    @@ -0,0 +1,452 @@
    +/*
    +
    +    Licensed to the Apache Software Foundation (ASF) under one or more
    +    contributor license agreements. See the NOTICE file distributed with
    +    this work for additional information regarding copyright ownership.
    +    The ASF licenses this file to You under the Apache License, Version 2.0
    +    (the "License"); you may not use this file except in compliance with
    +    the License. You may obtain a copy of the License at
    +    *
    +    http://www.apache.org/licenses/LICENSE-2.0
    +    *
    +    Unless required by applicable law or agreed to in writing, software
    +    distributed under the License is distributed on an "AS IS" BASIS,
    +    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 
implied.
    +    See the License for the specific language governing permissions and
    +    limitations under the License.
    +    */
    +package 
org.apache.carbondata.spark.testsuite.createTable.TestCreateDDLForComplexMapType
    +
    +import java.io.File
    +import java.util
    +
    +import org.apache.hadoop.conf.Configuration
    +import org.apache.spark.sql.{AnalysisException, Row}
    +import org.apache.spark.sql.test.util.QueryTest
    +import org.scalatest.BeforeAndAfterAll
    +
    +import 
org.apache.carbondata.core.datastore.chunk.impl.DimensionRawColumnChunk
    +
    +class TestCreateDDLForComplexMapType extends QueryTest with 
BeforeAndAfterAll {
    +  private val conf: Configuration = new Configuration(false)
    +
    +  val rootPath = new File(this.getClass.getResource("/").getPath
    +                          + "../../../..").getCanonicalPath
    +
    +  val path = s"$rootPath/examples/spark2/src/main/resources/maptest2.csv"
    +
    +  private def checkForLocalDictionary(dimensionRawColumnChunks: util
    +  .List[DimensionRawColumnChunk]): Boolean = {
    +    var isLocalDictionaryGenerated = false
    +    import scala.collection.JavaConversions._
    +    for (dimensionRawColumnChunk <- dimensionRawColumnChunks) {
    +      if (dimensionRawColumnChunk.getDataChunkV3
    +        .isSetLocal_dictionary) {
    +        isLocalDictionaryGenerated = true
    +      }
    --- End diff --
    
    You can directly use a Scala filter operation (e.g. `exists`) to check whether the local dictionary is generated, instead of a mutable flag in a loop.


---

Reply via email to