Github user gvramana commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/1686#discussion_r158286004
--- Diff: integration/spark2/src/test/scala/org/apache/carbondata/spark/util/DictionaryLRUCacheTestCase.scala ---
@@ -0,0 +1,424 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.carbondata.spark.util
+
+import java.io.File
+
+import scala.collection.JavaConverters._
+
+import org.apache.spark.sql.common.util.Spark2QueryTest
+import org.apache.spark.sql.hive.CarbonRelation
+import org.apache.spark.sql.test.Spark2TestQueryExecutor
+import org.apache.spark.sql.{CarbonEnv, SparkSession}
+import org.scalatest.BeforeAndAfterAll
+
+import org.apache.carbondata.core.cache.{Cache, CacheProvider, CacheType}
+import org.apache.carbondata.core.cache.dictionary.{Dictionary, DictionaryColumnUniqueIdentifier}
+import org.apache.carbondata.core.constants.CarbonCommonConstants
+import org.apache.carbondata.core.util.{CarbonProperties, CarbonUtil}
+
+/**
+ * Test Case for dictionary LRU cache behaviour
+ */
+class DictionaryLRUCacheTestCase extends Spark2QueryTest with BeforeAndAfterAll {
+ var spark : SparkSession = null
+ var path : String = null
+
+ def createCarbonSession(appName: String): SparkSession = {
+ val rootPath = new File(this.getClass.getResource("/").getPath
+ + "../../../..").getCanonicalPath
+ val storeLocation = s"$rootPath/examples/spark2/target/store"
+ val warehouse = s"$rootPath/examples/spark2/target/warehouse"
+
+ CarbonProperties.getInstance()
+ .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "yyyy/MM/dd HH:mm:ss")
+ .addProperty(CarbonCommonConstants.CARBON_DATE_FORMAT, "yyyy/MM/dd")
+ .addProperty(CarbonCommonConstants.ENABLE_UNSAFE_COLUMN_PAGE_LOADING, "true")
+ .addProperty(CarbonCommonConstants.CARBON_BADRECORDS_LOC, "")
+ .addProperty(CarbonCommonConstants.CARBON_MAX_DRIVER_LRU_CACHE_SIZE, "1")
+ .addProperty(CarbonCommonConstants.CARBON_MAX_EXECUTOR_LRU_CACHE_SIZE, "1")
+
+ import org.apache.spark.sql.CarbonSession._
+ spark = SparkSession
+ .builder()
+ .master("local")
+ .appName("CarbonSessionExample")
+ .config("spark.sql.warehouse.dir", warehouse)
+ .config("spark.driver.host", "localhost")
+ .getOrCreateCarbonSession(storeLocation)
+ spark.sparkContext.setLogLevel("WARN")
+ spark
+ }
+
+ def checkDictionaryAccessCount(databaseName: String, tableName: String): Unit = {
+ val carbonTable = CarbonEnv.getInstance(Spark2TestQueryExecutor.spark).carbonMetastore
+ .lookupRelation(Option(databaseName), tableName)(Spark2TestQueryExecutor.spark)
+ .asInstanceOf[CarbonRelation].carbonTable
+ val absoluteTableIdentifier = carbonTable.getAbsoluteTableIdentifier
+
+ val dimensions = carbonTable.getAllDimensions.asScala.toList
+ dimensions.foreach { dim =>
+ val columnIdentifier = dim.getColumnIdentifier
+ // Check the dictionary cache access.
+ val identifier: DictionaryColumnUniqueIdentifier = new DictionaryColumnUniqueIdentifier(
+ absoluteTableIdentifier,
+ columnIdentifier,
+ columnIdentifier.getDataType)
+
+ val isDictExists: Boolean = CarbonUtil.isFileExistsForGivenColumn(identifier)
+ var dictionary: Dictionary = null
+ if (isDictExists) {
--- End diff --
If the dictionary does not exist, assert (fail the test) instead of silently skipping the check.
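
A minimal sketch of what this suggestion could look like inside the loop above (it reuses the `identifier` and `dim` values from the diff; ScalaTest's `assert` is assumed to be in scope via the test base class, and the message text is illustrative only):

    val isDictExists: Boolean = CarbonUtil.isFileExistsForGivenColumn(identifier)
    // Fail the test explicitly when the dictionary file is missing for this column,
    // rather than skipping the cache-access check.
    assert(isDictExists, s"dictionary file not found for column ${dim.getColName}")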
---