[CARBONDATA-2720] Remove dead code

For accurate coverage results and easy maintenance

This closes #2354


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/f9114036
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/f9114036
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/f9114036

Branch: refs/heads/carbonstore
Commit: f911403673fdae89c8537293ed55ef515b1362ef
Parents: d267c40
Author: sraghunandan <carbondatacontributi...@gmail.com>
Authored: Thu Jul 12 12:23:24 2018 +0530
Committer: Venkata Ramana G <ramana.gollam...@huawei.com>
Committed: Thu Jul 12 12:23:24 2018 +0530

----------------------------------------------------------------------
 .../sql/MalformedCarbonCommandException.java    |   8 -
 .../common/logging/LogServiceFactory.java       |   5 -
 core/pom.xml                                    |   1 +
 .../carbondata/core/cache/CacheProvider.java    |   4 -
 .../apache/carbondata/core/cache/CacheType.java |  16 +-
 .../AbstractColumnDictionaryInfo.java           |  18 -
 .../cache/dictionary/ColumnDictionaryInfo.java  |  13 -
 .../dictionary/ColumnReverseDictionaryInfo.java |  18 +
 .../dictionary/DictionaryBuilderException.java  |  18 -
 .../dictionary/ManageDictionaryAndBTree.java    |  48 --
 .../BlockletLevelDeleteDeltaDataCache.java      |  56 ---
 .../core/constants/CarbonCommonConstants.java   |   2 -
 .../core/datamap/DataMapProvider.java           |  10 +-
 .../datamap/DistributableDataMapFormat.java     |  26 -
 .../dev/expr/DataMapDistributableWrapper.java   |   4 -
 .../core/datastore/SegmentTaskIndexStore.java   | 481 -------------------
 .../datastore/TableSegmentUniqueIdentifier.java |  53 --
 .../carbondata/core/datastore/TableSpec.java    |  42 +-
 .../core/datastore/block/AbstractIndex.java     |   5 -
 .../core/datastore/block/SegmentProperties.java | 224 +--------
 .../core/datastore/block/SegmentTaskIndex.java  |  53 --
 .../block/SegmentTaskIndexWrapper.java          | 129 -----
 .../datastore/chunk/DimensionColumnPage.java    |  16 +-
 .../impl/FixedLengthDimensionColumnPage.java    |  16 +-
 .../impl/VariableLengthDimensionColumnPage.java |  16 +-
 .../chunk/store/ColumnPageWrapper.java          |  12 +-
 .../datastore/columnar/ColumnGroupModel.java    |  88 ----
 .../datastore/columnar/ColumnWithRowId.java     |  14 -
 .../core/datastore/page/EncodedTablePage.java   |  12 -
 .../page/encoding/DefaultEncodingFactory.java   |   7 +-
 .../adaptive/AdaptiveFloatingCodec.java         |   6 -
 .../dimension/legacy/IndexStorageEncoder.java   |   4 -
 .../core/datastore/page/key/TablePageKey.java   |  16 -
 .../core/datastore/row/CarbonRow.java           |   8 -
 .../devapi/DictionaryGenerationException.java   |  52 --
 .../core/devapi/GeneratingBiDictionary.java     |  48 --
 .../dictionary/client/DictionaryClient.java     |   6 +-
 .../generator/TableDictionaryGenerator.java     |   4 -
 .../generator/key/DictionaryMessage.java        |   5 -
 .../dictionary/server/DictionaryServer.java     |  18 +-
 .../service/DictionaryServiceProvider.java      |   2 +-
 .../InvalidConfigurationException.java          |  29 --
 .../core/indexstore/schema/CarbonRowSchema.java |   4 +-
 .../core/keygenerator/KeyGenerator.java         |   7 -
 .../DirectDictionaryKeyGeneratorFactory.java    |   1 -
 .../mdkey/AbstractKeyGenerator.java             |  30 --
 .../keygenerator/mdkey/NumberCompressor.java    |  34 --
 .../carbondata/core/locks/CarbonLockUtil.java   |   4 +-
 .../carbondata/core/locks/ZookeeperInit.java    |   4 -
 .../core/memory/HeapMemoryAllocator.java        |   1 -
 .../carbondata/core/memory/MemoryBlock.java     |   2 +-
 .../carbondata/core/memory/MemoryLocation.java  |   9 -
 .../compressor/ChunkCompressorMeta.java         |  34 --
 .../ThriftWrapperSchemaConverterImpl.java       |   6 +-
 .../datatype/DecimalConverterFactory.java       |  14 -
 .../core/metadata/schema/table/CarbonTable.java |  37 +-
 .../schema/table/TableSchemaBuilder.java        |   1 -
 .../schema/table/column/CarbonColumn.java       |  13 -
 .../schema/table/column/CarbonDimension.java    |  21 +-
 .../table/column/CarbonImplicitDimension.java   |   9 +-
 .../schema/table/column/ColumnSchema.java       |  33 --
 .../core/mutate/CarbonUpdateUtil.java           |  11 +-
 .../mutate/data/DeleteDeltaCacheLoaderIntf.java |  27 --
 .../core/readcommitter/ReadCommittedScope.java  |  15 +-
 .../scan/collector/ResultCollectorFactory.java  |   3 +
 .../exception/QueryExecutionException.java      |  46 --
 .../executor/impl/AbstractQueryExecutor.java    |  19 +-
 .../scan/executor/infos/BlockExecutionInfo.java |  20 -
 .../scan/executor/infos/KeyStructureInfo.java   |  98 ----
 .../core/scan/executor/util/QueryUtil.java      | 176 +------
 .../exception/FilterIllegalMemberException.java |  29 --
 .../exception/FilterUnsupportedException.java   |  20 -
 .../expression/logical/FalseExpression.java     |   5 +-
 .../expression/logical/RangeExpression.java     |   5 +-
 .../scan/expression/logical/TrueExpression.java |   5 +-
 .../carbondata/core/scan/filter/FilterUtil.java |   4 +-
 .../executer/RangeValueFilterExecuterImpl.java  |   2 +-
 .../executer/RowLevelFilterExecuterImpl.java    |  42 +-
 .../executer/ValueBasedFilterExecuterImpl.java  |  49 --
 .../scan/filter/intf/FilterOptimizerImpl.java   |  33 --
 .../filter/optimizer/RangeFilterOptmizer.java   |   5 +-
 .../TrueConditionalResolverImpl.java            |   1 -
 .../core/scan/result/BlockletScannedResult.java |  24 +-
 .../result/impl/FilterQueryScannedResult.java   |   6 +-
 .../impl/NonFilterQueryScannedResult.java       |   3 +-
 .../scan/scanner/impl/BlockletFullScanner.java  |   1 -
 .../core/scan/wrappers/ByteArrayWrapper.java    |   7 -
 .../DriverQueryStatisticsRecorderImpl.java      |  18 +-
 .../core/stats/QueryStatisticsModel.java        |   1 -
 .../SegmentUpdateStatusManager.java             |   4 +-
 .../util/AbstractDataFileFooterConverter.java   |   2 -
 .../carbondata/core/util/CarbonMergerUtil.java  |  65 ---
 .../core/util/CarbonMetadataUtil.java           |  28 +-
 .../carbondata/core/util/CarbonProperties.java  |   4 -
 .../apache/carbondata/core/util/CarbonUtil.java |  69 +--
 .../core/util/DataFileFooterConverter2.java     |   7 +-
 .../apache/carbondata/core/util/NodeHolder.java | 457 ------------------
 .../carbondata/core/util/TaskMetricsMap.java    |   4 -
 .../core/util/path/CarbonTablePath.java         |  50 --
 .../carbondata/core/writer/ByteArrayHolder.java |  75 ---
 .../core/writer/CarbonDeleteDeltaWriter.java    |   7 -
 .../writer/CarbonDeleteDeltaWriterImpl.java     |  32 --
 .../carbondata/core/writer/ThriftWriter.java    |  20 -
 .../carbondata/events/OperationListenerBus.java |  34 --
 .../core/cache/CacheProviderTest.java           |  34 --
 .../dictionary/AbstractDictionaryCacheTest.java |   6 +-
 .../datastore/SegmentTaskIndexStoreTest.java    |  90 ----
 .../datastore/block/SegmentPropertiesTest.java  |  31 +-
 .../block/SegmentPropertiesTestUtil.java        |  15 -
 .../datastore/block/SegmentTaskIndexTest.java   |  77 ---
 .../impl/FixedLengthDimensionDataChunkTest.java |  10 +-
 .../datastore/page/encoding/RLECodecSuite.java  | 158 ------
 .../datastore/page/encoding/RLECodecTest.java   | 158 ++++++
 .../dictionary/client/DictionaryClientTest.java |   4 +-
 ...ncrementalColumnDictionaryGeneratorTest.java |   4 +-
 .../ServerDictionaryGeneratorTest.java          |   4 +-
 .../generator/TableDictionaryGeneratorTest.java |   4 +-
 .../core/metadata/CarbonMetadataTest.java       |  13 +-
 .../ThriftWrapperSchemaConverterImplTest.java   | 130 -----
 .../metadata/schema/table/CarbonTableTest.java  |   3 +-
 .../table/CarbonTableWithComplexTypesTest.java  |   5 -
 .../impl/RawBasedResultCollectorTest.java       | 130 -----
 .../core/scan/executor/util/QueryUtilTest.java  |  36 --
 .../scan/executor/util/RestructureUtilTest.java |  10 +-
 .../scan/expression/ColumnExpressionTest.java   |   1 +
 .../conditional/EqualToExpressionUnitTest.java  |  51 ++
 .../GreaterThanEqualToExpressionUnitTest.java   | 205 ++++++++
 .../GreaterThanExpressionUnitTest.java          | 203 ++++++++
 .../LessThanEqualToExpressionUnitTest.java      | 175 +++++++
 .../conditional/LessThanExpressionUnitTest.java | 204 ++++++++
 .../conditional/ListExpressionUnitTest.java     |  22 +
 .../NotEqualsExpressionUnitTest.java            | 204 +++++++-
 .../conditional/NotInExpressionUnitTest.java    |  30 ++
 .../expression/logical/AndExpressionTest.java   |  68 +++
 .../expression/logical/FalseExpressionTest.java |  12 +
 .../expression/logical/OrExpressionTest.java    |  68 +++
 .../expression/logical/RangeExpressionTest.java | 148 ++++++
 .../expression/logical/TrueExpressionTest.java  |  57 +++
 .../filter/FilterExpressionProcessorTest.java   |   1 -
 .../core/scan/filter/FilterUtilTest.java        |   8 +-
 .../executer/IncludeFilterExecuterImplTest.java |   2 +-
 .../scan/wrappers/ByteArrayWrapperTest.java     |  57 +++
 .../core/util/CarbonMergerUtilTest.java         |  41 --
 .../carbondata/core/util/CarbonUtilTest.java    |  53 +-
 .../core/util/RangeFilterProcessorTest.java     |   8 +-
 .../writer/CarbonDictionaryWriterImplTest.java  |   6 +-
 .../lucene/LuceneCoarseGrainDataMap.java        | 238 ---------
 .../lucene/LuceneCoarseGrainDataMapFactory.java |  95 ----
 datamap/mv/core/pom.xml                         |   1 +
 .../carbondata/mv/rewrite/MatchConditions.scala |  28 --
 hadoop/pom.xml                                  |   1 +
 .../apache/carbondata/hadoop/CacheClient.java   |  48 --
 .../hadoop/internal/CarbonFormatType.java       |  22 -
 .../hadoop/internal/CarbonInputSplit.java       |  32 --
 .../hadoop/internal/segment/Segment.java        |  70 ---
 .../hadoop/testutil/StoreCreator.java           |  15 -
 integration/hive/pom.xml                        |   1 +
 integration/presto/pom.xml                      |   1 +
 .../presto/CarbondataColumnHandle.java          |  18 -
 .../carbondata/presto/CarbondataMetadata.java   |  14 +-
 .../presto/util/CarbonDataStoreCreator.scala    |  25 -
 integration/spark-common-test/pom.xml           |  41 +-
 .../src/test/resources/encoding_types.csv       |   3 +
 .../test/resources/short_int_as_target_type.csv |   3 -
 .../aggquery/IntegerDataTypeTestCase.scala      |  32 +-
 .../allqueries/DoubleDataTypeTest.scala         |   8 +
 .../testsuite/bigdecimal/TestBigDecimal.scala   |   9 +
 .../MajorCompactionIgnoreInMinorTest.scala      |   9 -
 .../testsuite/datamap/FGDataMapTestCase.scala   |   2 +
 integration/spark-common/pom.xml                |   1 +
 .../carbondata/events/AlterTableEvents.scala    |  17 +-
 .../carbondata/events/CleanFilesEvents.scala    |   8 -
 .../apache/carbondata/spark/CarbonOption.scala  |   5 -
 .../load/DataLoadProcessorStepOnSpark.scala     |   3 +-
 .../spark/rdd/CarbonGlobalDictionaryRDD.scala   |   3 -
 .../carbondata/spark/rdd/CarbonMergerRDD.scala  |  17 +-
 .../spark/rdd/NewCarbonDataLoadRDD.scala        |  17 -
 .../carbondata/spark/util/CarbonScalaUtil.scala |  88 ----
 .../carbondata/spark/util/CommonUtil.scala      | 155 ------
 .../spark/util/DataTypeConverterUtil.scala      |  22 -
 .../spark/util/GlobalDictionaryUtil.scala       |  72 ---
 .../streaming/StreamSinkFactory.scala           |   9 +-
 .../apache/spark/rdd/UpdateCoalescedRDD.scala   |  89 ----
 .../spark/sql/catalyst/CarbonDDLSqlParser.scala |  76 ---
 .../command/carbonTableSchemaCommon.scala       |  30 --
 .../streaming/CarbonAppendableStreamSink.scala  |   7 +-
 integration/spark2/pom.xml                      |   1 +
 .../datamap/IndexDataMapProvider.java           |  17 -
 .../datamap/PreAggregateDataMapProvider.java    |  15 -
 .../CarbonAlterTableCompactionCommand.scala     |   4 +-
 .../management/CarbonLoadDataCommand.scala      |   7 +-
 .../command/timeseries/TimeSeriesUtil.scala     |  45 --
 .../sql/parser/CarbonSpark2SqlParser.scala      |   1 -
 pom.xml                                         |   2 +
 processing/pom.xml                              |   1 +
 .../exception/DataLoadingException.java         |  24 -
 .../exception/MultipleMatchingException.java    |  24 -
 .../exception/SliceMergerException.java         |  23 -
 .../loading/AbstractDataLoadProcessorStep.java  |  43 +-
 .../processing/loading/events/LoadEvents.java   |  60 +--
 .../exception/BadRecordFoundException.java      |   8 -
 .../loading/exception/NoRetryException.java     |  19 -
 .../loading/sort/SortScopeOptions.java          |   7 +-
 .../sort/impl/ParallelReadMergeSorterImpl.java  |   8 -
 ...allelReadMergeSorterWithColumnRangeImpl.java |   8 -
 .../UnsafeBatchParallelReadMergeSorterImpl.java |   9 -
 .../impl/UnsafeParallelReadMergeSorterImpl.java |   8 -
 ...allelReadMergeSorterWithColumnRangeImpl.java |   8 -
 .../unsafe/merger/UnsafeIntermediateMerger.java |   8 -
 .../CarbonRowDataWriterProcessorStepImpl.java   |  12 +-
 .../steps/DataConverterProcessorStepImpl.java   |  23 +-
 .../steps/DataWriterBatchProcessorStepImpl.java |  14 +-
 .../steps/DataWriterProcessorStepImpl.java      |  11 +-
 .../loading/steps/InputProcessorStepImpl.java   |   4 -
 .../InputProcessorStepWithNoConverterImpl.java  |   4 -
 .../steps/JsonInputProcessorStepImpl.java       |   5 -
 .../loading/steps/SortProcessorStepImpl.java    |  12 -
 .../processing/merger/CarbonCompactionUtil.java |  32 --
 .../processing/merger/CarbonDataMergerUtil.java |  17 +-
 .../merger/CompactionResultSortProcessor.java   |   3 +-
 .../partition/impl/DefaultLoadBalancer.java     |  63 ---
 .../partition/impl/PartitionMultiFileImpl.java  |  44 --
 .../impl/SampleDataPartitionerImpl.java         |  40 --
 .../partition/spliter/RowResultProcessor.java   |   7 +-
 .../exception/AlterPartitionSliceException.java |  78 ---
 .../processing/splits/TableSplit.java           | 124 -----
 .../store/CarbonDataFileAttributes.java         |   4 -
 .../store/CarbonFactDataHandlerColumnar.java    |  24 +-
 .../store/CarbonFactDataHandlerModel.java       |  26 -
 .../store/CarbonFactHandlerFactory.java         |  15 +-
 .../util/CarbonDataProcessorUtil.java           |  45 +-
 231 files changed, 2025 insertions(+), 6223 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/f9114036/common/src/main/java/org/apache/carbondata/common/exceptions/sql/MalformedCarbonCommandException.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/carbondata/common/exceptions/sql/MalformedCarbonCommandException.java b/common/src/main/java/org/apache/carbondata/common/exceptions/sql/MalformedCarbonCommandException.java
index 5fe3ce8..0394d29 100644
--- a/common/src/main/java/org/apache/carbondata/common/exceptions/sql/MalformedCarbonCommandException.java
+++ b/common/src/main/java/org/apache/carbondata/common/exceptions/sql/MalformedCarbonCommandException.java
@@ -59,14 +59,6 @@ public class MalformedCarbonCommandException extends Exception {
   }
 
   /**
-   * getLocalizedMessage
-   */
-  @Override
-  public String getLocalizedMessage() {
-    return super.getLocalizedMessage();
-  }
-
-  /**
    * getMessage
    */
   public String getMessage() {

http://git-wip-us.apache.org/repos/asf/carbondata/blob/f9114036/common/src/main/java/org/apache/carbondata/common/logging/LogServiceFactory.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/carbondata/common/logging/LogServiceFactory.java b/common/src/main/java/org/apache/carbondata/common/logging/LogServiceFactory.java
index 410844e..cf84d2d 100644
--- a/common/src/main/java/org/apache/carbondata/common/logging/LogServiceFactory.java
+++ b/common/src/main/java/org/apache/carbondata/common/logging/LogServiceFactory.java
@@ -36,9 +36,4 @@ public final class LogServiceFactory {
   public static LogService getLogService(final String className) {
     return new StandardLogService(className);
   }
-
-  public static LogService getLogService() {
-    return new StandardLogService();
-  }
-
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/carbondata/blob/f9114036/core/pom.xml
----------------------------------------------------------------------
diff --git a/core/pom.xml b/core/pom.xml
index c145c3b..71fa66c 100644
--- a/core/pom.xml
+++ b/core/pom.xml
@@ -31,6 +31,7 @@
 
   <properties>
     <dev.path>${basedir}/../dev</dev.path>
+    <jacoco.append>true</jacoco.append>
   </properties>
 
   <dependencies>

http://git-wip-us.apache.org/repos/asf/carbondata/blob/f9114036/core/src/main/java/org/apache/carbondata/core/cache/CacheProvider.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/cache/CacheProvider.java b/core/src/main/java/org/apache/carbondata/core/cache/CacheProvider.java
index 0ee4f25..292d9eb 100644
--- a/core/src/main/java/org/apache/carbondata/core/cache/CacheProvider.java
+++ b/core/src/main/java/org/apache/carbondata/core/cache/CacheProvider.java
@@ -28,7 +28,6 @@ import org.apache.carbondata.core.cache.dictionary.DictionaryColumnUniqueIdentif
 import org.apache.carbondata.core.cache.dictionary.ForwardDictionaryCache;
 import org.apache.carbondata.core.cache.dictionary.ReverseDictionaryCache;
 import org.apache.carbondata.core.constants.CarbonCommonConstants;
-import org.apache.carbondata.core.datastore.SegmentTaskIndexStore;
 import org.apache.carbondata.core.indexstore.BlockletDataMapIndexStore;
 import org.apache.carbondata.core.util.CarbonProperties;
 
@@ -140,9 +139,6 @@ public class CacheProvider {
     } else if (cacheType.equals(CacheType.FORWARD_DICTIONARY)) {
       cacheObject =
           new ForwardDictionaryCache<DictionaryColumnUniqueIdentifier, Dictionary>(carbonLRUCache);
-    } else if (cacheType.equals(cacheType.DRIVER_BTREE)) {
-      cacheObject =
-          new SegmentTaskIndexStore(carbonLRUCache);
     } else if (cacheType.equals(cacheType.DRIVER_BLOCKLET_DATAMAP)) {
       cacheObject = new BlockletDataMapIndexStore(carbonLRUCache);
     }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/f9114036/core/src/main/java/org/apache/carbondata/core/cache/CacheType.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/cache/CacheType.java b/core/src/main/java/org/apache/carbondata/core/cache/CacheType.java
index 9cc2320..df32b6e 100644
--- a/core/src/main/java/org/apache/carbondata/core/cache/CacheType.java
+++ b/core/src/main/java/org/apache/carbondata/core/cache/CacheType.java
@@ -21,8 +21,6 @@ import org.apache.carbondata.core.cache.dictionary.Dictionary;
 import org.apache.carbondata.core.cache.dictionary.DictionaryColumnUniqueIdentifier;
 import org.apache.carbondata.core.datastore.TableSegmentUniqueIdentifier;
 import org.apache.carbondata.core.datastore.block.AbstractIndex;
-import org.apache.carbondata.core.datastore.block.SegmentTaskIndexWrapper;
-import org.apache.carbondata.core.datastore.block.TableBlockUniqueIdentifier;
 
 /**
  * class which defines different cache types. cache type can be dictionary cache for
@@ -46,19 +44,7 @@ public class CacheType<K, V> {
   /**
    * Executor BTree cache which maintains size of BTree metadata
    */
-  public static final CacheType<TableBlockUniqueIdentifier, AbstractIndex> EXECUTOR_BTREE =
-      new CacheType("executor_btree");
-
-  /**
-   * Executor BTree cache which maintains size of BTree metadata
-   */
-  public static final CacheType<TableSegmentUniqueIdentifier, SegmentTaskIndexWrapper>
-      DRIVER_BTREE = new CacheType("driver_btree");
-
-  /**
-   * Executor BTree cache which maintains size of BTree metadata
-   */
-  public static final CacheType<TableSegmentUniqueIdentifier, SegmentTaskIndexWrapper>
+  public static final CacheType<TableSegmentUniqueIdentifier, AbstractIndex>
       DRIVER_BLOCKLET_DATAMAP = new CacheType("driver_blocklet_datamap");
 
   /**

http://git-wip-us.apache.org/repos/asf/carbondata/blob/f9114036/core/src/main/java/org/apache/carbondata/core/cache/dictionary/AbstractColumnDictionaryInfo.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/cache/dictionary/AbstractColumnDictionaryInfo.java b/core/src/main/java/org/apache/carbondata/core/cache/dictionary/AbstractColumnDictionaryInfo.java
index 7e8a1c8..c138cc8 100644
--- a/core/src/main/java/org/apache/carbondata/core/cache/dictionary/AbstractColumnDictionaryInfo.java
+++ b/core/src/main/java/org/apache/carbondata/core/cache/dictionary/AbstractColumnDictionaryInfo.java
@@ -219,24 +219,6 @@ public abstract class AbstractColumnDictionaryInfo implements DictionaryInfo {
   }
 
   /**
-   * This method will set the sort order index of a dictionary column.
-   * Sort order index if the index of dictionary values after they are sorted.
-   *
-   * @param sortOrderIndex
-   */
-  @Override public void setSortOrderIndex(List<Integer> sortOrderIndex) {
-  }
-
-  /**
-   * This method will set the sort reverse index of a dictionary column.
-   * Sort reverse index is the index of dictionary values before they are sorted.
-   *
-   * @param sortReverseOrderIndex
-   */
-  @Override public void setSortReverseOrderIndex(List<Integer> sortReverseOrderIndex) {
-  }
-
-  /**
    * This method will find and return the dictionary value for a given surrogate key.
    * Applicable scenarios:
    * 1. Query final result preparation : While convert the final result which will

http://git-wip-us.apache.org/repos/asf/carbondata/blob/f9114036/core/src/main/java/org/apache/carbondata/core/cache/dictionary/ColumnDictionaryInfo.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/cache/dictionary/ColumnDictionaryInfo.java b/core/src/main/java/org/apache/carbondata/core/cache/dictionary/ColumnDictionaryInfo.java
index ad1d201..e7c68a5 100644
--- a/core/src/main/java/org/apache/carbondata/core/cache/dictionary/ColumnDictionaryInfo.java
+++ b/core/src/main/java/org/apache/carbondata/core/cache/dictionary/ColumnDictionaryInfo.java
@@ -18,9 +18,7 @@
 package org.apache.carbondata.core.cache.dictionary;
 
 import java.nio.charset.Charset;
-import java.text.SimpleDateFormat;
 import java.util.ArrayList;
-import java.util.Date;
 import java.util.List;
 import java.util.concurrent.atomic.AtomicReference;
 
@@ -301,17 +299,6 @@ public class ColumnDictionaryInfo extends AbstractColumnDictionaryInfo {
             (Double.parseDouble(dictionaryVal)), (Double.parseDouble(memberVal)));
       } else if (dataType == DataTypes.LONG) {
         return Long.compare((Long.parseLong(dictionaryVal)), (Long.parseLong(memberVal)));
-      } else if (dataType == DataTypes.BOOLEAN) {
-        return Boolean.compare(
-            (Boolean.parseBoolean(dictionaryVal)), (Boolean.parseBoolean(memberVal)));
-      } else if (dataType == DataTypes.DATE || dataType == DataTypes.TIMESTAMP) {
-        String format = CarbonUtil.getFormatFromProperty(dataType);
-        SimpleDateFormat parser = new SimpleDateFormat(format);
-        Date dateToStr;
-        Date dictionaryDate;
-        dateToStr = parser.parse(memberVal);
-        dictionaryDate = parser.parse(dictionaryVal);
-        return dictionaryDate.compareTo(dateToStr);
       } else if (DataTypes.isDecimal(dataType)) {
         java.math.BigDecimal javaDecValForDictVal = new java.math.BigDecimal(dictionaryVal);
         java.math.BigDecimal javaDecValForMemberVal = new java.math.BigDecimal(memberVal);

http://git-wip-us.apache.org/repos/asf/carbondata/blob/f9114036/core/src/main/java/org/apache/carbondata/core/cache/dictionary/ColumnReverseDictionaryInfo.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/cache/dictionary/ColumnReverseDictionaryInfo.java b/core/src/main/java/org/apache/carbondata/core/cache/dictionary/ColumnReverseDictionaryInfo.java
index 591fdf5..42d5b2e 100644
--- a/core/src/main/java/org/apache/carbondata/core/cache/dictionary/ColumnReverseDictionaryInfo.java
+++ b/core/src/main/java/org/apache/carbondata/core/cache/dictionary/ColumnReverseDictionaryInfo.java
@@ -111,4 +111,22 @@ public class ColumnReverseDictionaryInfo extends AbstractColumnDictionaryInfo {
   private void createDictionaryByteArrayToSurrogateKeyMap(int initialMapSize) {
     dictionaryByteArrayToSurrogateKeyMap = new ConcurrentHashMap<>(initialMapSize);
   }
+
+  /**
+   * This method will set the sort order index of a dictionary column.
+   * Sort order index if the index of dictionary values after they are sorted.
+   *
+   * @param sortOrderIndex
+   */
+  @Override public void setSortOrderIndex(List<Integer> sortOrderIndex) {
+  }
+
+  /**
+   * This method will set the sort reverse index of a dictionary column.
+   * Sort reverse index is the index of dictionary values before they are sorted.
+   *
+   * @param sortReverseOrderIndex
+   */
+  @Override public void setSortReverseOrderIndex(List<Integer> sortReverseOrderIndex) {
+  }
 }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/f9114036/core/src/main/java/org/apache/carbondata/core/cache/dictionary/DictionaryBuilderException.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/cache/dictionary/DictionaryBuilderException.java b/core/src/main/java/org/apache/carbondata/core/cache/dictionary/DictionaryBuilderException.java
index 2bb984e..ed6394e 100644
--- a/core/src/main/java/org/apache/carbondata/core/cache/dictionary/DictionaryBuilderException.java
+++ b/core/src/main/java/org/apache/carbondata/core/cache/dictionary/DictionaryBuilderException.java
@@ -39,23 +39,5 @@ public class DictionaryBuilderException extends IOException {
     super(msg);
     this.msg = msg;
   }
-
-  /**
-   * getLocalizedMessage
-   */
-  @Override public String getLocalizedMessage() {
-    return super.getLocalizedMessage();
-  }
-
-  /**
-   * getMessage
-   */
-  public String getMessage() {
-    return this.msg;
-  }
-
-  @Override public String toString() {
-    return "DictionaryBuilderException: " + msg;
-  }
 }
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/f9114036/core/src/main/java/org/apache/carbondata/core/cache/dictionary/ManageDictionaryAndBTree.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/cache/dictionary/ManageDictionaryAndBTree.java b/core/src/main/java/org/apache/carbondata/core/cache/dictionary/ManageDictionaryAndBTree.java
index b54fb14..2450f85 100644
--- a/core/src/main/java/org/apache/carbondata/core/cache/dictionary/ManageDictionaryAndBTree.java
+++ b/core/src/main/java/org/apache/carbondata/core/cache/dictionary/ManageDictionaryAndBTree.java
@@ -25,7 +25,6 @@ import org.apache.carbondata.common.logging.LogServiceFactory;
 import org.apache.carbondata.core.cache.Cache;
 import org.apache.carbondata.core.cache.CacheProvider;
 import org.apache.carbondata.core.cache.CacheType;
-import org.apache.carbondata.core.datastore.TableSegmentUniqueIdentifier;
 import org.apache.carbondata.core.datastore.filesystem.CarbonFile;
 import org.apache.carbondata.core.datastore.filesystem.CarbonFileFilter;
 import org.apache.carbondata.core.datastore.impl.FileFactory;
@@ -34,8 +33,6 @@ import org.apache.carbondata.core.metadata.ColumnIdentifier;
 import org.apache.carbondata.core.metadata.schema.table.CarbonTable;
 import org.apache.carbondata.core.metadata.schema.table.column.CarbonDimension;
 import org.apache.carbondata.core.metadata.schema.table.column.ColumnSchema;
-import org.apache.carbondata.core.statusmanager.LoadMetadataDetails;
-import org.apache.carbondata.core.statusmanager.SegmentStatusManager;
 import org.apache.carbondata.core.util.path.CarbonTablePath;
 
 /**
@@ -96,15 +93,6 @@ public class ManageDictionaryAndBTree {
    * @param carbonTable
    */
   public static void clearBTreeAndDictionaryLRUCache(CarbonTable carbonTable) {
-    // clear Btree cache from LRU cache
-    LoadMetadataDetails[] loadMetadataDetails =
-        SegmentStatusManager.readLoadMetadata(carbonTable.getMetadataPath());
-    String[] segments = new String[loadMetadataDetails.length];
-    int i = 0;
-    for (LoadMetadataDetails loadMetadataDetail : loadMetadataDetails) {
-      segments[i++] = loadMetadataDetail.getLoadName();
-    }
-    invalidateBTreeCache(carbonTable.getAbsoluteTableIdentifier(), segments);
     // clear dictionary cache from LRU cache
     List<CarbonDimension> dimensions =
         carbonTable.getDimensionByTableName(carbonTable.getTableName());
@@ -131,40 +119,4 @@ public class ManageDictionaryAndBTree {
     dictCache = CacheProvider.getInstance().createCache(CacheType.FORWARD_DICTIONARY);
     dictCache.invalidate(dictionaryColumnUniqueIdentifier);
   }
-
-  /**
-   * This method will remove the BTree instances from LRU cache
-   *
-   * @param absoluteTableIdentifier
-   * @param segments
-   */
-  public static void invalidateBTreeCache(AbsoluteTableIdentifier absoluteTableIdentifier,
-      String[] segments) {
-    Cache<Object, Object> driverBTreeCache =
-        CacheProvider.getInstance().createCache(CacheType.DRIVER_BTREE);
-    for (String segmentNo : segments) {
-      TableSegmentUniqueIdentifier tableSegmentUniqueIdentifier =
-          new TableSegmentUniqueIdentifier(absoluteTableIdentifier, segmentNo);
-      driverBTreeCache.invalidate(tableSegmentUniqueIdentifier);
-    }
-  }
-
-  /**
-   * This method will remove the BTree instances from LRU cache for all the segments
-   *
-   * @param carbonTable
-   */
-  public static void invalidateBTreeCache(CarbonTable carbonTable) {
-    LoadMetadataDetails[] loadMetadataDetails =
-        SegmentStatusManager.readLoadMetadata(carbonTable.getMetadataPath());
-    if (loadMetadataDetails.length > 0) {
-      String[] segments = new String[loadMetadataDetails.length];
-      int loadCounter = 0;
-      for (LoadMetadataDetails loadMetadataDetail : loadMetadataDetails) {
-        segments[loadCounter++] = loadMetadataDetail.getLoadName();
-      }
-      invalidateBTreeCache(carbonTable.getAbsoluteTableIdentifier(), segments);
-    }
-  }
-
 }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/f9114036/core/src/main/java/org/apache/carbondata/core/cache/update/BlockletLevelDeleteDeltaDataCache.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/cache/update/BlockletLevelDeleteDeltaDataCache.java b/core/src/main/java/org/apache/carbondata/core/cache/update/BlockletLevelDeleteDeltaDataCache.java
deleted file mode 100644
index abad924..0000000
--- a/core/src/main/java/org/apache/carbondata/core/cache/update/BlockletLevelDeleteDeltaDataCache.java
+++ /dev/null
@@ -1,56 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.carbondata.core.cache.update;
-
-import java.util.HashMap;
-import java.util.Map;
-
-import org.apache.carbondata.core.constants.CarbonCommonConstants;
-
-import org.roaringbitmap.RoaringBitmap;
-
-/**
- * This class maintains delete delta data cache of each blocklet along with the block timestamp
- */
-public class BlockletLevelDeleteDeltaDataCache {
-  private Map<Integer, RoaringBitmap> deleteDelataDataCache =
-      new HashMap<>(CarbonCommonConstants.DEFAULT_COLLECTION_SIZE);
-  private String timeStamp;
-
-  public BlockletLevelDeleteDeltaDataCache(Map<Integer, Integer[]> deleteDeltaFileData,
-      String timeStamp) {
-    for (Map.Entry<Integer, Integer[]> entry : deleteDeltaFileData.entrySet()) {
-      int[] dest = new int[entry.getValue().length];
-      int i = 0;
-      for (Integer val : entry.getValue()) {
-        dest[i++] = val.intValue();
-      }
-      deleteDelataDataCache.put(entry.getKey(), RoaringBitmap.bitmapOf(dest));
-    }
-    this.timeStamp = timeStamp;
-  }
-
-  public boolean contains(int key, Integer pageId) {
-    return deleteDelataDataCache.get(pageId).contains(key);
-  }
-
-  public String getCacheTimeStamp() {
-    return timeStamp;
-  }
-}
-

http://git-wip-us.apache.org/repos/asf/carbondata/blob/f9114036/core/src/main/java/org/apache/carbondata/core/constants/CarbonCommonConstants.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/constants/CarbonCommonConstants.java b/core/src/main/java/org/apache/carbondata/core/constants/CarbonCommonConstants.java
index 7a64513..3e2843c 100644
--- a/core/src/main/java/org/apache/carbondata/core/constants/CarbonCommonConstants.java
+++ b/core/src/main/java/org/apache/carbondata/core/constants/CarbonCommonConstants.java
@@ -917,8 +917,6 @@ public final class CarbonCommonConstants {
    */
   public static final String DATABASE_DEFAULT_NAME = "default";
 
-  // tblproperties
-  public static final String COLUMN_GROUPS = "column_groups";
   public static final String DICTIONARY_EXCLUDE = "dictionary_exclude";
   public static final String DICTIONARY_INCLUDE = "dictionary_include";
   public static final String LONG_STRING_COLUMNS = "long_string_columns";

http://git-wip-us.apache.org/repos/asf/carbondata/blob/f9114036/core/src/main/java/org/apache/carbondata/core/datamap/DataMapProvider.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datamap/DataMapProvider.java b/core/src/main/java/org/apache/carbondata/core/datamap/DataMapProvider.java
index 168159b..086a1c0 100644
--- a/core/src/main/java/org/apache/carbondata/core/datamap/DataMapProvider.java
+++ b/core/src/main/java/org/apache/carbondata/core/datamap/DataMapProvider.java
@@ -84,7 +84,7 @@ public abstract class DataMapProvider {
    * This is called when user creates datamap, for example "CREATE DATAMAP dm ON TABLE mainTable"
    * Implementation should initialize data for datamap, like creating data folders
    */
-  public abstract void initData();
+  public void initData() { }
 
   /**
    * Opposite operation of {@link #initMeta(String)}.
@@ -111,13 +111,17 @@ public abstract class DataMapProvider {
   /**
    * Build the datamap incrementally by loading specified segment data
    */
-  public abstract void incrementalBuild(String[] segmentIds) throws IOException;
+  public void incrementalBuild(String[] segmentIds) {
+    throw new UnsupportedOperationException();
+  }
 
   /**
    * Provide the datamap catalog instance or null if this datamap not required to rewrite
    * the query.
    */
-  public abstract DataMapCatalog createDataMapCatalog();
+  public DataMapCatalog createDataMapCatalog() {
+    return null;
+  }
 
   public abstract DataMapFactory getDataMapFactory();
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/f9114036/core/src/main/java/org/apache/carbondata/core/datamap/DistributableDataMapFormat.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datamap/DistributableDataMapFormat.java b/core/src/main/java/org/apache/carbondata/core/datamap/DistributableDataMapFormat.java
index 010c6c2..762d89c 100644
--- a/core/src/main/java/org/apache/carbondata/core/datamap/DistributableDataMapFormat.java
+++ b/core/src/main/java/org/apache/carbondata/core/datamap/DistributableDataMapFormat.java
@@ -29,10 +29,7 @@ import org.apache.carbondata.core.datamap.dev.expr.DataMapExprWrapper;
 import org.apache.carbondata.core.indexstore.ExtendedBlocklet;
 import org.apache.carbondata.core.indexstore.PartitionSpec;
 import org.apache.carbondata.core.metadata.schema.table.CarbonTable;
-import org.apache.carbondata.core.scan.filter.resolver.FilterResolverIntf;
-import org.apache.carbondata.core.util.ObjectSerializationUtil;
 
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.mapreduce.InputSplit;
 import org.apache.hadoop.mapreduce.JobContext;
 import org.apache.hadoop.mapreduce.RecordReader;
@@ -45,8 +42,6 @@ import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
 public class DistributableDataMapFormat extends FileInputFormat<Void, ExtendedBlocklet> implements
     Serializable {
 
-  private static final String FILTER_EXP = "mapreduce.input.distributed.datamap.filter";
-
   private CarbonTable table;
 
   private DataMapExprWrapper dataMapExprWrapper;
@@ -72,27 +67,6 @@ public class DistributableDataMapFormat extends FileInputFormat<Void, ExtendedBl
     this.isJobToClearDataMaps = isJobToClearDataMaps;
   }
 
-  public boolean isJobToClearDataMaps() {
-    return isJobToClearDataMaps;
-  }
-
-  public static void setFilterExp(Configuration configuration, FilterResolverIntf filterExp)
-      throws IOException {
-    if (filterExp != null) {
-      String string = ObjectSerializationUtil.convertObjectToString(filterExp);
-      configuration.set(FILTER_EXP, string);
-    }
-  }
-
-  private static FilterResolverIntf getFilterExp(Configuration configuration) throws IOException {
-    String filterString = configuration.get(FILTER_EXP);
-    if (filterString != null) {
-      Object toObject = ObjectSerializationUtil.convertStringToObject(filterString);
-      return (FilterResolverIntf) toObject;
-    }
-    return null;
-  }
-
   @Override
   public List<InputSplit> getSplits(JobContext job) throws IOException {
     List<DataMapDistributableWrapper> distributables =

http://git-wip-us.apache.org/repos/asf/carbondata/blob/f9114036/core/src/main/java/org/apache/carbondata/core/datamap/dev/expr/DataMapDistributableWrapper.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datamap/dev/expr/DataMapDistributableWrapper.java b/core/src/main/java/org/apache/carbondata/core/datamap/dev/expr/DataMapDistributableWrapper.java
index 9075032..22d2b90 100644
--- a/core/src/main/java/org/apache/carbondata/core/datamap/dev/expr/DataMapDistributableWrapper.java
+++ b/core/src/main/java/org/apache/carbondata/core/datamap/dev/expr/DataMapDistributableWrapper.java
@@ -42,10 +42,6 @@ public class DataMapDistributableWrapper extends InputSplit implements Serializa
     return distributable;
   }
 
-  public void setDistributable(DataMapDistributable distributable) {
-    this.distributable = distributable;
-  }
-
   @Override public long getLength() throws IOException, InterruptedException {
     return distributable.getLength();
   }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/f9114036/core/src/main/java/org/apache/carbondata/core/datastore/SegmentTaskIndexStore.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/SegmentTaskIndexStore.java b/core/src/main/java/org/apache/carbondata/core/datastore/SegmentTaskIndexStore.java
deleted file mode 100644
index c642091..0000000
--- a/core/src/main/java/org/apache/carbondata/core/datastore/SegmentTaskIndexStore.java
+++ /dev/null
@@ -1,481 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.carbondata.core.datastore;
-
-import java.io.IOException;
-import java.io.Serializable;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-import java.util.concurrent.ConcurrentHashMap;
-
-import org.apache.carbondata.common.logging.LogService;
-import org.apache.carbondata.common.logging.LogServiceFactory;
-import org.apache.carbondata.core.cache.Cache;
-import org.apache.carbondata.core.cache.CarbonLRUCache;
-import org.apache.carbondata.core.datastore.block.AbstractIndex;
-import org.apache.carbondata.core.datastore.block.SegmentProperties;
-import org.apache.carbondata.core.datastore.block.SegmentTaskIndex;
-import org.apache.carbondata.core.datastore.block.SegmentTaskIndexWrapper;
-import org.apache.carbondata.core.datastore.block.TableBlockInfo;
-import org.apache.carbondata.core.datastore.exception.IndexBuilderException;
-import org.apache.carbondata.core.memory.MemoryException;
-import org.apache.carbondata.core.metadata.AbsoluteTableIdentifier;
-import org.apache.carbondata.core.metadata.blocklet.DataFileFooter;
-import org.apache.carbondata.core.metadata.schema.table.CarbonTable;
-import org.apache.carbondata.core.metadata.schema.table.column.ColumnSchema;
-import org.apache.carbondata.core.mutate.UpdateVO;
-import org.apache.carbondata.core.statusmanager.SegmentUpdateStatusManager;
-import org.apache.carbondata.core.util.CarbonUtil;
-import org.apache.carbondata.core.util.ObjectSizeCalculator;
-import org.apache.carbondata.core.util.path.CarbonTablePath;
-import org.apache.carbondata.core.util.path.CarbonTablePath.DataFileUtil;
-
-/**
- * Class to handle loading, unloading,clearing,storing of the table
- * blocks
- */
-public class SegmentTaskIndexStore
-    implements Cache<TableSegmentUniqueIdentifier, SegmentTaskIndexWrapper> {
-  private static final LogService LOGGER =
-      LogServiceFactory.getLogService(SegmentTaskIndexStore.class.getName());
-  /**
-   * CarbonLRU cache
-   */
-  protected CarbonLRUCache lruCache;
-
-  /**
-   * map of block info to lock object map, while loading the btree this will be filled
-   * and removed after loading the tree for that particular block info, this will be useful
-   * while loading the tree concurrently so only block level lock will be applied another
-   * block can be loaded concurrently
-   */
-  private Map<String, Object> segmentLockMap;
-
-  private Map<SegmentPropertiesWrapper, SegmentProperties> segmentProperties =
-      new HashMap<SegmentPropertiesWrapper, SegmentProperties>();
-
-  /**
-   * constructor to initialize the SegmentTaskIndexStore
-   *
-   * @param lruCache
-   */
-  public SegmentTaskIndexStore(CarbonLRUCache lruCache) {
-    this.lruCache = lruCache;
-    segmentLockMap = new ConcurrentHashMap<String, Object>();
-  }
-
-  @Override
-  public SegmentTaskIndexWrapper get(TableSegmentUniqueIdentifier tableSegmentUniqueIdentifier)
-      throws IOException {
-    SegmentTaskIndexWrapper segmentTaskIndexWrapper = null;
-    try {
-      segmentTaskIndexWrapper =
-          loadAndGetTaskIdToSegmentsMap(
-              tableSegmentUniqueIdentifier.getSegmentToTableBlocksInfos(),
-              CarbonTable.buildDummyTable("path"),
-              tableSegmentUniqueIdentifier);
-    } catch (IndexBuilderException e) {
-      throw new IOException(e.getMessage(), e);
-    } catch (Throwable e) {
-      throw new IOException("Problem in loading segment block.", e);
-    }
-    return segmentTaskIndexWrapper;
-  }
-
-  @Override public List<SegmentTaskIndexWrapper> getAll(
-      List<TableSegmentUniqueIdentifier> tableSegmentUniqueIdentifiers) throws IOException {
-    List<SegmentTaskIndexWrapper> segmentTaskIndexWrappers =
-        new ArrayList<>(tableSegmentUniqueIdentifiers.size());
-    try {
-      for (TableSegmentUniqueIdentifier segmentUniqueIdentifier : tableSegmentUniqueIdentifiers) {
-        segmentTaskIndexWrappers.add(get(segmentUniqueIdentifier));
-      }
-    } catch (Throwable e) {
-      for (SegmentTaskIndexWrapper segmentTaskIndexWrapper : segmentTaskIndexWrappers) {
-        segmentTaskIndexWrapper.clear();
-      }
-      throw new IOException("Problem in loading segment blocks.", e);
-    }
-    return segmentTaskIndexWrappers;
-  }
-
-  /**
-   * returns the SegmentTaskIndexWrapper
-   *
-   * @param tableSegmentUniqueIdentifier
-   * @return
-   */
-  @Override public SegmentTaskIndexWrapper getIfPresent(
-      TableSegmentUniqueIdentifier tableSegmentUniqueIdentifier) {
-    SegmentTaskIndexWrapper segmentTaskIndexWrapper = (SegmentTaskIndexWrapper) lruCache
-        .get(tableSegmentUniqueIdentifier.getUniqueTableSegmentIdentifier());
-    if (null != segmentTaskIndexWrapper) {
-      segmentTaskIndexWrapper.incrementAccessCount();
-    }
-    return segmentTaskIndexWrapper;
-  }
-
-  /**
-   * method invalidate the segment cache for segment
-   *
-   * @param tableSegmentUniqueIdentifier
-   */
-  @Override public void invalidate(TableSegmentUniqueIdentifier tableSegmentUniqueIdentifier) {
-    lruCache.remove(tableSegmentUniqueIdentifier.getUniqueTableSegmentIdentifier());
-  }
-
-  @Override
-  public void put(TableSegmentUniqueIdentifier key, SegmentTaskIndexWrapper value)
-      throws IOException, MemoryException {
-    throw new UnsupportedOperationException("Operation not supported");
-  }
-
-  /**
-   * returns block timestamp value from the given task
-   * @param taskKey
-   * @param listOfUpdatedFactFiles
-   * @return
-   */
-  private String getTimeStampValueFromBlock(String taskKey, List<String> listOfUpdatedFactFiles) {
-    for (String blockName : listOfUpdatedFactFiles) {
-      if (taskKey.equals(CarbonTablePath.DataFileUtil.getTaskNo(blockName))) {
-        blockName = blockName.substring(blockName.lastIndexOf('-') + 1, blockName.lastIndexOf('.'));
-        return blockName;
-      }
-    }
-    return null;
-  }
-
-  /**
-   * Below method will be used to load the segment of segments
-   * One segment may have multiple task , so  table segment will be loaded
-   * based on task id and will return the map of taksId to table segment
-   * map
-   *
-   * @param segmentToTableBlocksInfos segment id to block info
-   * @param table   table handle
-   * @return map of taks id to segment mapping
-   * @throws IOException
-   */
-  private SegmentTaskIndexWrapper loadAndGetTaskIdToSegmentsMap(
-      Map<String, List<TableBlockInfo>> segmentToTableBlocksInfos,
-      CarbonTable table,
-      TableSegmentUniqueIdentifier tableSegmentUniqueIdentifier) throws IOException {
-    // task id to segment map
-    Iterator<Map.Entry<String, List<TableBlockInfo>>> iteratorOverSegmentBlocksInfos =
-        segmentToTableBlocksInfos.entrySet().iterator();
-    Map<TaskBucketHolder, AbstractIndex> taskIdToSegmentIndexMap = null;
-    SegmentTaskIndexWrapper segmentTaskIndexWrapper = null;
-    SegmentUpdateStatusManager updateStatusManager = new SegmentUpdateStatusManager(table);
-    String segmentId = null;
-    TaskBucketHolder taskBucketHolder = null;
-    try {
-      while (iteratorOverSegmentBlocksInfos.hasNext()) {
-        // Initialize the UpdateVO to Null for each segment.
-        UpdateVO updateVO = null;
-        // segment id to table block mapping
-        Map.Entry<String, List<TableBlockInfo>> next = iteratorOverSegmentBlocksInfos.next();
-        // group task id to table block info mapping for the segment
-        Map<TaskBucketHolder, List<TableBlockInfo>> taskIdToTableBlockInfoMap =
-            mappedAndGetTaskIdToTableBlockInfo(segmentToTableBlocksInfos);
-        segmentId = next.getKey();
-        // updateVO is only required when Updates Or Delete performed on the Table.
-        if (updateStatusManager.getUpdateStatusDetails().length != 0) {
-          // get the existing map of task id to table segment map
-          updateVO = updateStatusManager.getInvalidTimestampRange(segmentId);
-        }
-        // check if segment is already loaded, if segment is already loaded
-        //no need to load the segment block
-        String lruCacheKey = tableSegmentUniqueIdentifier.getUniqueTableSegmentIdentifier();
-        segmentTaskIndexWrapper = (SegmentTaskIndexWrapper) lruCache.get(lruCacheKey);
-        if ((segmentTaskIndexWrapper == null) || ((null != updateVO)
-            && (tableSegmentUniqueIdentifier.isSegmentUpdated()))) {
-          // get the segment loader lock object this is to avoid
-          // same segment is getting loaded multiple times
-          // in case of concurrent query
-          Object segmentLoderLockObject = segmentLockMap.get(lruCacheKey);
-          if (null == segmentLoderLockObject) {
-            segmentLoderLockObject = addAndGetSegmentLock(lruCacheKey);
-          }
-          // acquire lock to lod the segment
-          synchronized (segmentLoderLockObject) {
-            segmentTaskIndexWrapper = (SegmentTaskIndexWrapper) lruCache.get(lruCacheKey);
-            if ((null == segmentTaskIndexWrapper) || ((null != updateVO)
-                && (tableSegmentUniqueIdentifier.isSegmentUpdated()))) {
-              // if the segment is updated then get the existing block task id map details
-              // so that the same can be updated after loading the btree.
-              if (tableSegmentUniqueIdentifier.isSegmentUpdated()
-                  && null != segmentTaskIndexWrapper) {
-                taskIdToSegmentIndexMap = segmentTaskIndexWrapper.getTaskIdToTableSegmentMap();
-              } else {
-                // creating a map of take if to table segment
-                taskIdToSegmentIndexMap = new HashMap<TaskBucketHolder, AbstractIndex>();
-                segmentTaskIndexWrapper = new SegmentTaskIndexWrapper(taskIdToSegmentIndexMap);
-                segmentTaskIndexWrapper.incrementAccessCount();
-              }
-              Iterator<Map.Entry<TaskBucketHolder, List<TableBlockInfo>>> iterator =
-                  taskIdToTableBlockInfoMap.entrySet().iterator();
-              AbsoluteTableIdentifier absoluteTableIdentifier = table.getAbsoluteTableIdentifier();
-              long requiredSize =
-                  calculateRequiredSize(taskIdToTableBlockInfoMap, absoluteTableIdentifier);
-              segmentTaskIndexWrapper.setMemorySize(requiredSize);
-              boolean canAddToLruCache =
-                  lruCache.tryPut(lruCacheKey, requiredSize);
-              if (canAddToLruCache) {
-                while (iterator.hasNext()) {
-                  Map.Entry<TaskBucketHolder, List<TableBlockInfo>> taskToBlockInfoList =
-                      iterator.next();
-                  taskBucketHolder = taskToBlockInfoList.getKey();
-                  taskIdToSegmentIndexMap.put(taskBucketHolder,
-                      loadBlocks(taskBucketHolder, taskToBlockInfoList.getValue(),
-                          absoluteTableIdentifier));
-                }
-                long updatedRequiredSize =
-                    ObjectSizeCalculator.estimate(segmentTaskIndexWrapper, requiredSize);
-                // update the actual size of object
-                segmentTaskIndexWrapper.setMemorySize(updatedRequiredSize);
-                if (!lruCache.put(lruCacheKey, segmentTaskIndexWrapper, updatedRequiredSize)) {
-                  throw new IndexBuilderException(
-                          "Can not load the segment. No Enough space 
available.");
-                }
-
-              } else {
-                throw new IndexBuilderException(
-                    "Can not load the segment. No Enough space available.");
-              }
-
-              // Refresh the Timestamp for those tables which underwent through IUD Operations.
-              if (null != updateVO) {
-                // set the latest timestamp.
-                segmentTaskIndexWrapper
-                    .setRefreshedTimeStamp(updateVO.getCreatedOrUpdatedTimeStamp());
-              } else {
-                segmentTaskIndexWrapper.setRefreshedTimeStamp(0L);
-              }
-              // tableSegmentMapTemp.put(next.getKey(), taskIdToSegmentIndexMap);
-              // removing from segment lock map as once segment is loaded
-              // if concurrent query is coming for same segment
-              // it will wait on the lock so after this segment will be already
-              // loaded so lock is not required, that is why removing the
-              // the lock object as it wont be useful
-              segmentLockMap.remove(lruCacheKey);
-            } else {
-              segmentTaskIndexWrapper.incrementAccessCount();
-            }
-          }
-        } else {
-          segmentTaskIndexWrapper.incrementAccessCount();
-        }
-      }
-    } catch (IndexBuilderException e) {
-      LOGGER.error("Problem while loading the segment");
-      throw e;
-    }
-    return segmentTaskIndexWrapper;
-  }
-
-  private long calculateRequiredSize(
-      Map<TaskBucketHolder, List<TableBlockInfo>> taskIdToTableBlockInfoMap,
-      AbsoluteTableIdentifier absoluteTableIdentifier) {
-    Iterator<Map.Entry<TaskBucketHolder, List<TableBlockInfo>>> iterator =
-        taskIdToTableBlockInfoMap.entrySet().iterator();
-    TaskBucketHolder taskBucketHolder;
-    long driverBTreeSize = 0;
-    while (iterator.hasNext()) {
-      Map.Entry<TaskBucketHolder, List<TableBlockInfo>> taskToBlockInfoList = iterator.next();
-      taskBucketHolder = taskToBlockInfoList.getKey();
-      driverBTreeSize += CarbonUtil
-          .calculateDriverBTreeSize(taskBucketHolder.taskNo, taskBucketHolder.bucketNumber,
-              taskToBlockInfoList.getValue(), absoluteTableIdentifier);
-    }
-    return driverBTreeSize;
-  }
-
-  /**
-   * Below method will be used to get the task id to all the table block info belongs to
-   * that task id mapping
-   *
-   * @param segmentToTableBlocksInfos segment if to table blocks info map
-   * @return task id to table block info mapping
-   */
-  private Map<TaskBucketHolder, List<TableBlockInfo>> mappedAndGetTaskIdToTableBlockInfo(
-      Map<String, List<TableBlockInfo>> segmentToTableBlocksInfos) {
-    Map<TaskBucketHolder, List<TableBlockInfo>> taskIdToTableBlockInfoMap =
-        new ConcurrentHashMap<>();
-    Iterator<Entry<String, List<TableBlockInfo>>> iterator =
-        segmentToTableBlocksInfos.entrySet().iterator();
-    while (iterator.hasNext()) {
-      Entry<String, List<TableBlockInfo>> next = iterator.next();
-      List<TableBlockInfo> value = next.getValue();
-      for (TableBlockInfo blockInfo : value) {
-        String taskNo = DataFileUtil.getTaskNo(blockInfo.getFilePath());
-        String bucketNo = DataFileUtil.getBucketNo(blockInfo.getFilePath());
-        TaskBucketHolder bucketHolder = new TaskBucketHolder(taskNo, bucketNo);
-        List<TableBlockInfo> list = taskIdToTableBlockInfoMap.get(bucketHolder);
-        if (null == list) {
-          list = new ArrayList<TableBlockInfo>();
-          taskIdToTableBlockInfoMap.put(bucketHolder, list);
-        }
-        list.add(blockInfo);
-      }
-
-    }
-    return taskIdToTableBlockInfoMap;
-  }
-
-  /**
-   * Below method will be used to get the segment level lock object
-   *
-   * @param segmentId
-   * @return lock object
-   */
-  private synchronized Object addAndGetSegmentLock(String segmentId) {
-    // get the segment lock object if it is present then return
-    // otherwise add the new lock and return
-    Object segmentLoderLockObject = segmentLockMap.get(segmentId);
-    if (null == segmentLoderLockObject) {
-      segmentLoderLockObject = new Object();
-      segmentLockMap.put(segmentId, segmentLoderLockObject);
-    }
-    return segmentLoderLockObject;
-  }
-
-  /**
-   * Below method will be used to load the blocks
-   *
-   * @param tableBlockInfoList
-   * @return loaded segment
-   * @throws IOException
-   */
-  private AbstractIndex loadBlocks(TaskBucketHolder taskBucketHolder,
-      List<TableBlockInfo> tableBlockInfoList, AbsoluteTableIdentifier tableIdentifier)
-      throws IOException {
-    // all the block of one task id will be loaded together
-    // so creating a list which will have all the data file meta data to of one task
-    List<DataFileFooter> footerList = CarbonUtil
-        .readCarbonIndexFile(taskBucketHolder.taskNo, taskBucketHolder.bucketNumber,
-            tableBlockInfoList, tableIdentifier);
-
-    // Reuse SegmentProperties object if tableIdentifier, columnsInTable and columnCardinality are
-    // the same.
-    List<ColumnSchema> columnsInTable = footerList.get(0).getColumnInTable();
-    int[] columnCardinality = footerList.get(0).getSegmentInfo().getColumnCardinality();
-    SegmentPropertiesWrapper segmentPropertiesWrapper =
-        new SegmentPropertiesWrapper(tableIdentifier, columnsInTable, columnCardinality);
-    SegmentProperties segmentProperties;
-    if (this.segmentProperties.containsKey(segmentPropertiesWrapper)) {
-      segmentProperties = this.segmentProperties.get(segmentPropertiesWrapper);
-    } else {
-      // create a metadata details
-      // this will be useful in query handling
-      // all the data file metadata will have common segment properties we
-      // can use first one to get create the segment properties
-      segmentProperties = new SegmentProperties(columnsInTable, columnCardinality);
-      this.segmentProperties.put(segmentPropertiesWrapper, segmentProperties);
-    }
-
-    AbstractIndex segment = new SegmentTaskIndex(segmentProperties);
-    // file path of only first block is passed as it all table block info path of
-    // same task id will be same
-    segment.buildIndex(footerList);
-    return segment;
-  }
-
-  /**
-   * The method clears the access count of table segments
-   *
-   * @param tableSegmentUniqueIdentifiers
-   */
-  @Override
-  public void clearAccessCount(List<TableSegmentUniqueIdentifier> tableSegmentUniqueIdentifiers) {
-    for (TableSegmentUniqueIdentifier segmentUniqueIdentifier : tableSegmentUniqueIdentifiers) {
-      SegmentTaskIndexWrapper cacheable = (SegmentTaskIndexWrapper) lruCache
-          .get(segmentUniqueIdentifier.getUniqueTableSegmentIdentifier());
-      cacheable.clear();
-    }
-  }
-
-  public static class TaskBucketHolder implements Serializable {
-
-    public String taskNo;
-
-    public String bucketNumber;
-
-    public TaskBucketHolder(String taskNo, String bucketNumber) {
-      this.taskNo = taskNo;
-      this.bucketNumber = bucketNumber;
-    }
-
-    @Override public boolean equals(Object o) {
-      if (this == o) return true;
-      if (o == null || getClass() != o.getClass()) return false;
-
-      TaskBucketHolder that = (TaskBucketHolder) o;
-
-      if (taskNo != null ? !taskNo.equals(that.taskNo) : that.taskNo != null) return false;
-      return bucketNumber != null ?
-          bucketNumber.equals(that.bucketNumber) :
-          that.bucketNumber == null;
-
-    }
-
-    @Override public int hashCode() {
-      int result = taskNo != null ? taskNo.hashCode() : 0;
-      result = 31 * result + (bucketNumber != null ? bucketNumber.hashCode() : 0);
-      return result;
-    }
-  }
-
-  /**
-   * This class wraps tableIdentifier, columnsInTable and columnCardinality as a key to determine
-   * whether the SegmentProperties object can be reused.
-   */
-  public static class SegmentPropertiesWrapper {
-    private AbsoluteTableIdentifier tableIdentifier;
-    private List<ColumnSchema> columnsInTable;
-    private int[] columnCardinality;
-
-    public SegmentPropertiesWrapper(AbsoluteTableIdentifier tableIdentifier,
-        List<ColumnSchema> columnsInTable, int[] columnCardinality) {
-      this.tableIdentifier = tableIdentifier;
-      this.columnsInTable = columnsInTable;
-      this.columnCardinality = columnCardinality;
-    }
-
-    @Override
-    public boolean equals(Object obj) {
-      if (!(obj instanceof SegmentPropertiesWrapper)) {
-        return false;
-      }
-      SegmentPropertiesWrapper other = (SegmentPropertiesWrapper) obj;
-      return tableIdentifier.equals(other.tableIdentifier)
-        && columnsInTable.equals(other.columnsInTable)
-        && Arrays.equals(columnCardinality, other.columnCardinality);
-    }
-
-    @Override
-    public int hashCode() {
-      return tableIdentifier.hashCode()
-        + columnsInTable.hashCode() + Arrays.hashCode(columnCardinality);
-    }
-  }
-}
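
The removed SegmentTaskIndexStore serialized concurrent loads of the same segment with one lock object per segment id (addAndGetSegmentLock above) and dropped the lock once the wrapper was cached, since later readers then hit the LRU cache directly. A minimal sketch of that get-or-create pattern, assuming a plain String segment id; SegmentLockRegistry is a hypothetical name, and computeIfAbsent is a modern restatement of the removed synchronized method, not code from this commit:

    import java.util.Map;
    import java.util.concurrent.ConcurrentHashMap;

    // Sketch: one lock object per segment id, created lazily and shared by
    // every loader of that segment.
    public class SegmentLockRegistry {
      private final Map<String, Object> segmentLockMap = new ConcurrentHashMap<>();

      // Same get-or-create semantics as the removed addAndGetSegmentLock,
      // without a method-level synchronized block.
      public Object getOrCreateLock(String segmentId) {
        return segmentLockMap.computeIfAbsent(segmentId, k -> new Object());
      }

      // Once the segment is cached the lock is no longer useful, mirroring
      // the segmentLockMap.remove(lruCacheKey) call in the code above.
      public void release(String segmentId) {
        segmentLockMap.remove(segmentId);
      }
    }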

http://git-wip-us.apache.org/repos/asf/carbondata/blob/f9114036/core/src/main/java/org/apache/carbondata/core/datastore/TableSegmentUniqueIdentifier.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/TableSegmentUniqueIdentifier.java b/core/src/main/java/org/apache/carbondata/core/datastore/TableSegmentUniqueIdentifier.java
index 888689b..18b856b 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/TableSegmentUniqueIdentifier.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/TableSegmentUniqueIdentifier.java
@@ -17,11 +17,7 @@
 
 package org.apache.carbondata.core.datastore;
 
-import java.util.List;
-import java.util.Map;
-
 import org.apache.carbondata.core.constants.CarbonCommonConstants;
-import org.apache.carbondata.core.datastore.block.TableBlockInfo;
 import org.apache.carbondata.core.metadata.AbsoluteTableIdentifier;
 import org.apache.carbondata.core.metadata.CarbonTableIdentifier;
 
@@ -34,13 +30,7 @@ public class TableSegmentUniqueIdentifier {
    */
   private AbsoluteTableIdentifier absoluteTableIdentifier;
 
-  /**
-   * segment to tableBlockInfo map
-   */
-  Map<String, List<TableBlockInfo>> segmentToTableBlocksInfos;
-
   private String segmentId;
-  private  boolean isSegmentUpdated;
 
   /**
    * Constructor to initialize the class instance
@@ -53,42 +43,6 @@ public class TableSegmentUniqueIdentifier {
     this.segmentId = segmentId;
   }
 
-  public TableSegmentUniqueIdentifier(AbsoluteTableIdentifier absoluteTableIdentifier,
-      Map<String, List<TableBlockInfo>> segmentToTableBlocksInfos, String segmentId) {
-    this.absoluteTableIdentifier = absoluteTableIdentifier;
-    this.segmentToTableBlocksInfos = segmentToTableBlocksInfos;
-    this.segmentId = segmentId;
-  }
-
-  /**
-   * returns AbsoluteTableIdentifier
-   * @return
-   */
-  public AbsoluteTableIdentifier getAbsoluteTableIdentifier() {
-    return absoluteTableIdentifier;
-  }
-
-  public void setAbsoluteTableIdentifier(AbsoluteTableIdentifier absoluteTableIdentifier) {
-    this.absoluteTableIdentifier = absoluteTableIdentifier;
-  }
-
-  /**
-   *  returns the segment to tableBlockInfo map
-   * @return
-   */
-  public Map<String, List<TableBlockInfo>> getSegmentToTableBlocksInfos() {
-    return segmentToTableBlocksInfos;
-  }
-
-  /**
-   * set the segment to tableBlockInfo map
-   * @param segmentToTableBlocksInfos
-   */
-  public void setSegmentToTableBlocksInfos(
-      Map<String, List<TableBlockInfo>> segmentToTableBlocksInfos) {
-    this.segmentToTableBlocksInfos = segmentToTableBlocksInfos;
-  }
-
   public String getSegmentId() {
     return segmentId;
   }
@@ -105,13 +59,6 @@ public class TableSegmentUniqueIdentifier {
            + carbonTableIdentifier.getTableName() + CarbonCommonConstants.UNDERSCORE
            + carbonTableIdentifier.getTableId() + CarbonCommonConstants.FILE_SEPARATOR + segmentId;
   }
-  public void setIsSegmentUpdated(boolean isSegmentUpdated) {
-    this.isSegmentUpdated = isSegmentUpdated;
-  }
-
-  public boolean isSegmentUpdated() {
-    return isSegmentUpdated;
-  }
 
   /**
    * equals method to compare two objects having same
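
After this cleanup the class carries only what the cache key needs. Per the context above, the unique identifier concatenates database name, table name and table id with underscores and appends the segment id after a file separator. A hedged sketch of that key shape, assuming CarbonCommonConstants.UNDERSCORE is "_" and FILE_SEPARATOR is "/" (their conventional values); uniqueKey is a hypothetical standalone helper, not a method in this commit:

    // Sketch of the surviving cache-key format built by
    // getUniqueTableSegmentIdentifier(), with the constants inlined.
    static String uniqueKey(String db, String table, String tableId, String segmentId) {
      return db + "_" + table + "_" + tableId + "/" + segmentId;
    }
    // e.g. uniqueKey("default", "sales", "t001", "2") -> "default_sales_t001/2"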

http://git-wip-us.apache.org/repos/asf/carbondata/blob/f9114036/core/src/main/java/org/apache/carbondata/core/datastore/TableSpec.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/TableSpec.java b/core/src/main/java/org/apache/carbondata/core/datastore/TableSpec.java
index 93da81e..4d30cb0 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/TableSpec.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/TableSpec.java
@@ -50,12 +50,8 @@ public class TableSpec {
     // first calculate total number of columnar field considering column group and complex column
     numSimpleDimensions = 0;
     for (CarbonDimension dimension : dimensions) {
-      if (dimension.isColumnar()) {
-        if (!dimension.isComplex()) {
-          numSimpleDimensions++;
-        }
-      } else {
-        throw new UnsupportedOperationException("column group is not supported");
+      if (!dimension.isComplex()) {
+        numSimpleDimensions++;
       }
     }
     dimensionSpec = new DimensionSpec[dimensions.size()];
@@ -68,24 +64,22 @@ public class TableSpec {
     int dimIndex = 0;
     for (int i = 0; i < dimensions.size(); i++) {
       CarbonDimension dimension = dimensions.get(i);
-      if (dimension.isColumnar()) {
-        if (dimension.isComplex()) {
-          DimensionSpec spec = new DimensionSpec(ColumnType.COMPLEX, dimension);
-          dimensionSpec[dimIndex++] = spec;
-        } else if (dimension.getDataType() == DataTypes.TIMESTAMP && !dimension
-            .isDirectDictionaryEncoding()) {
-          DimensionSpec spec = new DimensionSpec(ColumnType.PLAIN_VALUE, dimension);
-          dimensionSpec[dimIndex++] = spec;
-        } else if (dimension.isDirectDictionaryEncoding()) {
-          DimensionSpec spec = new DimensionSpec(ColumnType.DIRECT_DICTIONARY, dimension);
-          dimensionSpec[dimIndex++] = spec;
-        } else if (dimension.isGlobalDictionaryEncoding()) {
-          DimensionSpec spec = new DimensionSpec(ColumnType.GLOBAL_DICTIONARY, dimension);
-          dimensionSpec[dimIndex++] = spec;
-        } else {
-          DimensionSpec spec = new DimensionSpec(ColumnType.PLAIN_VALUE, dimension);
-          dimensionSpec[dimIndex++] = spec;
-        }
+      if (dimension.isComplex()) {
+        DimensionSpec spec = new DimensionSpec(ColumnType.COMPLEX, dimension);
+        dimensionSpec[dimIndex++] = spec;
+      } else if (dimension.getDataType() == DataTypes.TIMESTAMP && !dimension
+          .isDirectDictionaryEncoding()) {
+        DimensionSpec spec = new DimensionSpec(ColumnType.PLAIN_VALUE, dimension);
+        dimensionSpec[dimIndex++] = spec;
+      } else if (dimension.isDirectDictionaryEncoding()) {
+        DimensionSpec spec = new DimensionSpec(ColumnType.DIRECT_DICTIONARY, dimension);
+        dimensionSpec[dimIndex++] = spec;
+      } else if (dimension.isGlobalDictionaryEncoding()) {
+        DimensionSpec spec = new DimensionSpec(ColumnType.GLOBAL_DICTIONARY, dimension);
+        dimensionSpec[dimIndex++] = spec;
+      } else {
+        DimensionSpec spec = new DimensionSpec(ColumnType.PLAIN_VALUE, dimension);
+        dimensionSpec[dimIndex++] = spec;
       }
     }
   }
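
With column groups gone, the dimension classification in TableSpec reduces to the flat if/else chain shown above. As a standalone sketch of the decision order (a hypothetical extraction, not a method in this commit; ColumnType, CarbonDimension and DataTypes are the types already used in TableSpec.java): the TIMESTAMP test must come before the direct-dictionary test, because a TIMESTAMP column without direct dictionary encoding stays a plain value.

    // Sketch: the classification order from the new TableSpec code.
    static ColumnType classify(CarbonDimension dim) {
      if (dim.isComplex()) {
        return ColumnType.COMPLEX;
      } else if (dim.getDataType() == DataTypes.TIMESTAMP
          && !dim.isDirectDictionaryEncoding()) {
        return ColumnType.PLAIN_VALUE;       // timestamp kept as plain value
      } else if (dim.isDirectDictionaryEncoding()) {
        return ColumnType.DIRECT_DICTIONARY;
      } else if (dim.isGlobalDictionaryEncoding()) {
        return ColumnType.GLOBAL_DICTIONARY;
      } else {
        return ColumnType.PLAIN_VALUE;       // default for everything else
      }
    }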

http://git-wip-us.apache.org/repos/asf/carbondata/blob/f9114036/core/src/main/java/org/apache/carbondata/core/datastore/block/AbstractIndex.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/block/AbstractIndex.java b/core/src/main/java/org/apache/carbondata/core/datastore/block/AbstractIndex.java
index 4d0e56d..7fbef8a 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/block/AbstractIndex.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/block/AbstractIndex.java
@@ -38,11 +38,6 @@ public abstract class AbstractIndex implements Cacheable {
   protected DataRefNode dataRefNode;
 
   /**
-   * total number of row present in the block
-   */
-  protected long totalNumberOfRows;
-
-  /**
    * atomic integer to maintain the access count for a column access
    */
   protected AtomicInteger accessCount = new AtomicInteger();
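
The dropped totalNumberOfRows counter was written only by the deleted SegmentTaskIndex.buildIndex, so AbstractIndex keeps just the AtomicInteger access count that the LRU cache depends on. A minimal sketch of that pin/unpin protocol, assuming the Cacheable contract visible elsewhere in this diff (incrementAccessCount on read, clear via clearAccessCount); CountedEntry is a hypothetical name:

    import java.util.concurrent.atomic.AtomicInteger;

    // Sketch: reference-count style pinning so the LRU cache never evicts
    // an index that a running query still holds.
    class CountedEntry {
      private final AtomicInteger accessCount = new AtomicInteger();

      void incrementAccessCount() {   // pin: a reader takes the entry
        accessCount.incrementAndGet();
      }

      void clear() {                  // unpin: the reader is done with it
        accessCount.decrementAndGet();
      }

      boolean isEvictable() {         // the cache may drop it only when unpinned
        return accessCount.get() == 0;
      }
    }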

http://git-wip-us.apache.org/repos/asf/carbondata/blob/f9114036/core/src/main/java/org/apache/carbondata/core/datastore/block/SegmentProperties.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/block/SegmentProperties.java b/core/src/main/java/org/apache/carbondata/core/datastore/block/SegmentProperties.java
index f652381..d507937 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/block/SegmentProperties.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/block/SegmentProperties.java
@@ -27,7 +27,6 @@ import java.util.Map.Entry;
 import java.util.Set;
 
 import org.apache.carbondata.core.constants.CarbonCommonConstants;
-import org.apache.carbondata.core.datastore.columnar.ColumnGroupModel;
 import org.apache.carbondata.core.keygenerator.KeyGenerator;
 import org.apache.carbondata.core.keygenerator.columnar.ColumnarSplitter;
 import org.apache.carbondata.core.keygenerator.columnar.impl.MultiDimKeyVarLengthVariableSplitGenerator;
@@ -126,19 +125,6 @@ public class SegmentProperties {
   private int[] eachComplexDimColumnValueSize;
 
   /**
-   * below mapping will have mapping of the column group to dimensions ordinal
-   * for example if 3 dimension present in the columngroupid 0 and its ordinal in
-   * 2,3,4 then map will contain 0,{2,3,4}
-   */
-  private Map<Integer, KeyGenerator> columnGroupAndItsKeygenartor;
-
-  /**
-   * column group key generator dimension index will not be same as dimension ordinal
-   * This will have mapping with ordinal and keygenerator or mdkey index
-   */
-  private Map<Integer, Map<Integer, Integer>> columnGroupOrdinalToMdkeymapping;
-
-  /**
    * this will be used to split the fixed length key
    * this will all the information about how key was created
    * and how to split the key based on group
@@ -153,11 +139,6 @@ public class SegmentProperties {
    */
   private int numberOfNoDictionaryDimension;
 
-  /**
-   * column group model
-   */
-  private ColumnGroupModel colGroupModel;
-
   private int numberOfSortColumns = 0;
 
   private int numberOfNoDictSortColumns = 0;
@@ -176,50 +157,12 @@ public class SegmentProperties {
        new HashMap<Integer, Set<Integer>>(CarbonCommonConstants.DEFAULT_COLLECTION_SIZE);
    measuresOrdinalToChunkMapping =
        new HashMap<Integer, Integer>(CarbonCommonConstants.DEFAULT_COLLECTION_SIZE);
-    intialiseColGroups();
     fillOrdinalToBlockMappingForDimension();
     fillOrdinalToChunkIndexMappingForMeasureColumns();
-    fillColumnGroupAndItsCardinality(columnCardinality);
     fillKeyGeneratorDetails();
   }
 
   /**
-   * it fills column groups
-   * e.g {{1},{2,3,4},{5},{6},{7,8,9}}
-   *
-   */
-  private void intialiseColGroups() {
-    List<List<Integer>> colGrpList = new ArrayList<List<Integer>>();
-    List<Integer> group = new ArrayList<Integer>();
-    for (int i = 0; i < dimensions.size(); i++) {
-      CarbonDimension dimension = dimensions.get(i);
-      if (!dimension.hasEncoding(Encoding.DICTIONARY)) {
-        continue;
-      }
-      group.add(dimension.getOrdinal());
-      if (i < dimensions.size() - 1) {
-        int currGroupOrdinal = dimension.columnGroupId();
-        int nextGroupOrdinal = dimensions.get(i + 1).columnGroupId();
-        if (!(currGroupOrdinal == nextGroupOrdinal && currGroupOrdinal != -1)) {
-          colGrpList.add(group);
-          group = new ArrayList<Integer>();
-        }
-      } else {
-        colGrpList.add(group);
-      }
-
-    }
-    int[][] colGroups = new int[colGrpList.size()][];
-    for (int i = 0; i < colGroups.length; i++) {
-      colGroups[i] = new int[colGrpList.get(i).size()];
-      for (int j = 0; j < colGroups[i].length; j++) {
-        colGroups[i][j] = colGrpList.get(i).get(j);
-      }
-    }
-    this.colGroupModel = CarbonUtil.getColGroupModel(colGroups);
-  }
-
-  /**
    * below method is to fill the dimension and its mapping to file blocks all
    * the column will point to same column group
    */
@@ -227,16 +170,10 @@ public class SegmentProperties {
     int blockOrdinal = -1;
     CarbonDimension dimension = null;
     int index = 0;
-    int prvcolumnGroupId = -1;
     while (index < dimensions.size()) {
       dimension = dimensions.get(index);
-      // if column id is same as previous one then block index will be
-      // same
-      if (dimension.isColumnar() || dimension.columnGroupId() != prvcolumnGroupId) {
-        blockOrdinal++;
-      }
+      blockOrdinal++;
       dimensionOrdinalToChunkMapping.put(dimension.getOrdinal(), blockOrdinal);
-      prvcolumnGroupId = dimension.columnGroupId();
       index++;
     }
     index = 0;
@@ -333,9 +270,6 @@ public class SegmentProperties {
     // to store the position of dimension in surrogate key array which is
     // participating in mdkey
     int keyOrdinal = 0;
-    int previousColumnGroup = -1;
-    // to store the ordinal of the column group ordinal
-    int columnGroupOrdinal = 0;
     int counter = 0;
     int complexTypeOrdinal = -1;
     while (counter < columnsInTable.size()) {
@@ -350,36 +284,17 @@ public class SegmentProperties {
           if (columnSchema.isSortColumn()) {
             this.numberOfSortColumns++;
           }
-          if (columnSchema.isColumnar()) {
-            // if it is a columnar dimension participated in mdkey then added
-            // key ordinal and dimension ordinal
-            carbonDimension =
-                new CarbonDimension(columnSchema, dimensonOrdinal++, keyOrdinal++, -1, -1);
-          } else {
-            // if not columnnar then it is a column group dimension
-
-            // below code to handle first dimension of the column group
-            // in this case ordinal of the column group will be 0
-            if (previousColumnGroup != columnSchema.getColumnGroupId()) {
-              columnGroupOrdinal = 0;
-              carbonDimension = new CarbonDimension(columnSchema, dimensonOrdinal++, keyOrdinal++,
-                  columnGroupOrdinal++, -1);
-            }
-            // if previous dimension  column group id is same as current then
-            // then its belongs to same row group
-            else {
-              carbonDimension = new CarbonDimension(columnSchema, dimensonOrdinal++, keyOrdinal++,
-                  columnGroupOrdinal++, -1);
-            }
-            previousColumnGroup = columnSchema.getColumnGroupId();
-          }
+          // if it is a columnar dimension participated in mdkey then added
+          // key ordinal and dimension ordinal
+          carbonDimension =
+              new CarbonDimension(columnSchema, dimensonOrdinal++, keyOrdinal++, -1);
         }
        // as complex type will be stored at last so once complex type started all the dimension
        // will be added to complex type
        else if (isComplexDimensionStarted || columnSchema.getDataType().isComplexType()) {
           cardinalityIndexForComplexDimensionColumn.add(tableOrdinal);
           carbonDimension =
-              new CarbonDimension(columnSchema, dimensonOrdinal++, -1, -1, ++complexTypeOrdinal);
+              new CarbonDimension(columnSchema, dimensonOrdinal++, -1, ++complexTypeOrdinal);
          carbonDimension.initializeChildDimensionsList(columnSchema.getNumberOfChild());
           complexDimensions.add(carbonDimension);
           isComplexDimensionStarted = true;
@@ -396,7 +311,7 @@ public class SegmentProperties {
           continue;
         } else {
           // for no dictionary dimension
-          carbonDimension = new CarbonDimension(columnSchema, dimensonOrdinal++, -1, -1, -1);
+          carbonDimension = new CarbonDimension(columnSchema, dimensonOrdinal++, -1, -1);
           numberOfNoDictionaryDimension++;
           if (columnSchema.isSortColumn()) {
             this.numberOfSortColumns++;
@@ -444,8 +359,7 @@ public class SegmentProperties {
       if (columnSchema.isDimensionColumn()) {
         if (columnSchema.getNumberOfChild() > 0) {
           CarbonDimension complexDimension =
-              new CarbonDimension(columnSchema, dimensionOrdinal++, -1, -1,
-                  complexDimensionOrdinal++);
+              new CarbonDimension(columnSchema, dimensionOrdinal++, -1, complexDimensionOrdinal++);
          complexDimension.initializeChildDimensionsList(columnSchema.getNumberOfChild());
           parentDimension.getListOfChildDimensions().add(complexDimension);
           dimensionOrdinal =
@@ -453,8 +367,7 @@ public class SegmentProperties {
                   listOfColumns, complexDimension, complexDimensionOrdinal);
         } else {
           parentDimension.getListOfChildDimensions().add(
-              new CarbonDimension(columnSchema, dimensionOrdinal++, -1, -1,
-                  complexDimensionOrdinal++));
+              new CarbonDimension(columnSchema, dimensionOrdinal++, -1, complexDimensionOrdinal++));
         }
       }
     }
@@ -493,7 +406,6 @@ public class SegmentProperties {
         new ArrayList<Integer>(CarbonCommonConstants.DEFAULT_COLLECTION_SIZE);
     List<Boolean> isDictionaryColumn =
         new ArrayList<Boolean>(CarbonCommonConstants.DEFAULT_COLLECTION_SIZE);
-    int prvcolumnGroupId = -1;
     int counter = 0;
     while (counter < dimensions.size()) {
       CarbonDimension carbonDimension = dimensions.get(counter);
@@ -503,23 +415,8 @@ public class SegmentProperties {
         counter++;
         continue;
       }
-      // columnar column is stored individually
-      // so add one
-      if (carbonDimension.isColumnar()) {
-        dimensionPartitionList.add(1);
-        isDictionaryColumn.add(true);
-      }
-      // if in a group then need to add how many columns a selected in
-      // group
-      if (!carbonDimension.isColumnar() && carbonDimension.columnGroupId() == prvcolumnGroupId) {
-        // incrementing the previous value of the list as it is in same column group
-        dimensionPartitionList.set(dimensionPartitionList.size() - 1,
-            dimensionPartitionList.get(dimensionPartitionList.size() - 1) + 1);
-      } else if (!carbonDimension.isColumnar()) {
-        dimensionPartitionList.add(1);
-        isDictionaryColumn.add(true);
-      }
-      prvcolumnGroupId = carbonDimension.columnGroupId();
+      dimensionPartitionList.add(1);
+      isDictionaryColumn.add(true);
       counter++;
     }
     // get the partitioner
@@ -570,73 +467,6 @@ public class SegmentProperties {
   }
 
   /**
-   * Below method will be used to create a mapping of column group and its column cardinality this
-   * mapping will have column group id to cardinality of the dimension present in
-   * the column group.This mapping will be used during query execution, to create
-   * a mask key for the column group dimension which will be used in aggregation
-   * and filter query as column group dimension will be stored at the bit level
-   */
-  private void fillColumnGroupAndItsCardinality(int[] cardinality) {
-    // mapping of the column group and its ordinal
-    Map<Integer, List<Integer>> columnGroupAndOrdinalMapping =
-        new HashMap<Integer, List<Integer>>(CarbonCommonConstants.DEFAULT_COLLECTION_SIZE);
-    // to store a column group
-    List<Integer> currentColumnGroup = null;
-    // current index
-    int index = 0;
-    // previous column group to check all the column of column id has bee selected
-    int prvColumnGroupId = -1;
-    while (index < dimensions.size()) {
-      // if dimension group id is not zero and it is same as the previous
-      // column id
-      // then we need to add ordinal of that column as it belongs to same
-      // column group
-      if (!dimensions.get(index).isColumnar()
-          && dimensions.get(index).columnGroupId() == prvColumnGroupId
-          && null != currentColumnGroup) {
-        currentColumnGroup.add(index);
-      }
-      // if column is not a columnar then new column group has come
-      // so we need to create a list of new column id group and add the
-      // ordinal
-      else if (!dimensions.get(index).isColumnar()) {
-        currentColumnGroup = new ArrayList<Integer>();
-        columnGroupAndOrdinalMapping.put(dimensions.get(index).columnGroupId(), currentColumnGroup);
-        currentColumnGroup.add(index);
-      }
-      // update the column id every time,this is required to group the
-      // columns
-      // of the same column group
-      prvColumnGroupId = dimensions.get(index).columnGroupId();
-      index++;
-    }
-    // Initializing the map
-    this.columnGroupAndItsKeygenartor =
-        new HashMap<Integer, KeyGenerator>(columnGroupAndOrdinalMapping.size());
-    this.columnGroupOrdinalToMdkeymapping = new HashMap<>(columnGroupAndOrdinalMapping.size());
-    int[] columnGroupCardinality = null;
-    index = 0;
-    Iterator<Entry<Integer, List<Integer>>> iterator =
-        columnGroupAndOrdinalMapping.entrySet().iterator();
-    while (iterator.hasNext()) {
-      Entry<Integer, List<Integer>> next = iterator.next();
-      List<Integer> currentGroupOrdinal = next.getValue();
-      Map<Integer, Integer> colGrpOrdinalMdkeyMapping = new HashMap<>(currentGroupOrdinal.size());
-      // create the cardinality array
-      columnGroupCardinality = new int[currentGroupOrdinal.size()];
-      for (int i = 0; i < columnGroupCardinality.length; i++) {
-        // fill the cardinality
-        columnGroupCardinality[i] = cardinality[currentGroupOrdinal.get(i)];
-        colGrpOrdinalMdkeyMapping.put(currentGroupOrdinal.get(i), i);
-      }
-      this.columnGroupAndItsKeygenartor.put(next.getKey(), new MultiDimKeyVarLengthGenerator(
-          CarbonUtil.getDimensionBitLength(columnGroupCardinality,
-              new int[] { columnGroupCardinality.length })));
-      this.columnGroupOrdinalToMdkeymapping.put(next.getKey(), colGrpOrdinalMdkeyMapping);
-    }
-  }
-
-  /**
    * Below method is to get the value of each dimension column. As this method
    * will be used only once so we can merge both the dimension and complex
    * dimension array. Complex dimension will be store at last so first copy
@@ -764,13 +594,6 @@ public class SegmentProperties {
   }
 
   /**
-   * @return the columnGroupAndItsKeygenartor
-   */
-  public Map<Integer, KeyGenerator> getColumnGroupAndItsKeygenartor() {
-    return columnGroupAndItsKeygenartor;
-  }
-
-  /**
    * @return the numberOfNoDictionaryDimension
    */
   public int getNumberOfNoDictionaryDimension() {
@@ -778,31 +601,6 @@ public class SegmentProperties {
   }
 
   /**
-   * @return
-   */
-  public int[][] getColumnGroups() {
-    return colGroupModel.getColumnGroup();
-  }
-
-  /**
-   * @return colGroupModel
-   */
-  public ColumnGroupModel getColumnGroupModel() {
-    return this.colGroupModel;
-  }
-
-  /**
-   * get mdkey ordinal for given dimension ordinal of given column group
-   *
-   * @param colGrpId
-   * @param ordinal
-   * @return mdkeyordinal
-   */
-  public int getColumnGroupMdKeyOrdinal(int colGrpId, int ordinal) {
-    return columnGroupOrdinalToMdkeymapping.get(colGrpId).get(ordinal);
-  }
-
-  /**
    * @param blockIndex
    * @return It returns all dimension present in given block index
    */
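
The net effect of this SegmentProperties cleanup is that every dictionary dimension now occupies its own chunk: the ordinal-to-chunk mapping increments unconditionally, and the key partitioner always gets partitions of size one. A short sketch of the surviving mapping logic, as a hypothetical standalone method over the CarbonDimension type used above (not a method in this commit):

    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    // Sketch: with column groups removed, dimension i simply lands in chunk i.
    static Map<Integer, Integer> ordinalToChunk(List<CarbonDimension> dimensions) {
      Map<Integer, Integer> mapping = new HashMap<>();
      int blockOrdinal = -1;
      for (CarbonDimension dimension : dimensions) {
        blockOrdinal++;   // previously conditional on the column group id
        mapping.put(dimension.getOrdinal(), blockOrdinal);
      }
      return mapping;
    }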

http://git-wip-us.apache.org/repos/asf/carbondata/blob/f9114036/core/src/main/java/org/apache/carbondata/core/datastore/block/SegmentTaskIndex.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/block/SegmentTaskIndex.java b/core/src/main/java/org/apache/carbondata/core/datastore/block/SegmentTaskIndex.java
deleted file mode 100644
index f577ede..0000000
--- a/core/src/main/java/org/apache/carbondata/core/datastore/block/SegmentTaskIndex.java
+++ /dev/null
@@ -1,53 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.carbondata.core.datastore.block;
-
-import java.util.List;
-
-import org.apache.carbondata.core.datastore.BTreeBuilderInfo;
-import org.apache.carbondata.core.datastore.BtreeBuilder;
-import org.apache.carbondata.core.datastore.impl.btree.BlockBTreeBuilder;
-import org.apache.carbondata.core.metadata.blocklet.DataFileFooter;
-
-/**
- * Class which is responsible for loading the b+ tree block. This class will
- * persist all the detail of a table segment
- */
-public class SegmentTaskIndex extends AbstractIndex {
-
-  public SegmentTaskIndex(SegmentProperties segmentProperties) {
-    this.segmentProperties = segmentProperties;
-  }
-
-  /**
-   * Below method is store the blocks in some data structure
-   *
-   */
-  public void buildIndex(List<DataFileFooter> footerList) {
-    // create a segment builder info
-    // in case of segment create we do not need any file path and each column value size
-    // as Btree will be build as per min max and start key
-    BTreeBuilderInfo btreeBuilderInfo = new BTreeBuilderInfo(footerList, null);
-    BtreeBuilder blocksBuilder = new BlockBTreeBuilder();
-    // load the metadata
-    blocksBuilder.build(btreeBuilderInfo);
-    dataRefNode = blocksBuilder.get();
-    for (DataFileFooter footer : footerList) {
-      totalNumberOfRows += footer.getNumberOfRows();
-    }
-  }
-}
