Github user ravipesala commented on a diff in the pull request: https://github.com/apache/carbondata/pull/1329#discussion_r137213430 --- Diff: integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/datamap/MinMaxDataMap.java --- @@ -0,0 +1,193 @@ +package org.apache.carbondata.spark.testsuite.datamap; + +import java.io.ByteArrayOutputStream; +import java.io.DataOutput; +import java.io.DataOutputStream; +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + +import org.apache.carbondata.common.logging.LogServiceFactory; +import org.apache.carbondata.core.constants.CarbonCommonConstants; +import org.apache.carbondata.core.datamap.dev.DataMap; +import org.apache.carbondata.core.datastore.block.SegmentProperties; +import org.apache.carbondata.core.datastore.block.TableBlockInfo; +import org.apache.carbondata.core.indexstore.Blocklet; +import org.apache.carbondata.core.indexstore.UnsafeMemoryDMStore; +import org.apache.carbondata.core.indexstore.blockletindex.BlockletDataMap; +import org.apache.carbondata.core.indexstore.row.DataMapRow; +import org.apache.carbondata.core.indexstore.row.DataMapRowImpl; +import org.apache.carbondata.core.indexstore.schema.DataMapSchema; +import org.apache.carbondata.core.memory.MemoryException; +import org.apache.carbondata.core.metadata.blocklet.BlockletInfo; +import org.apache.carbondata.core.metadata.blocklet.DataFileFooter; +import org.apache.carbondata.core.metadata.blocklet.index.BlockletMinMaxIndex; +import org.apache.carbondata.core.metadata.datatype.DataType; +import org.apache.carbondata.core.metadata.schema.table.column.ColumnSchema; +import org.apache.carbondata.core.scan.filter.resolver.FilterResolverIntf; +import org.apache.carbondata.core.util.CarbonUtil; +import org.apache.carbondata.core.util.DataFileFooterConverter; + +public class MinMaxDataMap implements DataMap { + + public static final String NAME = "clustered.btree.minmax"; + + private static int KEY_INDEX = 
0; + + private static int MIN_VALUES_INDEX = 1; + + private static int MAX_VALUES_INDEX = 2; + + private UnsafeMemoryDMStore unsafeMemoryDMStore; + + private SegmentProperties segmentProperties; + + private int[] columnCardinality; + + + @Override public void init(String filePath) throws MemoryException, IOException { + long startTime = System.currentTimeMillis(); + DataFileFooterConverter fileFooterConverter = new DataFileFooterConverter(); + List<DataFileFooter> indexInfo = fileFooterConverter.getIndexInfo(filePath); + for (DataFileFooter fileFooter : indexInfo) { + List<ColumnSchema> columnInTable = fileFooter.getColumnInTable(); + if (segmentProperties == null) { + columnCardinality = fileFooter.getSegmentInfo().getColumnCardinality(); + segmentProperties = new SegmentProperties(columnInTable, columnCardinality); + //createSchema(segmentProperties); + } + TableBlockInfo blockInfo = fileFooter.getBlockInfo().getTableBlockInfo(); + if (fileFooter.getBlockletList() == null || fileFooter.getBlockletList().size() == 0) { +// LOGGER +// .info("Reading carbondata file footer to get blocklet info " + blockInfo.getFilePath()); + fileFooter = CarbonUtil.readMetadatFile(blockInfo); + } + + loadToUnsafe(fileFooter, segmentProperties, blockInfo.getFilePath()); + } + if (unsafeMemoryDMStore != null) { + unsafeMemoryDMStore.finishWriting(); + } +// LOGGER.info("Time taken to load blocklet datamap from file : " + filePath + "is " + +// (System.currentTimeMillis() - startTime)); + + } + + @Override public List<Blocklet> prune(FilterResolverIntf filterExp) { + return null; + } + + @Override public void clear() { + + } + + public void updateMinMaxIndex(String filePath) throws IOException, MemoryException { --- End diff -- It shouldn't be read from the index file. Use the DataMap writer to write an example index file and read it here.
---