This is an automated email from the ASF dual-hosted git repository. leirui pushed a commit to branch research/M4-visualization in repository https://gitbox.apache.org/repos/asf/iotdb.git
commit c7dfd4887e65f00346c7aa5f60baf9b72c565b71 Author: Lei Rui <[email protected]> AuthorDate: Wed Jul 5 01:47:54 2023 +0800 ni --- .../dataset/groupby/LocalGroupByExecutor4CPV.java | 94 ++++----- .../file/metadata/statistics/Statistics.java | 175 +++++++++++----- .../file/metadata/statistics/ValueIndex.java | 6 +- .../iotdb/tsfile/read/common/ChunkSuit4CPV.java | 36 ++-- .../iotdb/tsfile/read/common/IOMonitor2.java | 16 -- .../iotdb/tsfile/read/common/ValuePoint.java | 41 ++++ .../iotdb/tsfile/read/reader/page/PageReader.java | 227 ++++++++++++++++++--- .../iotdb/tsfile/encoding/SDTEncoderTest.java | 37 ++-- 8 files changed, 439 insertions(+), 193 deletions(-) diff --git a/server/src/main/java/org/apache/iotdb/db/query/dataset/groupby/LocalGroupByExecutor4CPV.java b/server/src/main/java/org/apache/iotdb/db/query/dataset/groupby/LocalGroupByExecutor4CPV.java index 55f7629159a..fd95d95c257 100644 --- a/server/src/main/java/org/apache/iotdb/db/query/dataset/groupby/LocalGroupByExecutor4CPV.java +++ b/server/src/main/java/org/apache/iotdb/db/query/dataset/groupby/LocalGroupByExecutor4CPV.java @@ -19,6 +19,14 @@ package org.apache.iotdb.db.query.dataset.groupby; +import java.io.IOException; +import java.util.ArrayList; +import java.util.Comparator; +import java.util.HashMap; +import java.util.List; +import java.util.ListIterator; +import java.util.Map; +import java.util.Set; import org.apache.iotdb.db.engine.querycontext.QueryDataSource; import org.apache.iotdb.db.exception.StorageEngineException; import org.apache.iotdb.db.exception.query.QueryProcessException; @@ -45,19 +53,9 @@ import org.apache.iotdb.tsfile.read.filter.GroupByFilter; import org.apache.iotdb.tsfile.read.filter.basic.Filter; import org.apache.iotdb.tsfile.read.reader.page.PageReader; import org.apache.iotdb.tsfile.utils.Pair; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.io.IOException; -import java.util.ArrayList; -import java.util.Comparator; -import java.util.HashMap; 
-import java.util.List; -import java.util.ListIterator; -import java.util.Map; -import java.util.Set; - /** * Sql format: SELECT min_time(s0), max_time(s0), first_value(s0), last_value(s0), min_value(s0), * max_value(s0) ROM root.xx group by ([tqs,tqe),IntervalLength). Requirements: (1) Don't change the @@ -253,7 +251,7 @@ public class LocalGroupByExecutor4CPV implements GroupByExecutor { } // iterate futureChunkList - ListIterator itr = futureChunkList.listIterator(); + ListIterator<ChunkSuit4CPV> itr = futureChunkList.listIterator(); while (itr.hasNext()) { ChunkSuit4CPV chunkSuit4CPV = (ChunkSuit4CPV) (itr.next()); ChunkMetadata chunkMetadata = chunkSuit4CPV.getChunkMetadata(); @@ -303,9 +301,9 @@ public class LocalGroupByExecutor4CPV implements GroupByExecutor { /** * @param curStartTime closed - * @param curEndTime open - * @param startTime closed - * @param endTime open + * @param curEndTime open + * @param startTime closed + * @param endTime open */ @Override public List<AggregateResult> calcResult( @@ -355,15 +353,12 @@ public class LocalGroupByExecutor4CPV implements GroupByExecutor { // currentChunkList while (currentChunkList.size() > 0) { // loop 1 // sorted by bottomValue, find BP candidate set - currentChunkList.sort( - new Comparator<ChunkSuit4CPV>() { // TODO double check the sort order logic for different - // aggregations - public int compare(ChunkSuit4CPV o1, ChunkSuit4CPV o2) { - return ((Comparable) (o1.getStatistics().getMinValue())) - .compareTo(o2.getStatistics().getMinValue()); - // NOTE here get statistics from ChunkSuit4CPV, not from ChunkSuit4CPV.ChunkMetadata - } - }); + // TODO double check the sort order logic for different aggregations + currentChunkList.sort((o1, o2) -> { + return ((Comparable) (o1.getStatistics().getMinValue())).compareTo( + o2.getStatistics().getMinValue()); + // NOTE here get statistics from ChunkSuit4CPV, not from ChunkSuit4CPV.ChunkMetadata + }); // NOTE here get statistics from ChunkSuit4CPV, not from 
ChunkSuit4CPV.ChunkMetadata Object value = currentChunkList.get(0).getStatistics().getMinValue(); List<ChunkSuit4CPV> candidateSet = new ArrayList<>(); @@ -376,21 +371,13 @@ public class LocalGroupByExecutor4CPV implements GroupByExecutor { } } - List<ChunkSuit4CPV> nonLazyLoad = - new ArrayList<>( - candidateSet); // TODO check, whether nonLazyLoad remove affects candidateSet - nonLazyLoad.sort( - new Comparator<ChunkSuit4CPV>() { // TODO double check the sort order logic for version - public int compare(ChunkSuit4CPV o1, ChunkSuit4CPV o2) { - return new MergeReaderPriority( - o2.getChunkMetadata().getVersion(), - o2.getChunkMetadata().getOffsetOfChunkHeader()) - .compareTo( - new MergeReaderPriority( - o1.getChunkMetadata().getVersion(), - o1.getChunkMetadata().getOffsetOfChunkHeader())); - } - }); + // TODO check, whether nonLazyLoad remove affects candidateSet + List<ChunkSuit4CPV> nonLazyLoad = new ArrayList<>(candidateSet); + // TODO double check the sort order logic for version + nonLazyLoad.sort((o1, o2) -> new MergeReaderPriority(o2.getChunkMetadata().getVersion(), + o2.getChunkMetadata().getOffsetOfChunkHeader()) + .compareTo(new MergeReaderPriority(o1.getChunkMetadata().getVersion(), + o1.getChunkMetadata().getOffsetOfChunkHeader()))); while (true) { // loop 2 // if there is no chunk for lazy loading, then load all chunks in candidateSet, // and apply deleteIntervals, deleting BP no matter out of deletion or update @@ -484,7 +471,7 @@ public class LocalGroupByExecutor4CPV implements GroupByExecutor { results .get(4) .updateResultUsingValues( - new long[] {candidateTimestamp}, 1, new Object[] {candidateValue}); + new long[]{candidateTimestamp}, 1, new Object[]{candidateValue}); return; // finished } else if (!isUpdate) { // verify whether the candidate point is updated @@ -513,7 +500,7 @@ public class LocalGroupByExecutor4CPV implements GroupByExecutor { results .get(4) .updateResultUsingValues( - new long[] {candidateTimestamp}, 1, new Object[] 
{candidateValue}); + new long[]{candidateTimestamp}, 1, new Object[]{candidateValue}); return; // finished } else { // the candidate point is updated, then label the chunk as already lazy loaded, @@ -580,8 +567,8 @@ public class LocalGroupByExecutor4CPV implements GroupByExecutor { new Comparator<ChunkSuit4CPV>() { // TODO double check the sort order logic for version public int compare(ChunkSuit4CPV o1, ChunkSuit4CPV o2) { return new MergeReaderPriority( - o2.getChunkMetadata().getVersion(), - o2.getChunkMetadata().getOffsetOfChunkHeader()) + o2.getChunkMetadata().getVersion(), + o2.getChunkMetadata().getOffsetOfChunkHeader()) .compareTo( new MergeReaderPriority( o1.getChunkMetadata().getVersion(), @@ -614,7 +601,8 @@ public class LocalGroupByExecutor4CPV implements GroupByExecutor { .setDeleteIntervalList(chunkSuit4CPV.getChunkMetadata().getDeleteIntervalList()); } // chunk data read operation (c): get all data points - chunkSuit4CPV.getPageReader().updateBPTP(chunkSuit4CPV); +// chunkSuit4CPV.getPageReader().updateBPTP(chunkSuit4CPV); + chunkSuit4CPV.getPageReader().updateTP_withValueIndex(chunkSuit4CPV); // TODO // check if empty if (chunkSuit4CPV.statistics.getCount() == 0) { currentChunkList.remove(chunkSuit4CPV); @@ -685,7 +673,7 @@ public class LocalGroupByExecutor4CPV implements GroupByExecutor { results .get(5) .updateResultUsingValues( - new long[] {candidateTimestamp}, 1, new Object[] {candidateValue}); + new long[]{candidateTimestamp}, 1, new Object[]{candidateValue}); return; // finished } else if (!isUpdate) { // verify whether the candidate point is updated @@ -714,7 +702,7 @@ public class LocalGroupByExecutor4CPV implements GroupByExecutor { results .get(5) .updateResultUsingValues( - new long[] {candidateTimestamp}, 1, new Object[] {candidateValue}); + new long[]{candidateTimestamp}, 1, new Object[]{candidateValue}); return; // finished } else { // the candidate point is updated, then label the chunk as already lazy loaded, @@ -761,8 +749,8 @@ 
public class LocalGroupByExecutor4CPV implements GroupByExecutor { return res; } else { return new MergeReaderPriority( - o2.getChunkMetadata().getVersion(), - o2.getChunkMetadata().getOffsetOfChunkHeader()) + o2.getChunkMetadata().getVersion(), + o2.getChunkMetadata().getOffsetOfChunkHeader()) .compareTo( new MergeReaderPriority( o1.getChunkMetadata().getVersion(), @@ -839,11 +827,11 @@ public class LocalGroupByExecutor4CPV implements GroupByExecutor { results .get(0) .updateResultUsingValues( - new long[] {candidateTimestamp}, 1, new Object[] {candidateValue}); + new long[]{candidateTimestamp}, 1, new Object[]{candidateValue}); results .get(2) .updateResultUsingValues( - new long[] {candidateTimestamp}, 1, new Object[] {candidateValue}); + new long[]{candidateTimestamp}, 1, new Object[]{candidateValue}); return; } } @@ -872,8 +860,8 @@ public class LocalGroupByExecutor4CPV implements GroupByExecutor { return res; } else { return new MergeReaderPriority( - o2.getChunkMetadata().getVersion(), - o2.getChunkMetadata().getOffsetOfChunkHeader()) + o2.getChunkMetadata().getVersion(), + o2.getChunkMetadata().getOffsetOfChunkHeader()) .compareTo( new MergeReaderPriority( o1.getChunkMetadata().getVersion(), @@ -951,11 +939,11 @@ public class LocalGroupByExecutor4CPV implements GroupByExecutor { results .get(1) .updateResultUsingValues( - new long[] {candidateTimestamp}, 1, new Object[] {candidateValue}); + new long[]{candidateTimestamp}, 1, new Object[]{candidateValue}); results .get(3) .updateResultUsingValues( - new long[] {candidateTimestamp}, 1, new Object[] {candidateValue}); + new long[]{candidateTimestamp}, 1, new Object[]{candidateValue}); return; } } diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/statistics/Statistics.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/statistics/Statistics.java index c712d1967f0..5c0e87cf93d 100644 --- a/tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/statistics/Statistics.java 
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/statistics/Statistics.java @@ -18,31 +18,31 @@ */ package org.apache.iotdb.tsfile.file.metadata.statistics; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.nio.ByteBuffer; +import java.util.Collections; +import java.util.Objects; import org.apache.iotdb.tsfile.encoding.decoder.Decoder; import org.apache.iotdb.tsfile.encoding.decoder.DeltaBinaryDecoder.IntDeltaDecoder; import org.apache.iotdb.tsfile.encoding.decoder.DoublePrecisionDecoderV2; import org.apache.iotdb.tsfile.exception.filter.StatisticsClassException; import org.apache.iotdb.tsfile.exception.write.UnknownColumnTypeException; import org.apache.iotdb.tsfile.file.metadata.enums.TSDataType; +import org.apache.iotdb.tsfile.read.common.ValuePoint; import org.apache.iotdb.tsfile.utils.Binary; import org.apache.iotdb.tsfile.utils.ReadWriteForEncodingUtils; import org.apache.iotdb.tsfile.utils.ReadWriteIOUtils; - import org.eclipse.collections.impl.list.mutable.primitive.DoubleArrayList; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; -import java.nio.ByteBuffer; -import java.util.Objects; - /** * This class is used for recording statistic information of each measurement in a delta file. While * writing processing, the processor records the statistics information. 
Statistics includes - * maximum, minimum and null value count up to version 0.0.1.<br> - * Each data type extends this Statistic as super class.<br> + * maximum, minimum and null value count up to version 0.0.1.<br> Each data type extends this + * Statistic as super class.<br> * <br> * For the statistics in the Unseq file TimeSeriesMetadata, only firstValue, lastValue, startTime * and endTime can be used.</br> @@ -55,7 +55,9 @@ public abstract class Statistics<T> { */ protected boolean isEmpty = true; - /** number of time-value points */ + /** + * number of time-value points + */ private int count = 0; private long startTime = Long.MAX_VALUE; @@ -65,7 +67,9 @@ public abstract class Statistics<T> { public ValueIndex valueIndex = new ValueIndex(); - /** @author Yuyuan Kang */ + /** + * @author Yuyuan Kang + */ final String OPERATION_NOT_SUPPORT_FORMAT = "%s statistics does not support operation: %s"; /** @@ -176,17 +180,23 @@ public abstract class Statistics<T> { abstract int serializeStats(OutputStream outputStream) throws IOException; - /** read data from the inputStream. */ + /** + * read data from the inputStream. 
+ */ public abstract void deserialize(InputStream inputStream) throws IOException; public abstract void deserialize(ByteBuffer byteBuffer); // public abstract void setMinMaxFromBytes(byte[] minBytes, byte[] maxBytes); - /** @author Yuyuan Kang */ + /** + * @author Yuyuan Kang + */ public abstract MinMaxInfo<T> getMinInfo(); - /** @author Yuyuan Kang */ + /** + * @author Yuyuan Kang + */ public abstract MinMaxInfo<T> getMaxInfo(); public abstract T getMinValue(); @@ -197,10 +207,14 @@ public abstract class Statistics<T> { public abstract T getMaxValue(); - /** @author Yuyuan Kang */ + /** + * @author Yuyuan Kang + */ public abstract long getBottomTimestamp(); - /** @author Yuyuan Kang */ + /** + * @author Yuyuan Kang + */ public abstract long getTopTimestamp(); public abstract T getFirstValue(); @@ -276,7 +290,9 @@ public abstract class Statistics<T> { updateStats(value); } - /** @author Yuyuan Kang */ + /** + * @author Yuyuan Kang + */ public void update(long time, int value) { count++; if (time < this.startTime) { @@ -291,7 +307,9 @@ public abstract class Statistics<T> { updateStats(value, time); } - /** @author Yuyuan Kang */ + /** + * @author Yuyuan Kang + */ public void update(long time, long value) { count++; if (time < this.startTime) { @@ -305,7 +323,9 @@ public abstract class Statistics<T> { updateStats(value, time); } - /** @author Yuyuan Kang */ + /** + * @author Yuyuan Kang + */ public void update(long time, float value) { count++; if (time < this.startTime) { @@ -319,7 +339,9 @@ public abstract class Statistics<T> { updateStats(value, time); } - /** @author Yuyuan Kang */ + /** + * @author Yuyuan Kang + */ public void update(long time, double value) { count++; if (time < this.startTime) { @@ -357,7 +379,9 @@ public abstract class Statistics<T> { updateStats(values, batchSize); } - /** @author Yuyuan Kang */ + /** + * @author Yuyuan Kang + */ public void update(long[] time, int[] values, int batchSize) { count += batchSize; if (time[0] < startTime) { @@ 
-371,7 +395,9 @@ public abstract class Statistics<T> { updateStats(values, time, batchSize); } - /** @author Yuyuan Kang */ + /** + * @author Yuyuan Kang + */ public void update(long[] time, long[] values, int batchSize) { count += batchSize; if (time[0] < startTime) { @@ -385,7 +411,9 @@ public abstract class Statistics<T> { updateStats(values, time, batchSize); } - /** @author Yuyuan Kang */ + /** + * @author Yuyuan Kang + */ public void update(long[] time, float[] values, int batchSize) { count += batchSize; if (time[0] < startTime) { @@ -399,7 +427,9 @@ public abstract class Statistics<T> { updateStats(values, time, batchSize); } - /** @author Yuyuan Kang */ + /** + * @author Yuyuan Kang + */ public void update(long[] time, double[] values, int batchSize) { count += batchSize; if (time[0] < startTime) { @@ -435,10 +465,14 @@ public abstract class Statistics<T> { isEmpty = empty; } - /** @author Yuyuan Kang */ + /** + * @author Yuyuan Kang + */ public abstract void updateMinInfo(T val, long timestamp); - /** @author Yuyuan Kang */ + /** + * @author Yuyuan Kang + */ public abstract void updateMaxInfo(T val, long timestamp); void updateStats(boolean value) { @@ -495,22 +529,30 @@ public abstract class Statistics<T> { } } - /** @author Yuyuan Kang */ + /** + * @author Yuyuan Kang + */ public void updateStats(int value, long timestamp) { throw new UnsupportedOperationException(); } - /** @author Yuyuan Kang */ + /** + * @author Yuyuan Kang + */ public void updateStats(long value, long timestamp) { throw new UnsupportedOperationException(); } - /** @author Yuyuan Kang */ + /** + * @author Yuyuan Kang + */ public void updateStats(float value, long timestamp) { throw new UnsupportedOperationException(); } - /** @author Yuyuan Kang */ + /** + * @author Yuyuan Kang + */ public void updateStats(double value, long timestamp) { throw new UnsupportedOperationException(); } @@ -523,22 +565,30 @@ public abstract class Statistics<T> { throw new UnsupportedOperationException(); 
} - /** @author Yuyuan Kang */ + /** + * @author Yuyuan Kang + */ void updateStats(int[] values, long[] timestamps, int batchSize) { throw new UnsupportedOperationException(); } - /** @author Yuyuan Kang */ + /** + * @author Yuyuan Kang + */ void updateStats(long[] values, long[] timestamps, int batchSize) { throw new UnsupportedOperationException(); } - /** @author Yuyuan Kang */ + /** + * @author Yuyuan Kang + */ void updateStats(float[] values, long[] timestamps, int batchSize) { throw new UnsupportedOperationException(); } - /** @author Yuyuan Kang */ + /** + * @author Yuyuan Kang + */ void updateStats(double[] values, long[] timestamps, int batchSize) { throw new UnsupportedOperationException(); } @@ -551,7 +601,7 @@ public abstract class Statistics<T> { * @param min min timestamp * @param max max timestamp * @author Yuyuan Kang This method with two parameters is only used by {@code unsequence} which - * updates/inserts/deletes timestamp. + * updates/inserts/deletes timestamp. */ public void updateStats(long min, long bottomTimestamp, long max, long topTimestamp) { throw new UnsupportedOperationException(); @@ -585,26 +635,31 @@ public abstract class Statistics<T> { // add the first point valueIndex.modelPointIdx_list.add(1); switch (getType()) { - case INT32: - int intV = (int) getFirstValue(); - valueIndex.modelPointVal_list.add((double) intV); - break; +// case INT32: +// int intV = (int) getFirstValue(); +// valueIndex.modelPointVal_list.add((double) intV); +// valueIndex.sortedModelPoints.add(new ValuePoint(1, (double) intV)); +// break; case INT64: long longV = (long) getFirstValue(); valueIndex.modelPointVal_list.add((double) longV); + valueIndex.sortedModelPoints.add(new ValuePoint(1, (double) longV)); break; - case FLOAT: - float floatV = (float) getFirstValue(); - valueIndex.modelPointVal_list.add((double) floatV); - break; +// case FLOAT: +// float floatV = (float) getFirstValue(); +// valueIndex.modelPointVal_list.add((double) floatV); +// 
valueIndex.sortedModelPoints.add(new ValuePoint(1, (double) floatV)); +// break; case DOUBLE: double doubleV = (double) getFirstValue(); valueIndex.modelPointVal_list.add(doubleV); + valueIndex.sortedModelPoints.add(new ValuePoint(1, doubleV)); break; default: - throw new IOException("unsupported"); + throw new IOException("unsupported data type"); } + // sdt point idx of the points in between the first and the last points int idxSize = ReadWriteIOUtils.readInt(buffer); if (idxSize > 0) { ByteBuffer idxBuffer = buffer.slice(); @@ -613,18 +668,24 @@ public abstract class Statistics<T> { while (idxDecoder.hasNext(idxBuffer)) { int idx = idxDecoder.readInt(idxBuffer); valueIndex.modelPointIdx_list.add(idx); + valueIndex.sortedModelPoints.add(new ValuePoint(idx, 0)); } } + // sdt point value of the points in between the first and the last points buffer.position(buffer.position() + idxSize); int valueSize = ReadWriteIOUtils.readInt(buffer); if (valueSize > 0) { ByteBuffer valueBuffer = buffer.slice(); valueBuffer.limit(valueSize); Decoder valueDecoder = new DoublePrecisionDecoderV2(); + int n = 0; while (valueDecoder.hasNext(valueBuffer)) { double value = valueDecoder.readDouble(valueBuffer); valueIndex.modelPointVal_list.add(value); + // NOTE: n+1 because the first point already added + valueIndex.sortedModelPoints.get(n + 1).value = value; + n++; } } @@ -632,24 +693,28 @@ public abstract class Statistics<T> { if (count >= 2) { // otherwise only one point no need to store again valueIndex.modelPointIdx_list.add(count); switch (getType()) { - case INT32: - int intV = (int) getLastValue(); - valueIndex.modelPointVal_list.add((double) intV); - break; +// case INT32: +// int intV = (int) getLastValue(); +// valueIndex.modelPointVal_list.add((double) intV); +// valueIndex.sortedModelPoints.add(new ValuePoint(count, (double) intV)); +// break; case INT64: long longV = (long) getLastValue(); valueIndex.modelPointVal_list.add((double) longV); + 
valueIndex.sortedModelPoints.add(new ValuePoint(count, (double) longV)); break; - case FLOAT: - float floatV = (float) getLastValue(); - valueIndex.modelPointVal_list.add((double) floatV); - break; +// case FLOAT: +// float floatV = (float) getLastValue(); +// valueIndex.modelPointVal_list.add((double) floatV); +// valueIndex.sortedModelPoints.add(new ValuePoint(count, (double) floatV)); +// break; case DOUBLE: double doubleV = (double) getLastValue(); valueIndex.modelPointVal_list.add(doubleV); + valueIndex.sortedModelPoints.add(new ValuePoint(count, doubleV)); break; default: - throw new IOException("unsupported"); + throw new IOException("unsupported data type"); } } @@ -657,8 +722,8 @@ public abstract class Statistics<T> { buffer.position(buffer.position() + valueSize); valueIndex.errorBound = ReadWriteIOUtils.readDouble(buffer); - // System.out.println(valueIndex.modelPointIdx_list); - // System.out.println(valueIndex.modelPointVal_list); + // sort by value from small to big + Collections.sort(valueIndex.sortedModelPoints); } void deserializeStepRegress(ByteBuffer byteBuffer) { diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/statistics/ValueIndex.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/statistics/ValueIndex.java index 1ad935e6ef8..eb506480840 100644 --- a/tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/statistics/ValueIndex.java +++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/statistics/ValueIndex.java @@ -1,10 +1,12 @@ package org.apache.iotdb.tsfile.file.metadata.statistics; +import java.util.ArrayList; +import java.util.List; import org.apache.iotdb.tsfile.encoding.encoder.DeltaBinaryEncoder.IntDeltaEncoder; import org.apache.iotdb.tsfile.encoding.encoder.DoublePrecisionEncoderV2; import org.apache.iotdb.tsfile.encoding.encoder.SDTEncoder; +import org.apache.iotdb.tsfile.read.common.ValuePoint; import org.apache.iotdb.tsfile.utils.PublicBAOS; - import 
org.eclipse.collections.impl.list.mutable.primitive.DoubleArrayList; import org.eclipse.collections.impl.list.mutable.primitive.IntArrayList; @@ -20,6 +22,8 @@ public class ValueIndex { public IntArrayList modelPointIdx_list = new IntArrayList(); public DoubleArrayList modelPointVal_list = new DoubleArrayList(); + public List<ValuePoint> sortedModelPoints = new ArrayList<>(); // sorted by value in ascending order + // this is necessary, otherwise serialized twice by timeseriesMetadata and chunkMetadata // causing learn() executed more than once!! private boolean isLearned = false; diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/read/common/ChunkSuit4CPV.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/read/common/ChunkSuit4CPV.java index a43563e0410..3b5fd251521 100644 --- a/tsfile/src/main/java/org/apache/iotdb/tsfile/read/common/ChunkSuit4CPV.java +++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/read/common/ChunkSuit4CPV.java @@ -19,6 +19,7 @@ package org.apache.iotdb.tsfile.read.common; +import java.io.IOException; import org.apache.iotdb.tsfile.common.conf.TSFileDescriptor; import org.apache.iotdb.tsfile.file.metadata.ChunkMetadata; import org.apache.iotdb.tsfile.file.metadata.enums.TSDataType; @@ -32,11 +33,10 @@ import org.apache.iotdb.tsfile.file.metadata.statistics.StepRegress; import org.apache.iotdb.tsfile.read.common.IOMonitor2.Operation; import org.apache.iotdb.tsfile.read.reader.page.PageReader; -import java.io.IOException; - public class ChunkSuit4CPV { private ChunkMetadata chunkMetadata; // fixed info, including version, dataType, stepRegress + public int modelPointsCursor = 1; // starting from 0, pointing to the right end of the model segment, moving forward as processing time spans public Statistics statistics; // dynamically updated, includes FP/LP/BP/TP info @@ -282,10 +282,10 @@ public class ChunkSuit4CPV { long timestamp = pageReader.timeBuffer.getLong(estimatedPos * 8); statistics.setStartTime(timestamp); switch 
(chunkMetadata.getDataType()) { - // iotdb的int类型的plain编码用的是自制的不支持random access - // case INT32: - // return new MinMaxInfo(pageReader.valueBuffer.getInt(estimatedPos * 4), - // pageReader.timeBuffer.getLong(estimatedPos * 8)); + // iotdb的int类型的plain编码用的是自制的不支持random access + // case INT32: + // return new MinMaxInfo(pageReader.valueBuffer.getInt(estimatedPos * 4), + // pageReader.timeBuffer.getLong(estimatedPos * 8)); case INT64: long longVal = pageReader.valueBuffer.getLong(pageReader.timeBufferLength + estimatedPos * 8); @@ -328,10 +328,10 @@ public class ChunkSuit4CPV { long timestamp = pageReader.timeBuffer.getLong(estimatedPos * 8); statistics.setStartTime(timestamp); switch (chunkMetadata.getDataType()) { - // iotdb的int类型的plain编码用的是自制的不支持random access - // case INT32: - // return new MinMaxInfo(pageReader.valueBuffer.getInt(estimatedPos * 4), - // pageReader.timeBuffer.getLong(estimatedPos * 8)); + // iotdb的int类型的plain编码用的是自制的不支持random access + // case INT32: + // return new MinMaxInfo(pageReader.valueBuffer.getInt(estimatedPos * 4), + // pageReader.timeBuffer.getLong(estimatedPos * 8)); case INT64: long longVal = pageReader.valueBuffer.getLong(pageReader.timeBufferLength + estimatedPos * 8); @@ -399,10 +399,10 @@ public class ChunkSuit4CPV { long timestamp = pageReader.timeBuffer.getLong(estimatedPos * 8); statistics.setEndTime(timestamp); switch (chunkMetadata.getDataType()) { - // iotdb的int类型的plain编码用的是自制的不支持random access - // case INT32: - // return new MinMaxInfo(pageReader.valueBuffer.getInt(estimatedPos * 4), - // pageReader.timeBuffer.getLong(estimatedPos * 8)); + // iotdb的int类型的plain编码用的是自制的不支持random access + // case INT32: + // return new MinMaxInfo(pageReader.valueBuffer.getInt(estimatedPos * 4), + // pageReader.timeBuffer.getLong(estimatedPos * 8)); case INT64: long longVal = pageReader.valueBuffer.getLong(pageReader.timeBufferLength + estimatedPos * 8); @@ -451,10 +451,10 @@ public class ChunkSuit4CPV { long timestamp = 
pageReader.timeBuffer.getLong(estimatedPos * 8); statistics.setEndTime(timestamp); switch (chunkMetadata.getDataType()) { - // iotdb的int类型的plain编码用的是自制的不支持random access - // case INT32: - // return new MinMaxInfo(pageReader.valueBuffer.getInt(estimatedPos * 4), - // pageReader.timeBuffer.getLong(estimatedPos * 8)); + // iotdb的int类型的plain编码用的是自制的不支持random access + // case INT32: + // return new MinMaxInfo(pageReader.valueBuffer.getInt(estimatedPos * 4), + // pageReader.timeBuffer.getLong(estimatedPos * 8)); case INT64: long longVal = pageReader.valueBuffer.getLong(pageReader.timeBufferLength + estimatedPos * 8); diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/read/common/IOMonitor2.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/read/common/IOMonitor2.java index cd39d9af1e0..62e79e40fac 100644 --- a/tsfile/src/main/java/org/apache/iotdb/tsfile/read/common/IOMonitor2.java +++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/read/common/IOMonitor2.java @@ -247,22 +247,6 @@ public class IOMonitor2 { M4_LSM_TP_SEARCH_ARRAY_c_genBPTP_cnt = 0; } - public static class ValuePoint implements Comparable<ValuePoint> { - public final int index; - public final long value; - - public ValuePoint(int index, long value) { - this.index = index; - this.value = value; - } - - @Override - public int compareTo(ValuePoint other) { - // multiplied to -1 as the author need descending sort order - return -1 * Long.valueOf(this.value).compareTo(other.value); - } - } - public static void addMeasure(Operation operation, long elapsedTimeInNanosecond) { switch (operation) { case DCP_Server_Query_Execute: diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/read/common/ValuePoint.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/read/common/ValuePoint.java new file mode 100644 index 00000000000..2e672d7a134 --- /dev/null +++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/read/common/ValuePoint.java @@ -0,0 +1,41 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under 
one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.iotdb.tsfile.read.common; + +public class ValuePoint implements Comparable<ValuePoint> { + + public int index; + public double value; + + public ValuePoint(int index, double value) { + this.index = index; + this.value = value; + } + + @Override + public int compareTo(ValuePoint other) { + // ascending order + return Double.valueOf(this.value).compareTo(other.value); + } + + public String toString() { + return "(" + index + "," + value + ")"; + } +} \ No newline at end of file diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/read/reader/page/PageReader.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/read/reader/page/PageReader.java index a75b368c00d..979678ea5b7 100644 --- a/tsfile/src/main/java/org/apache/iotdb/tsfile/read/reader/page/PageReader.java +++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/read/reader/page/PageReader.java @@ -18,56 +18,67 @@ */ package org.apache.iotdb.tsfile.read.reader.page; +import java.io.IOException; +import java.nio.ByteBuffer; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; import org.apache.iotdb.tsfile.encoding.decoder.Decoder; import org.apache.iotdb.tsfile.exception.write.UnSupportedDataTypeException; import 
org.apache.iotdb.tsfile.file.header.PageHeader; import org.apache.iotdb.tsfile.file.metadata.ChunkMetadata; import org.apache.iotdb.tsfile.file.metadata.enums.TSDataType; import org.apache.iotdb.tsfile.file.metadata.statistics.DoubleStatistics; -import org.apache.iotdb.tsfile.file.metadata.statistics.FloatStatistics; import org.apache.iotdb.tsfile.file.metadata.statistics.LongStatistics; +import org.apache.iotdb.tsfile.file.metadata.statistics.MinMaxInfo; import org.apache.iotdb.tsfile.file.metadata.statistics.Statistics; +import org.apache.iotdb.tsfile.file.metadata.statistics.ValueIndex; import org.apache.iotdb.tsfile.read.common.BatchData; import org.apache.iotdb.tsfile.read.common.BatchDataFactory; import org.apache.iotdb.tsfile.read.common.ChunkSuit4CPV; import org.apache.iotdb.tsfile.read.common.IOMonitor2; import org.apache.iotdb.tsfile.read.common.IOMonitor2.Operation; import org.apache.iotdb.tsfile.read.common.TimeRange; +import org.apache.iotdb.tsfile.read.common.ValuePoint; import org.apache.iotdb.tsfile.read.filter.basic.Filter; import org.apache.iotdb.tsfile.read.filter.operator.AndFilter; import org.apache.iotdb.tsfile.read.reader.IPageReader; import org.apache.iotdb.tsfile.utils.Binary; import org.apache.iotdb.tsfile.utils.ReadWriteForEncodingUtils; -import java.io.IOException; -import java.nio.ByteBuffer; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; - public class PageReader implements IPageReader { private PageHeader pageHeader; protected TSDataType dataType; - /** decoder for value column */ + /** + * decoder for value column + */ protected Decoder valueDecoder; - /** decoder for time column */ + /** + * decoder for time column + */ protected Decoder timeDecoder; - /** time column in memory */ + /** + * time column in memory + */ public ByteBuffer timeBuffer; - /** value column in memory */ + /** + * value column in memory + */ public ByteBuffer valueBuffer; public int timeBufferLength; protected Filter filter; - /** 
A list of deleted intervals. */ + /** + * A list of deleted intervals. + */ private List<TimeRange> deleteIntervalList; private int deleteCursor = 0; @@ -111,7 +122,9 @@ public class PageReader implements IPageReader { valueBuffer.position(timeBufferLength); } - /** the chunk partially overlaps in time with the current M4 interval Ii */ + /** + * the chunk partially overlaps in time with the current M4 interval Ii + */ public void split4CPV( long startTime, long endTime, @@ -124,10 +137,10 @@ public class PageReader implements IPageReader { // endTime is excluded so -1 int numberOfSpans = (int) - Math.floor( - (Math.min(chunkMetadata.getEndTime(), endTime - 1) - curStartTime) - * 1.0 - / interval) + Math.floor( + (Math.min(chunkMetadata.getEndTime(), endTime - 1) - curStartTime) + * 1.0 + / interval) + 1; for (int n = 0; n < numberOfSpans; n++) { long leftEndIncluded = curStartTime + n * interval; @@ -164,6 +177,156 @@ public class PageReader implements IPageReader { } } + public void updateTP_withValueIndex(ChunkSuit4CPV chunkSuit4CPV) { + // NOTE: get valueIndex from chunkSuit4CPV.getChunkMetadata().getStatistics(), not chunkSuit4CPV.getStatistics()! 
+ ValueIndex valueIndex = chunkSuit4CPV.getChunkMetadata().getStatistics().valueIndex; + + // step 1: find threshold + // iterate SDT points from value big to small to find the first point not deleted + boolean isFound = false; + double foundValue = 0; + for (ValuePoint valuePoint : valueIndex.sortedModelPoints) { + int idx = valuePoint.index; // index starting from 1 + int pos = idx - 1; // pos starting from 0 + long time = timeBuffer.getLong(pos * 8); + // check if deleted + deleteCursor = 0; // TODO check + if ((pos >= chunkSuit4CPV.startPos) && (pos <= chunkSuit4CPV.endPos) && !isDeleted(time)) { + // startPos&endPos conveys the virtual deletes of the current M4 time span + isFound = true; + foundValue = valuePoint.value; + break; + } + } + if (!isFound) { // unfortunately all sdt points are deleted + updateBPTP(chunkSuit4CPV); // then fall back to baseline method + return; + } + double threshold_LB = foundValue - valueIndex.errorBound; // near max LB + + // step 2: calculate pruned intervals for TP: UB<threshold=near max LB + // increment global chunkSuit4CPV.modelPointsCursor + int idx2; + // note that the first and last points of a chunk are stored in model points + // there must exist idx2-1 >= startPos, otherwise this chunk won't be processed for the current time span + // there must exist idx1-1 <= endPos, otherwise this chunk won't be processed for the current time span + while ((idx2 = valueIndex.modelPointIdx_list.get(chunkSuit4CPV.modelPointsCursor)) - 1 + < chunkSuit4CPV.startPos) { // TODO check + // -1 because idx starting from 1 while pos starting from 0 + chunkSuit4CPV.modelPointsCursor++; + } + // increment local cursor starting from chunkSuit4CPV.modelPointsCursor for iterating model segments for the current time span + // do not increment modelPointsCursor because the model segments for this time span may be iterated multiple times + int localCursor = chunkSuit4CPV.modelPointsCursor; + List<Integer> prune_intervals_start = new ArrayList<>(); + 
List<Integer> prune_intervals_end = new ArrayList<>(); + int interval_start = -1; + int interval_end = -1; + int idx1; + while ((idx1 = valueIndex.modelPointIdx_list.get(localCursor - 1)) - 1 + <= chunkSuit4CPV.endPos) { + idx2 = valueIndex.modelPointIdx_list.get(localCursor); + double v1_UB = valueIndex.modelPointVal_list.get(localCursor - 1) + valueIndex.errorBound; + double v2_UB = valueIndex.modelPointVal_list.get(localCursor) + valueIndex.errorBound; + if (v1_UB < threshold_LB && v2_UB < threshold_LB) { + if (interval_start < 0) { + interval_start = idx1; + } + interval_end = idx2; // continuous + } else if (v1_UB < threshold_LB && v2_UB >= threshold_LB) { + if (interval_start < 0) { + interval_start = idx1; + } + prune_intervals_start.add(interval_start); + prune_intervals_end.add( + (int) Math.floor((threshold_LB - v1_UB) * (idx2 - idx1) / (v2_UB - v1_UB) + idx1)); + interval_start = -1; // discontinuous + } else if (v1_UB >= threshold_LB && v2_UB < threshold_LB) { + interval_start = (int) Math.ceil( + (threshold_LB - v1_UB) * (idx2 - idx1) / (v2_UB - v1_UB) + idx1); + interval_end = idx2; // continuous + } + localCursor++; + } + if (interval_start > 0) { + prune_intervals_start.add(interval_start); + prune_intervals_end.add(interval_end); + } + + // step 3: calculate unpruned intervals + // TODO deal with time span deletes -> update search_startPos and search_endPos + // note idx starting from 1, pos starting from 0 + int search_startPos = chunkSuit4CPV.startPos; + int search_endPos = chunkSuit4CPV.endPos; + if (prune_intervals_start.size() > 0) { + // deal with time span left virtual delete -> update search_startPos + int prune_idx1 = prune_intervals_start.get(0); + if (prune_idx1 - 1 <= chunkSuit4CPV.startPos) { + // +1 for included, -1 for starting from 0 + search_startPos = Math.max(search_startPos, prune_intervals_end.get(0) + 1 - 1); + prune_intervals_start.remove(0); + prune_intervals_end.remove(0); + } + } + if (prune_intervals_start.size() > 0) { + 
// deal with time span right virtual delete -> update search_endPos + int prune_idx2 = prune_intervals_end.get(prune_intervals_end.size() - 1); + if (prune_idx2 - 1 >= search_endPos) { + // -1 for included, -1 for starting from 0 + search_endPos = Math.min(search_endPos, + prune_intervals_start.get(prune_intervals_start.size() - 1) - 1 - 1); + prune_intervals_start.remove(prune_intervals_start.size() - 1); + prune_intervals_end.remove(prune_intervals_end.size() - 1); + } + } + // add search_endPos+1 to the end of prune_intervals_start + // turning into search_intervals_end (excluded endpoints) + prune_intervals_start.add(search_endPos + 1); + // add search_startPos-1 to the start of prune_intervals_end + // turning into search_intervals_start (excluded endpoints) + prune_intervals_end.add(0, search_startPos - 1); + + // step 4: search unpruned intervals + // TODO deal with normal delete intervals + if (dataType == TSDataType.DOUBLE) { + double candidateTPvalue = -1; + long candidateTPtime = -1; + for (int i = 0; i < prune_intervals_start.size(); i++) { + int search_interval_start = prune_intervals_end.get(i) + 1; // included + int search_interval_end = prune_intervals_start.get(i) - 1; // included + for (int j = search_interval_start; j <= search_interval_end; j++) { // starting from 1 + double v = valueBuffer.getDouble(timeBufferLength + (j - 1) * 8); + long t = timeBuffer.getLong((j - 1) * 8); + if (v > candidateTPvalue && !isDeleted(t)) { + candidateTPvalue = v; + candidateTPtime = t; + } + } + } + chunkSuit4CPV.statistics.setMaxInfo(new MinMaxInfo(candidateTPvalue, candidateTPtime)); + } else if (dataType == TSDataType.INT64) { + long candidateTPvalue = -1; + long candidateTPtime = -1; + for (int i = 0; i < prune_intervals_start.size(); i++) { + int search_interval_start = prune_intervals_end.get(i) + 1; // included + int search_interval_end = prune_intervals_start.get(i) - 1; // included + for (int j = search_interval_start; j <= search_interval_end; j++) { // 
 starting from 1 + long v = valueBuffer.getLong(timeBufferLength + (j - 1) * 8); + long t = timeBuffer.getLong((j - 1) * 8); + if (v > candidateTPvalue && !isDeleted(t)) { + candidateTPvalue = v; + candidateTPtime = t; + } + } + } + chunkSuit4CPV.statistics.setMaxInfo(new MinMaxInfo(candidateTPvalue, candidateTPtime)); + } else { + throw new UnSupportedDataTypeException(String.valueOf(dataType)); + } + + // TODO note: consider the case where count=0 because all points are deleted — presumably this is handled by falling back to the original method when isFound=false; verify + } + public void updateBPTP(ChunkSuit4CPV chunkSuit4CPV) { long start = System.nanoTime(); deleteCursor = 0; // TODO DEBUG @@ -172,9 +335,9 @@ public class PageReader implements IPageReader { case INT64: statistics = new LongStatistics(); break; - case FLOAT: - statistics = new FloatStatistics(); - break; +// case FLOAT: +// statistics = new FloatStatistics(); +// break; case DOUBLE: statistics = new DoubleStatistics(); break; @@ -198,16 +361,16 @@ public class PageReader implements IPageReader { // only updateStats, actually only need to update BP and TP } break; - case FLOAT: - float aFloat = valueBuffer.getFloat(timeBufferLength + pos * 8); - if (!isDeleted(timestamp) && (filter == null || filter.satisfy(timestamp, aFloat))) { - // update statistics of chunkMetadata1 - statistics.updateStats(aFloat, timestamp); - count++; - // ATTENTION: do not use update() interface which will also update StepRegress!
+// // only updateStats, actually only need to update BP and TP +// } +// break; case DOUBLE: double aDouble = valueBuffer.getDouble(timeBufferLength + pos * 8); if (!isDeleted(timestamp) && (filter == null || filter.satisfy(timestamp, aDouble))) { @@ -231,7 +394,9 @@ public class PageReader implements IPageReader { IOMonitor2.addMeasure(Operation.SEARCH_ARRAY_c_genBPTP, System.nanoTime() - start); } - /** @return the returned BatchData may be empty, but never be null */ + /** + * @return the returned BatchData may be empty, but never be null + */ @SuppressWarnings("squid:S3776") // Suppress high Cognitive Complexity warning @Override public BatchData getAllSatisfiedPageData(boolean ascending) throws IOException { diff --git a/tsfile/src/test/java/org/apache/iotdb/tsfile/encoding/SDTEncoderTest.java b/tsfile/src/test/java/org/apache/iotdb/tsfile/encoding/SDTEncoderTest.java index 07450c8beae..f859ddab83d 100644 --- a/tsfile/src/test/java/org/apache/iotdb/tsfile/encoding/SDTEncoderTest.java +++ b/tsfile/src/test/java/org/apache/iotdb/tsfile/encoding/SDTEncoderTest.java @@ -19,19 +19,17 @@ package org.apache.iotdb.tsfile.encoding; -import org.apache.iotdb.tsfile.encoding.encoder.SDTEncoder; -import org.apache.iotdb.tsfile.read.common.IOMonitor2.ValuePoint; - -import org.junit.Test; +import static org.junit.Assert.assertEquals; import java.io.BufferedReader; import java.io.FileReader; import java.util.ArrayList; import java.util.Arrays; -import java.util.Comparator; +import java.util.Collections; import java.util.List; - -import static org.junit.Assert.assertEquals; +import org.apache.iotdb.tsfile.encoding.encoder.SDTEncoder; +import org.apache.iotdb.tsfile.read.common.ValuePoint; +import org.junit.Test; public class SDTEncoderTest { @@ -235,8 +233,8 @@ public class SDTEncoderTest { String csvData = "D:\\full-game\\BallSpeed.csv"; // double[] eList = new double[] {500000, 400000, 300000, 200000, 160000, 100000, 50000, // 10000}; - double[] eList = new double[] 
{160000}; - int[] startList = new int[] {1, 200000, 300000, 400000, 500000, 600000, 700000, 800000, 900000}; + double[] eList = new double[]{160000}; + int[] startList = new int[]{1, 200000, 300000, 400000, 500000, 600000, 700000, 800000, 900000}; List<Double> elapsedTime_withValueIndex_list = new ArrayList<>(); List<Double> elapsedTime_withoutValueIndex_list = new ArrayList<>(); List<Double> traversedComplexity_list = new ArrayList<>(); @@ -304,18 +302,19 @@ public class SDTEncoderTest { // ValuePoint[] myArray = (ValuePoint[]) points.toArray(); // Arrays.sort(points.toArray()); - points.sort( - new Comparator<ValuePoint>() { - public int compare(ValuePoint o1, ValuePoint o2) { - return ((Comparable) (o1.value)).compareTo(o2.value); - } - }); +// points.sort( +// new Comparator<ValuePoint>() { +// public int compare(ValuePoint o1, ValuePoint o2) { +// return ((Comparable) (o1.value)).compareTo(o2.value); +// } +// }); + Collections.sort(points); long startTime = System.nanoTime(); // 计算maxLB traversedComplexity += selectValues.size(); - long maxVal = points.get(points.size() - 2).value; + double maxVal = points.get(points.size() - 2).value; double threshold = maxVal - e; // maxLB // System.out.println("threshold(maxLB)=" + threshold); @@ -380,7 +379,7 @@ public class SDTEncoderTest { long elapsedTime = System.nanoTime() - startTime; elapsedTime_withValueIndex += elapsedTime; // System.out.println("search with value index: " + elapsedTime / 1000000.0 + " ms"); - // System.out.println("TP=(" + candidateTPidx + "," + candidateTPvalue + ")"); + System.out.println("TP=(" + candidateTPidx + "," + candidateTPvalue + ")"); System.out.println("search interval number=" + prune_intervals_end.size()); int traversedPoints = 0; for (int i = 0; i < prune_intervals_start.size(); i++) { @@ -408,8 +407,8 @@ public class SDTEncoderTest { elapsedTime_withoutValueIndex += elapsedTime; // System.out.println("search without value index: " + elapsedTime / 1000000.0 + " // ms"); - // 
System.out.println("TP=(" + candidateTPidx_raw + "," + candidateTPvalue_raw + - // ")"); + System.out.println("TP=(" + candidateTPidx_raw + "," + candidateTPvalue_raw + + ")"); } elapsedTime_withValueIndex_list.add( elapsedTime_withValueIndex / startList.length / 1000000.0);
