This is an automated email from the ASF dual-hosted git repository.
sunzesong pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/iotdb.git
The following commit(s) were added to refs/heads/master by this push:
new 0c39ef6 [IOTDB-1853] Fix bug about more than one TimeseriesMetadata in TsFile (#4170)
0c39ef6 is described below
commit 0c39ef6b250c53210577ff001ab4fefd1d93954f
Author: Chen YZ <[email protected]>
AuthorDate: Sun Oct 17 02:45:26 2021 +0800
[IOTDB-1853] Fix bug about more than one TimeseriesMetadata in TsFile (#4170)
---
.../iotdb/tsfile/write/writer/TsFileIOWriter.java | 2 +-
.../iotdb/tsfile/write/TsFileIOWriterTest.java | 142 +++++++++++++++++----
2 files changed, 117 insertions(+), 27 deletions(-)
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/write/writer/TsFileIOWriter.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/write/writer/TsFileIOWriter.java
index c402640..3759780 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/write/writer/TsFileIOWriter.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/write/writer/TsFileIOWriter.java
@@ -397,11 +397,11 @@ public class TsFileIOWriter {
// chunkMetadata is time column of a vector series
if (chunkMetadata.isTimeColumn()) {
Map<Path, List<IChunkMetadata>> vectorMap = vectorToPathsMap.get(path);
-
for (Map.Entry<Path, List<IChunkMetadata>> entry :
vectorMap.entrySet()) {
flushOneChunkMetadata(entry.getKey(), entry.getValue(),
vectorToPathsMap);
}
}
+ break;
}
}
diff --git a/tsfile/src/test/java/org/apache/iotdb/tsfile/write/TsFileIOWriterTest.java b/tsfile/src/test/java/org/apache/iotdb/tsfile/write/TsFileIOWriterTest.java
index fdeb5da..b25befb 100644
--- a/tsfile/src/test/java/org/apache/iotdb/tsfile/write/TsFileIOWriterTest.java
+++ b/tsfile/src/test/java/org/apache/iotdb/tsfile/write/TsFileIOWriterTest.java
@@ -19,18 +19,22 @@
package org.apache.iotdb.tsfile.write;
import org.apache.iotdb.tsfile.common.conf.TSFileConfig;
+import org.apache.iotdb.tsfile.common.constant.TsFileConstant;
import org.apache.iotdb.tsfile.constant.TestConstant;
import org.apache.iotdb.tsfile.file.MetaMarker;
import org.apache.iotdb.tsfile.file.header.ChunkGroupHeader;
import org.apache.iotdb.tsfile.file.header.ChunkHeader;
import org.apache.iotdb.tsfile.file.metadata.TimeSeriesMetadataTest;
+import org.apache.iotdb.tsfile.file.metadata.TimeseriesMetadata;
import org.apache.iotdb.tsfile.file.metadata.TsFileMetadata;
+import org.apache.iotdb.tsfile.file.metadata.enums.TSDataType;
import org.apache.iotdb.tsfile.file.metadata.statistics.Statistics;
import org.apache.iotdb.tsfile.file.metadata.utils.TestHelper;
import org.apache.iotdb.tsfile.read.TsFileSequenceReader;
import org.apache.iotdb.tsfile.read.common.Path;
import org.apache.iotdb.tsfile.write.schema.Schema;
import org.apache.iotdb.tsfile.write.schema.UnaryMeasurementSchema;
+import org.apache.iotdb.tsfile.write.schema.VectorMeasurementSchema;
import org.apache.iotdb.tsfile.write.writer.TsFileIOWriter;
import org.junit.After;
@@ -40,11 +44,16 @@ import org.junit.Test;
import java.io.File;
import java.io.IOException;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
public class TsFileIOWriterTest {
private static String tsfile =
TestConstant.BASE_OUTPUT_PATH.concat("tsfileIOWriterTest.tsfile");
private static String deviceId = "device1";
+ private static int chunkGroupNum = 2;
@Before
public void before() throws IOException {
@@ -52,26 +61,16 @@ public class TsFileIOWriterTest {
// file schema
UnaryMeasurementSchema measurementSchema =
TestHelper.createSimpleMeasurementSchema("sensor01");
+ VectorMeasurementSchema vectorMeasurementSchema =
+ new VectorMeasurementSchema(
+ "vector",
+ new String[] {"s1", "s2"},
+ new TSDataType[] {TSDataType.INT64, TSDataType.INT64});
Schema schema = new Schema();
schema.registerTimeseries(new Path(deviceId, "sensor01"),
measurementSchema);
+    schema.registerTimeseries(new Path(deviceId, "vector"), vectorMeasurementSchema);
- // chunk statistics
- Statistics statistics =
Statistics.getStatsByType(measurementSchema.getType());
- statistics.updateStats(0L, 0L);
-
- // chunk group 1
- writer.startChunkGroup(deviceId);
- writer.startFlushChunk(
- measurementSchema.getMeasurementId(),
- measurementSchema.getCompressor(),
- measurementSchema.getType(),
- measurementSchema.getEncodingType(),
- statistics,
- 0,
- 0,
- 0);
- writer.endCurrentChunk();
- writer.endChunkGroup();
+    writeChunkGroup(writer, measurementSchema, vectorMeasurementSchema, chunkGroupNum);
writer.setMinPlanIndex(100);
writer.setMaxPlanIndex(10000);
@@ -99,15 +98,35 @@ public class TsFileIOWriterTest {
reader.position(TSFileConfig.MAGIC_STRING.getBytes().length + 1);
- // chunk group header
- Assert.assertEquals(MetaMarker.CHUNK_GROUP_HEADER, reader.readMarker());
- ChunkGroupHeader chunkGroupHeader = reader.readChunkGroupHeader();
- Assert.assertEquals(deviceId, chunkGroupHeader.getDeviceID());
-
- // chunk header
- Assert.assertEquals(MetaMarker.ONLY_ONE_PAGE_CHUNK_HEADER,
reader.readMarker());
- ChunkHeader header =
reader.readChunkHeader(MetaMarker.ONLY_ONE_PAGE_CHUNK_HEADER);
- Assert.assertEquals(TimeSeriesMetadataTest.measurementUID,
header.getMeasurementID());
+ ChunkHeader header;
+ ChunkGroupHeader chunkGroupHeader;
+ for (int i = 0; i < chunkGroupNum; i++) {
+ // chunk group header
+ Assert.assertEquals(MetaMarker.CHUNK_GROUP_HEADER, reader.readMarker());
+ chunkGroupHeader = reader.readChunkGroupHeader();
+ Assert.assertEquals(deviceId, chunkGroupHeader.getDeviceID());
+ // ordinary chunk header
+ Assert.assertEquals(MetaMarker.ONLY_ONE_PAGE_CHUNK_HEADER,
reader.readMarker());
+ header = reader.readChunkHeader(MetaMarker.ONLY_ONE_PAGE_CHUNK_HEADER);
+ Assert.assertEquals(TimeSeriesMetadataTest.measurementUID,
header.getMeasurementID());
+ // vector chunk header (time)
+ Assert.assertEquals(
+ MetaMarker.ONLY_ONE_PAGE_CHUNK_HEADER |
TsFileConstant.TIME_COLUMN_MASK,
+ reader.readMarker());
+ header = reader.readChunkHeader(MetaMarker.ONLY_ONE_PAGE_CHUNK_HEADER);
+ Assert.assertEquals("vector", header.getMeasurementID());
+ // vector chunk header (values)
+ Assert.assertEquals(
+ MetaMarker.ONLY_ONE_PAGE_CHUNK_HEADER |
TsFileConstant.VALUE_COLUMN_MASK,
+ reader.readMarker());
+ header = reader.readChunkHeader(MetaMarker.ONLY_ONE_PAGE_CHUNK_HEADER);
+ Assert.assertEquals("vector.s1", header.getMeasurementID());
+ Assert.assertEquals(
+ MetaMarker.ONLY_ONE_PAGE_CHUNK_HEADER |
TsFileConstant.VALUE_COLUMN_MASK,
+ reader.readMarker());
+ header = reader.readChunkHeader(MetaMarker.ONLY_ONE_PAGE_CHUNK_HEADER);
+ Assert.assertEquals("vector.s2", header.getMeasurementID());
+ }
Assert.assertEquals(MetaMarker.OPERATION_INDEX_RANGE, reader.readMarker());
reader.readPlanIndex();
@@ -116,8 +135,79 @@ public class TsFileIOWriterTest {
Assert.assertEquals(MetaMarker.SEPARATOR, reader.readMarker());
+    // make sure each TimeseriesMetadata appears only once per series path
+ Map<String, List<TimeseriesMetadata>> deviceTimeseriesMetadataMap =
+ reader.getAllTimeseriesMetadata();
+ Set<String> pathSet = new HashSet<>();
+ for (Map.Entry<String, List<TimeseriesMetadata>> entry :
+ deviceTimeseriesMetadataMap.entrySet()) {
+ for (TimeseriesMetadata timeseriesMetadata : entry.getValue()) {
+ String seriesPath = entry.getKey() + "." +
timeseriesMetadata.getMeasurementId();
+ Assert.assertTrue(!pathSet.contains(seriesPath));
+ pathSet.add(seriesPath);
+ }
+ }
+
// FileMetaData
TsFileMetadata metaData = reader.readFileMetadata();
Assert.assertEquals(1, metaData.getMetadataIndex().getChildren().size());
}
+
+ private void writeChunkGroup(
+ TsFileIOWriter writer,
+ UnaryMeasurementSchema measurementSchema,
+ VectorMeasurementSchema vectorMeasurementSchema,
+ int groupNum)
+ throws IOException {
+ for (int i = 0; i < groupNum; i++) {
+ // chunk group
+ writer.startChunkGroup(deviceId);
+ // ordinary chunk
+ // chunk statistics
+ Statistics statistics =
Statistics.getStatsByType(measurementSchema.getType());
+ statistics.updateStats(0L, 0L);
+ writer.startFlushChunk(
+ measurementSchema.getMeasurementId(),
+ measurementSchema.getCompressor(),
+ measurementSchema.getType(),
+ measurementSchema.getEncodingType(),
+ statistics,
+ 0,
+ 0,
+ 0);
+ writer.endCurrentChunk();
+ // vector chunk (time)
+ Statistics vectorStatistics =
Statistics.getStatsByType(vectorMeasurementSchema.getType());
+ writer.startFlushChunk(
+ vectorMeasurementSchema.getMeasurementId(),
+ vectorMeasurementSchema.getCompressor(),
+ vectorMeasurementSchema.getType(),
+ vectorMeasurementSchema.getTimeTSEncoding(),
+ vectorStatistics,
+ 0,
+ 0,
+ TsFileConstant.TIME_COLUMN_MASK);
+ writer.endCurrentChunk();
+ // vector chunk (values)
+ for (int j = 0; j < vectorMeasurementSchema.getSubMeasurementsCount();
j++) {
+ Statistics subStatistics =
+ Statistics.getStatsByType(
+
vectorMeasurementSchema.getSubMeasurementsTSDataTypeList().get(j));
+ subStatistics.updateStats(0L, 0L);
+ writer.startFlushChunk(
+ vectorMeasurementSchema.getMeasurementId()
+ + "."
+ + vectorMeasurementSchema.getSubMeasurementsList().get(j),
+ vectorMeasurementSchema.getCompressor(),
+ vectorMeasurementSchema.getSubMeasurementsTSDataTypeList().get(j),
+ vectorMeasurementSchema.getSubMeasurementsTSEncodingList().get(j),
+ subStatistics,
+ 0,
+ 0,
+ TsFileConstant.VALUE_COLUMN_MASK);
+ writer.endCurrentChunk();
+ }
+ writer.endChunkGroup();
+ }
+ }
}