This is an automated email from the ASF dual-hosted git repository.

bchapuis pushed a commit to branch flatgeobuf
in repository https://gitbox.apache.org/repos/asf/incubator-baremaps.git
commit 55adcfdb6b61c0d37e35a729035253b27e000c97 Author: Bertil Chapuis <[email protected]> AuthorDate: Tue Jun 25 00:51:30 2024 +0200 Improve naming and fix tests --- .../org/apache/baremaps/flatgeobuf/BufferUtil.java | 133 ------- .../baremaps/flatgeobuf/FeatureMetaIterator.java | 85 ----- .../org/apache/baremaps/flatgeobuf/FlatGeoBuf.java | 31 +- .../baremaps/flatgeobuf/FlatGeoBufMapper.java | 173 --------- .../baremaps/flatgeobuf/FlatGeoBufReader.java | 232 +++++++++++- .../baremaps/flatgeobuf/FlatGeoBufWriter.java | 388 ++++++++++++--------- .../apache/baremaps/flatgeobuf/BufferUtilTest.java | 127 ------- .../apache/baremaps/flatgeobuf/FlatGeoBufTest.java | 153 ++++---- 8 files changed, 528 insertions(+), 794 deletions(-) diff --git a/baremaps-flatgeobuf/src/main/java/org/apache/baremaps/flatgeobuf/BufferUtil.java b/baremaps-flatgeobuf/src/main/java/org/apache/baremaps/flatgeobuf/BufferUtil.java deleted file mode 100644 index 5530a340..00000000 --- a/baremaps-flatgeobuf/src/main/java/org/apache/baremaps/flatgeobuf/BufferUtil.java +++ /dev/null @@ -1,133 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to you under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.baremaps.flatgeobuf; - -import java.io.IOException; -import java.nio.ByteBuffer; -import java.nio.ByteOrder; -import java.nio.channels.ReadableByteChannel; - -public class BufferUtil { - - private BufferUtil() { - // Prevent instantiation - } - - public static ByteBuffer createByteBuffer(int capacity, ByteOrder order) { - ByteBuffer buffer = ByteBuffer.allocate(capacity).order(order); - buffer.flip(); - return buffer; - } - - /** - * Skips the given number of bytes from the specified channel, accounting for the bytes already in - * the buffer. - * - * @param channel the channel to skip bytes from - * @param buffer the buffer to use - * @param bytesToSkip the number of bytes to skip - * @return the buffer after skipping the specified number of bytes - * @throws IOException if an I/O error occurs while reading from the channel - */ - public static ByteBuffer skipBytes(ReadableByteChannel channel, ByteBuffer buffer, - long bytesToSkip) throws IOException { - if (channel == null || buffer == null) { - throw new IllegalArgumentException("Channel and buffer must not be null"); - } - - if (bytesToSkip < 0) { - throw new IllegalArgumentException("The number of bytes to skip must be non-negative"); - } - - // If the buffer already has `bytesToSkip` or more bytes remaining, simply adjust the position. - if (buffer.remaining() >= bytesToSkip) { - buffer.position(buffer.position() + (int) bytesToSkip); - return buffer; - } - - // Calculate the number of bytes we still need to skip after accounting for the buffer's - // remaining bytes. 
- long remainingBytesToSkip = bytesToSkip - buffer.remaining(); - - // Clear the buffer to prepare it for reading. - buffer.clear(); - - // Skip bytes directly from the channel. - while (remainingBytesToSkip > 0) { - // Read into the buffer to discard the data. - int bytesRead = channel.read(buffer); - if (bytesRead == -1) { - break; // End of channel reached - } - remainingBytesToSkip -= bytesRead; - buffer.clear(); - } - - return buffer; - } - - /** - * Prepares the given buffer for reading at least `n` bytes from the specified channel. - * - * @param channel the channel to read bytes from - * @param buffer the buffer to prepare for reading - * @param bytesToRead the minimum number of bytes the buffer should contain - * @return a ByteBuffer that contains at least `n` bytes read from the channel - * @throws IOException if an I/O error occurs while reading from the channel - */ - public static ByteBuffer readBytes(ReadableByteChannel channel, ByteBuffer buffer, - int bytesToRead) throws IOException { - if (channel == null || buffer == null) { - throw new IllegalArgumentException("Channel and buffer must not be null"); - } - - if (bytesToRead < 0) { - throw new IllegalArgumentException("The number of bytes to read must be non-negative"); - } - - // If the buffer already has `n` or more bytes remaining, it will be returned as is. - if (buffer.remaining() >= bytesToRead) { - return buffer; - } - - // If the buffer has sufficient capacity but fewer than `n` bytes remaining, compact it and read - // more bytes. - if (buffer.capacity() >= bytesToRead) { - buffer.compact(); - while (buffer.position() < bytesToRead) { - if (channel.read(buffer) == -1) { - break; // End of channel reached - } - } - buffer.flip(); - return buffer; - } - - // If the buffer has insufficient capacity, allocate a new buffer with the required capacity. - ByteBuffer newBuffer = ByteBuffer.allocate(bytesToRead).order(buffer.order()); - buffer.flip(); - newBuffer.put(buffer); - while (newBuffer.position() < bytesToRead) { - if (channel.read(newBuffer) == -1) { - break; // End of channel reached - } - } - newBuffer.flip(); - return newBuffer; - } -} diff --git a/baremaps-flatgeobuf/src/main/java/org/apache/baremaps/flatgeobuf/FeatureMetaIterator.java b/baremaps-flatgeobuf/src/main/java/org/apache/baremaps/flatgeobuf/FeatureMetaIterator.java deleted file mode 100644 index c11b93f6..00000000 --- a/baremaps-flatgeobuf/src/main/java/org/apache/baremaps/flatgeobuf/FeatureMetaIterator.java +++ /dev/null @@ -1,85 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to you under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.baremaps.flatgeobuf; - -// -// import java.io.IOException; -// import java.nio.ByteBuffer; -// import java.nio.channels.ReadableByteChannel; -// import java.nio.channels.SeekableByteChannel; -// import java.util.Iterator; -// import java.util.NoSuchElementException; -// -// public class FeatureMetaIterator implements Iterator<FeatureMeta> { -// -// private final HeaderMeta headerMeta; -// -// private final ReadableByteChannel channel; -// -// private final ByteBuffer buffer; -// -// private long cursor = 0; -// -// /** -// * Constructs a row iterator. -// * -// * @param channel the channel to read from -// * @param headerMeta the header meta -// * @param buffer the buffer to use -// */ -// public FeatureMetaIterator( -// SeekableByteChannel channel, -// HeaderMeta headerMeta, -// ByteBuffer buffer) { -// this.channel = channel; -// this.headerMeta = headerMeta; -// this.buffer = buffer; -// } -// -// /** -// * {@inheritDoc} -// */ -// @Override -// public boolean hasNext() { -// return cursor < headerMeta.featuresCount; -// } -// -// /** -// * {@inheritDoc} -// */ -// @Override -// public FeatureMeta next() { -// try { -// channel.read(buffer); -// buffer.flip(); -// -// var featureSize = buffer.getInt(); -// var featureMeta = FlatGeoBufReader.readFeature(buffer, headerMeta); -// -// buffer.position(Integer.BYTES + featureSize); -// buffer.compact(); -// -// cursor++; -// -// return featureMeta; -// } catch (IOException e) { -// throw new NoSuchElementException(e); -// } -// } -// -// } diff --git a/baremaps-flatgeobuf/src/main/java/org/apache/baremaps/flatgeobuf/FlatGeoBuf.java b/baremaps-flatgeobuf/src/main/java/org/apache/baremaps/flatgeobuf/FlatGeoBuf.java index a716ff1f..a6a5c0c1 100644 --- a/baremaps-flatgeobuf/src/main/java/org/apache/baremaps/flatgeobuf/FlatGeoBuf.java +++ b/baremaps-flatgeobuf/src/main/java/org/apache/baremaps/flatgeobuf/FlatGeoBuf.java @@ -23,25 +23,25 @@ import org.locationtech.jts.geom.Geometry; public class FlatGeoBuf { - public static final byte[] MAGIC_BYTES = - new byte[] {0x66, 0x67, 0x62, 0x03, 0x66, 0x67, 0x62, 0x00}; + public static final byte[] MAGIC_BYTES = + new byte[] {0x66, 0x67, 0x62, 0x03, 0x66, 0x67, 0x62, 0x00}; - private FlatGeoBuf() { + private FlatGeoBuf() { // Prevent instantiation } - public static boolean isFlatgeobuf(ByteBuffer bb) { - return bb.get() == MAGIC_BYTES[0] && - bb.get() == MAGIC_BYTES[1] && - bb.get() == MAGIC_BYTES[2] && - bb.get() == MAGIC_BYTES[3] && - bb.get() == MAGIC_BYTES[4] && - bb.get() == MAGIC_BYTES[5] && - bb.get() == MAGIC_BYTES[6] && - bb.get() == MAGIC_BYTES[7]; - } + public static boolean isFlatgeobuf(ByteBuffer bb) { + return bb.get() == MAGIC_BYTES[0] && + bb.get() == MAGIC_BYTES[1] && + bb.get() == MAGIC_BYTES[2] && + bb.get() == MAGIC_BYTES[3] && + bb.get() == MAGIC_BYTES[4] && + bb.get() == MAGIC_BYTES[5] && + bb.get() == MAGIC_BYTES[6] && + bb.get() == MAGIC_BYTES[7]; + } - // Geometry type enumeration + // Geometry type enumeration public enum GeometryType { UNKNOWN(0), POINT(1), @@ -129,9 +129,6 @@ public class FlatGeoBuf { String title, String description, String metadata) { - public Header { - indexNodeSize = indexNodeSize == 0 ? 
16 : indexNodeSize; - } } public record Feature(List<Object> properties, Geometry geometry) { diff --git a/baremaps-flatgeobuf/src/main/java/org/apache/baremaps/flatgeobuf/FlatGeoBufMapper.java b/baremaps-flatgeobuf/src/main/java/org/apache/baremaps/flatgeobuf/FlatGeoBufMapper.java deleted file mode 100644 index f48d4d27..00000000 --- a/baremaps-flatgeobuf/src/main/java/org/apache/baremaps/flatgeobuf/FlatGeoBufMapper.java +++ /dev/null @@ -1,173 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to you under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.baremaps.flatgeobuf; - -import com.google.flatbuffers.FlatBufferBuilder; -import java.nio.ByteBuffer; -import java.nio.charset.StandardCharsets; -import java.util.ArrayList; -import java.util.List; -import java.util.stream.IntStream; -import org.apache.baremaps.flatgeobuf.generated.Column; -import org.apache.baremaps.flatgeobuf.generated.Feature; -import org.apache.baremaps.flatgeobuf.generated.Header; - -public class FlatGeoBufMapper { - - private FlatGeoBufMapper() { - // Prevent instantiation - } - - public static Header asHeaderFlatGeoBuf(FlatGeoBuf.Header header) { - var builder = new FlatBufferBuilder(); - int[] columnsArray = header.columns().stream().mapToInt(c -> { - int nameOffset = builder.createString(c.name()); - int type = c.type().ordinal(); - return org.apache.baremaps.flatgeobuf.generated.Column.createColumn( - builder, nameOffset, type, 0, 0, c.width(), c.precision(), c.scale(), c.nullable(), - c.unique(), - c.primaryKey(), 0); - }).toArray(); - int columnsOffset = - org.apache.baremaps.flatgeobuf.generated.Header.createColumnsVector(builder, columnsArray); - - int nameOffset = 0; - if (header.name() != null) { - nameOffset = builder.createString(header.name()); - } - int crsOffset = 0; - if (header.crs().code() != 0) { - org.apache.baremaps.flatgeobuf.generated.Crs.startCrs(builder); - org.apache.baremaps.flatgeobuf.generated.Crs.addCode(builder, header.crs().code()); - crsOffset = org.apache.baremaps.flatgeobuf.generated.Crs.endCrs(builder); - } - int envelopeOffset = 0; - if (header.envelope() != null) { - envelopeOffset = Header.createEnvelopeVector(builder, header.envelope().stream().mapToDouble(d -> d).toArray()); - } - Header.startHeader(builder); - Header.addGeometryType(builder, header.geometryType().getValue()); - Header.addIndexNodeSize(builder, header.indexNodeSize()); - Header.addColumns(builder, columnsOffset); - Header.addEnvelope(builder, envelopeOffset); - Header.addName(builder, nameOffset); - Header.addCrs(builder, crsOffset); - Header.addFeaturesCount(builder, header.featuresCount()); - int offset = Header.endHeader(builder); - - builder.finishSizePrefixed(offset); - - return Header.getRootAsHeader(builder.dataBuffer()); - } - - public static FlatGeoBuf.Header asHeaderRecord(Header 
header) { - return new FlatGeoBuf.Header( - header.name(), - List.of( - header.envelope(0), - header.envelope(1), - header.envelope(2), - header.envelope(3)), - FlatGeoBuf.GeometryType.values()[header.geometryType()], - header.hasZ(), - header.hasM(), - header.hasT(), - header.hasTm(), - IntStream.range(0, header.columnsLength()) - .mapToObj(header::columns) - .map(column -> new FlatGeoBuf.Column( - column.name(), - FlatGeoBuf.ColumnType.values()[column.type()], - column.title(), - column.description(), - column.width(), - column.precision(), - column.scale(), - column.nullable(), - column.unique(), - column.primaryKey(), - column.metadata())) - .toList(), - header.featuresCount(), - header.indexNodeSize(), - new FlatGeoBuf.Crs( - header.crs().org(), - header.crs().code(), - header.crs().name(), - header.crs().description(), - header.crs().wkt(), - header.crs().codeString()), - header.title(), - header.description(), - header.metadata()); - } - - public static FlatGeoBuf.Feature asFeatureRecord(Header header, Feature feature) { - var values = new ArrayList<>(); - if (feature.propertiesLength() > 0) { - var propertiesBuffer = feature.propertiesAsByteBuffer(); - while (propertiesBuffer.hasRemaining()) { - var columnPosition = propertiesBuffer.getShort(); - var columnType = header.columns(columnPosition); - var columnValue = readValue(propertiesBuffer, columnType); - values.add(columnValue); - } - } - return new FlatGeoBuf.Feature( - values, GeometryConversions.readGeometry(feature.geometry(), header.geometryType()) - ); - } - - private static Object readValue(ByteBuffer buffer, Column column) { - return switch (FlatGeoBuf.ColumnType.values()[column.type()]) { - case BYTE -> buffer.get(); - case UBYTE -> buffer.get(); - case BOOL -> buffer.get() == 1; - case SHORT -> buffer.getShort(); - case USHORT -> buffer.getShort(); - case INT -> buffer.getInt(); - case UINT -> buffer.getInt(); - case LONG -> buffer.getLong(); - case ULONG -> buffer.getLong(); - case FLOAT -> buffer.getFloat(); - case DOUBLE -> buffer.getDouble(); - case STRING -> readString(buffer); - case JSON -> readJson(buffer); - case DATETIME -> readDateTime(buffer); - case BINARY -> readBinary(buffer); - }; - } - - private static Object readString(ByteBuffer buffer) { - var length = buffer.getInt(); - var bytes = new byte[length]; - buffer.get(bytes); - return new String(bytes, StandardCharsets.UTF_8); - } - - private static Object readJson(ByteBuffer buffer) { - throw new UnsupportedOperationException(); - } - - private static Object readDateTime(ByteBuffer buffer) { - throw new UnsupportedOperationException(); - } - - private static Object readBinary(ByteBuffer buffer) { - throw new UnsupportedOperationException(); - } -} diff --git a/baremaps-flatgeobuf/src/main/java/org/apache/baremaps/flatgeobuf/FlatGeoBufReader.java b/baremaps-flatgeobuf/src/main/java/org/apache/baremaps/flatgeobuf/FlatGeoBufReader.java index d7c390fd..38b49fa0 100644 --- a/baremaps-flatgeobuf/src/main/java/org/apache/baremaps/flatgeobuf/FlatGeoBufReader.java +++ b/baremaps-flatgeobuf/src/main/java/org/apache/baremaps/flatgeobuf/FlatGeoBufReader.java @@ -24,25 +24,51 @@ import java.nio.ByteBuffer; import java.nio.ByteOrder; import java.nio.channels.Channels; import java.nio.channels.ReadableByteChannel; +import java.nio.charset.StandardCharsets; +import java.util.ArrayList; +import java.util.List; +import java.util.stream.IntStream; +import org.apache.baremaps.flatgeobuf.generated.Column; import org.apache.baremaps.flatgeobuf.generated.Feature; import 
org.apache.baremaps.flatgeobuf.generated.Header; +import org.locationtech.jts.geom.Geometry; public class FlatGeoBufReader { - public static Header readHeader(ReadableByteChannel channel) + public static FlatGeoBuf.Header readHeaderRecord(ReadableByteChannel channel) + throws IOException { + Header header = readHeaderFlatGeoBuf(channel); + return asRecord(header); + } + + public static Header readHeaderFlatGeoBuf(ReadableByteChannel channel) throws IOException { // Check if the file is a flatgeobuf - ByteBuffer buffer = BufferUtil.createByteBuffer(12, ByteOrder.LITTLE_ENDIAN); - BufferUtil.readBytes(channel, buffer, 12); - if (!FlatGeoBuf.isFlatgeobuf(buffer)) { + ByteBuffer prefixBuffer = ByteBuffer.allocate(12).order(ByteOrder.LITTLE_ENDIAN); + while (prefixBuffer.hasRemaining()) { + if (channel.read(prefixBuffer) == -1) { + break; // End of channel reached + } + } + prefixBuffer.flip(); + if (!FlatGeoBuf.isFlatgeobuf(prefixBuffer)) { throw new IOException("This is not a flatgeobuf!"); } // Read the header size - int headerSize = buffer.getInt(); - ByteBuffer headerBuffer = BufferUtil.createByteBuffer(headerSize, ByteOrder.LITTLE_ENDIAN); - BufferUtil.readBytes(channel, headerBuffer, headerSize); + int headerSize = prefixBuffer.getInt(); + ByteBuffer headerBuffer = ByteBuffer.allocate(headerSize).order(ByteOrder.LITTLE_ENDIAN); + + // Read the header + while (headerBuffer.hasRemaining()) { + if (channel.read(headerBuffer) == -1) { + break; // End of channel reached + } + } + + // Prepare the buffer for reading + headerBuffer.flip(); return Header.getRootAsHeader(headerBuffer); } @@ -54,12 +80,24 @@ public class FlatGeoBufReader { public static ByteBuffer readIndexBuffer(ReadableByteChannel channel, Header header) throws IOException { + + // Calculate the size of the index long indexSize = PackedRTree.calcSize(header.featuresCount(), header.indexNodeSize()); if (indexSize > 1L << 31) { throw new IOException("Index size is greater than 2GB!"); } - ByteBuffer buffer = BufferUtil.createByteBuffer((int) indexSize, ByteOrder.LITTLE_ENDIAN); - BufferUtil.readBytes(channel, buffer, (int) indexSize); + + // Read the index + ByteBuffer buffer = ByteBuffer.allocate((int) indexSize).order(ByteOrder.LITTLE_ENDIAN); + while (buffer.hasRemaining()) { + if (channel.read(buffer) == -1) { + break; // End of channel reached + } + } + + // Prepare the buffer for reading + buffer.flip(); + return buffer; } @@ -68,20 +106,178 @@ public class FlatGeoBufReader { return new BoundedInputStream(Channels.newInputStream(channel), indexSize); } - public static Feature readFeature(ReadableByteChannel channel, ByteBuffer buffer) + public static FlatGeoBuf.Feature readFeatureRecord(ReadableByteChannel channel, + Header header, ByteBuffer buffer) throws IOException { + Feature feature = readFeatureFlatGeoBuf(channel, buffer); + List<Object> properties = new ArrayList<>(); + + if (feature.propertiesLength() > 0) { + var propertiesBuffer = feature.propertiesAsByteBuffer(); + while (propertiesBuffer.hasRemaining()) { + var type = propertiesBuffer.getShort(); + var column = header.columns(type); + var value = readValue(propertiesBuffer, column); + properties.add(value); + } + } + + + Geometry geometry = + GeometryConversions.readGeometry(feature.geometry(), header.geometryType()); + return new FlatGeoBuf.Feature(properties, geometry); + } + + /** + * Reads a feature from the specified channel. 
+ * <p> + * The provided buffer is reused from call to call to avoid unnecessary allocations, so the caller + * should not modify the buffer after calling this method. It may be freshly allocated or may + * contain data from a previous call. + * + * @param channel the channel to read from + * @param buffer the buffer to use + * @return + * @throws IOException + */ + public static Feature readFeatureFlatGeoBuf(ReadableByteChannel channel, ByteBuffer buffer) + throws IOException { + try { - ByteBuffer newBuffer = BufferUtil.readBytes(channel, buffer, 1<<16); - int featureSize = newBuffer.getInt(); - newBuffer = BufferUtil.readBytes(channel, newBuffer, featureSize); - Feature feature = Feature.getRootAsFeature(newBuffer); - buffer.position(buffer.position() + featureSize); - return feature; - } catch (IOException | BufferUnderflowException e) { - throw new IOException("Error reading feature", e); + // Compact the buffer if it has been used before + if (buffer.position() > 0) { + buffer.compact(); + } + + // Fill the buffer + while (buffer.hasRemaining()) { + if (channel.read(buffer) == -1) { + break; // End of channel reached + } + } + + // Read the feature size + buffer.flip(); + int featureSize = buffer.getInt(); + + // Allocate a new buffer if the feature size is greater than the current buffer capacity + if (featureSize > buffer.remaining()) { + ByteBuffer newBuffer = ByteBuffer.allocate(featureSize).order(ByteOrder.LITTLE_ENDIAN); + + // Copy the remaining bytes from the current buffer to the new buffer + newBuffer.put(buffer); + + // Fill the new buffer with the remaining bytes + while (newBuffer.hasRemaining()) { + if (channel.read(newBuffer) == -1) { + break; // End of channel reached + } + } + + // Prepare the new buffer for reading + newBuffer.flip(); + + // Read the feature from the new buffer + Feature feature = Feature.getRootAsFeature(newBuffer.duplicate()); + + // Clear the old buffer to prepare for the next read + buffer.clear(); + + return feature; + + } else { + Feature feature = Feature.getRootAsFeature(buffer.slice(buffer.position(), featureSize)); + buffer.position(buffer.position() + featureSize); + + return feature; + } + } catch (BufferUnderflowException e) { + throw new IOException("Failed to read feature", e); } } + + public static FlatGeoBuf.Header asRecord(Header header) { + return new FlatGeoBuf.Header( + header.name(), + List.of( + header.envelope(0), + header.envelope(1), + header.envelope(2), + header.envelope(3)), + FlatGeoBuf.GeometryType.values()[header.geometryType()], + header.hasZ(), + header.hasM(), + header.hasT(), + header.hasTm(), + IntStream.range(0, header.columnsLength()) + .mapToObj(header::columns) + .map(column -> new FlatGeoBuf.Column( + column.name(), + FlatGeoBuf.ColumnType.values()[column.type()], + column.title(), + column.description(), + column.width(), + column.precision(), + column.scale(), + column.nullable(), + column.unique(), + column.primaryKey(), + column.metadata())) + .toList(), + header.featuresCount(), + header.indexNodeSize(), + new FlatGeoBuf.Crs( + header.crs().org(), + header.crs().code(), + header.crs().name(), + header.crs().description(), + header.crs().wkt(), + header.crs().codeString()), + header.title(), + header.description(), + header.metadata()); + } + + static Object readValue(ByteBuffer buffer, Column column) { + return switch (FlatGeoBuf.ColumnType.values()[column.type()]) { + case BYTE -> buffer.get(); + case UBYTE -> buffer.get(); + case BOOL -> buffer.get() == 1; + case SHORT -> buffer.getShort(); + case 
USHORT -> buffer.getShort(); + case INT -> buffer.getInt(); + case UINT -> buffer.getInt(); + case LONG -> buffer.getLong(); + case ULONG -> buffer.getLong(); + case FLOAT -> buffer.getFloat(); + case DOUBLE -> buffer.getDouble(); + case STRING -> readString(buffer); + case JSON -> readJson(buffer); + case DATETIME -> readDateTime(buffer); + case BINARY -> readBinary(buffer); + }; + } + + private static Object readString(ByteBuffer buffer) { + var length = buffer.getInt(); + var bytes = new byte[length]; + buffer.get(bytes); + return new String(bytes, StandardCharsets.UTF_8); + } + + private static Object readJson(ByteBuffer buffer) { + throw new UnsupportedOperationException(); + } + + private static Object readDateTime(ByteBuffer buffer) { + throw new UnsupportedOperationException(); + } + + private static Object readBinary(ByteBuffer buffer) { + throw new UnsupportedOperationException(); + } + private static class BoundedInputStream extends InputStream { private final InputStream in; private long remaining; diff --git a/baremaps-flatgeobuf/src/main/java/org/apache/baremaps/flatgeobuf/FlatGeoBufWriter.java b/baremaps-flatgeobuf/src/main/java/org/apache/baremaps/flatgeobuf/FlatGeoBufWriter.java index a117d894..71ec6dd4 100644 --- a/baremaps-flatgeobuf/src/main/java/org/apache/baremaps/flatgeobuf/FlatGeoBufWriter.java +++ b/baremaps-flatgeobuf/src/main/java/org/apache/baremaps/flatgeobuf/FlatGeoBufWriter.java @@ -18,12 +18,9 @@ package org.apache.baremaps.flatgeobuf; -import com.google.flatbuffers.FlatBufferBuilder; -import org.apache.baremaps.flatgeobuf.generated.Column; -import org.apache.baremaps.flatgeobuf.generated.Crs; -import org.apache.baremaps.flatgeobuf.generated.Feature; -import org.apache.baremaps.flatgeobuf.generated.Header; +import static org.apache.baremaps.flatgeobuf.FlatGeoBufReader.readValue; +import com.google.flatbuffers.FlatBufferBuilder; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; @@ -32,164 +29,233 @@ import java.nio.ByteOrder; import java.nio.channels.Channels; import java.nio.channels.WritableByteChannel; import java.nio.charset.StandardCharsets; +import java.util.ArrayList; +import java.util.List; +import org.apache.baremaps.flatgeobuf.generated.*; +import org.locationtech.jts.geom.Geometry; public class FlatGeoBufWriter { - public static void writeHeader(WritableByteChannel channel, Header header) throws IOException { - ByteBuffer headerBuffer = header.getByteBuffer(); - ByteBuffer startBuffer = ByteBuffer.allocate(12).order(ByteOrder.LITTLE_ENDIAN); - startBuffer.put(FlatGeoBuf.MAGIC_BYTES); - startBuffer.putInt(headerBuffer.remaining()); - startBuffer.flip(); - while (startBuffer.hasRemaining()) { - channel.write(startBuffer); - } - while (headerBuffer.hasRemaining()) { - channel.write(headerBuffer); - } - } - - public static void writeIndexStream(WritableByteChannel channel, InputStream inputStream) throws IOException { - try (OutputStream outputStream = Channels.newOutputStream(channel)) { - outputStream.write(inputStream.readAllBytes()); - } - } - - public static void writeIndexBuffer(WritableByteChannel channel, ByteBuffer buffer) throws IOException { - while (buffer.hasRemaining()) { - channel.write(buffer); - } - } - - public static void writeFeature(WritableByteChannel channel, Feature feature) throws IOException { - ByteBuffer featureBuffer = feature.getByteBuffer().duplicate(); - featureBuffer.flip(); - channel.write(featureBuffer); - while (featureBuffer.hasRemaining()) { - channel.write(featureBuffer); - } - } 
- - public static void writeColumnValue(ByteBuffer buffer, FlatGeoBuf.Column column, Object value) { - switch (column.type()) { - case BYTE -> buffer.put((byte) value); - case BOOL -> buffer.put((byte) ((boolean) value ? 1 : 0)); - case SHORT -> buffer.putShort((short) value); - case INT -> buffer.putInt((int) value); - case LONG -> buffer.putLong((long) value); - case FLOAT -> buffer.putFloat((float) value); - case DOUBLE -> buffer.putDouble((double) value); - case STRING -> writeColumnString(buffer, value); - case JSON -> writeColumnJson(buffer, value); - case DATETIME -> writeColumnDateTime(buffer, value); - case BINARY -> writeColumnBinary(buffer, value); - } - } - - public static void writeColumnString(ByteBuffer propertiesBuffer, Object value) { - var bytes = ((String) value).getBytes(StandardCharsets.UTF_8); - propertiesBuffer.putInt(bytes.length); - propertiesBuffer.put(bytes); - } - - public static void writeColumnJson(ByteBuffer propertiesBuffer, Object value) { - throw new UnsupportedOperationException(); - } - - public static void writeColumnDateTime(ByteBuffer propertiesBuffer, Object value) { - throw new UnsupportedOperationException(); - } - - public static void writeColumnBinary(ByteBuffer propertiesBuffer, Object value) { - throw new UnsupportedOperationException(); - } - - public static void writeFeature( - OutputStream outputStream, FlatGeoBuf.Header headerMeta, - FlatGeoBuf.Feature featureMeta) throws IOException { - var featureBuilder = new FlatBufferBuilder(4096); - - // Write the properties - var propertiesBuffer = ByteBuffer.allocate(1 << 20).order(ByteOrder.LITTLE_ENDIAN); - var properties = featureMeta.properties(); - for (int i = 0; i < properties.size(); i++) { - var column = headerMeta.columns().get(i); - var value = properties.get(i); - propertiesBuffer.putShort((short) i); - writeColumnValue(propertiesBuffer, column, value); - } - if (propertiesBuffer.position() > 0) { - propertiesBuffer.flip(); - } - var propertiesOffset = Feature.createPropertiesVector(featureBuilder, propertiesBuffer); - - // Write the geometry - var geometry = featureMeta.geometry(); - var geometryOffset = 0; - if (geometry != null) { - geometryOffset = - GeometryConversions.writeGeometry(featureBuilder, geometry, (byte) headerMeta.geometryType().getValue()); - } - - // Write the feature - var featureOffset = Feature.createFeature(featureBuilder, geometryOffset, propertiesOffset, 0); - featureBuilder.finishSizePrefixed(featureOffset); - - byte[] data = featureBuilder.sizedByteArray(); - outputStream.write(data); - } - - public static void write(FlatGeoBuf.Header headerMeta, OutputStream to, FlatBufferBuilder builder) - throws IOException { - int[] columnsArray = headerMeta.columns().stream().mapToInt(c -> { - int nameOffset = builder.createString(c.name()); - int type = c.type().ordinal(); - return Column.createColumn( - builder, - nameOffset, - type, - 0, - 0, - c.width(), - c.precision(), - c.scale(), - c.nullable(), - c.unique(), - c.primaryKey(), - 0); - }).toArray(); - int columnsOffset = Header.createColumnsVector(builder, columnsArray); - - int nameOffset = 0; - if (headerMeta.name() != null) { - nameOffset = builder.createString(headerMeta.name()); - } - int crsOffset = 0; - if (headerMeta.crs().code() != 0) { - Crs.startCrs(builder); - Crs.addCode(builder, headerMeta.crs().code()); - crsOffset = Crs.endCrs(builder); - } - int envelopeOffset = 0; - if (headerMeta.envelope() != null) { - envelopeOffset = 
Header.createEnvelopeVector(builder,headerMeta.envelope().stream().mapToDouble(d -> d).toArray()); - } - Header.startHeader(builder); - Header.addGeometryType(builder, headerMeta.geometryType().getValue()); - Header.addIndexNodeSize(builder, headerMeta.indexNodeSize()); - Header.addColumns(builder, columnsOffset); - Header.addEnvelope(builder, envelopeOffset); - Header.addName(builder, nameOffset); - Header.addCrs(builder, crsOffset); - Header.addFeaturesCount(builder, headerMeta.featuresCount()); - int offset = Header.endHeader(builder); - - builder.finishSizePrefixed(offset); - - WritableByteChannel channel = Channels.newChannel(to); - ByteBuffer dataBuffer = builder.dataBuffer(); - while (dataBuffer.hasRemaining()) { - channel.write(dataBuffer); - } + public static void writeHeaderRecord(WritableByteChannel channel, FlatGeoBuf.Header header) + throws IOException { + Header headerFlatGeoBuf = asHeaderRecord(header); + writeHeaderFlatGeoBuf(channel, headerFlatGeoBuf); + } + + public static void writeHeaderFlatGeoBuf(WritableByteChannel channel, Header header) throws IOException { + ByteBuffer headerBuffer = header.getByteBuffer(); + ByteBuffer startBuffer = ByteBuffer.allocate(12).order(ByteOrder.LITTLE_ENDIAN); + startBuffer.put(FlatGeoBuf.MAGIC_BYTES); + startBuffer.putInt(headerBuffer.remaining()); + startBuffer.flip(); + while (startBuffer.hasRemaining()) { + channel.write(startBuffer); + } + while (headerBuffer.hasRemaining()) { + channel.write(headerBuffer); + } + } + + public static void writeIndexStream(WritableByteChannel channel, InputStream inputStream) + throws IOException { + try (OutputStream outputStream = Channels.newOutputStream(channel)) { + outputStream.write(inputStream.readAllBytes()); + } + } + + public static void writeIndexBuffer(WritableByteChannel channel, ByteBuffer buffer) + throws IOException { + while (buffer.hasRemaining()) { + channel.write(buffer); + } + } + + public static void writeFeatureFlatGeoBuf(WritableByteChannel channel, Feature feature) throws IOException { + ByteBuffer sizeBuffer = ByteBuffer.allocate(4).order(ByteOrder.LITTLE_ENDIAN); + sizeBuffer.putInt(feature.getByteBuffer().remaining()); + sizeBuffer.flip(); + while (sizeBuffer.hasRemaining()) { + channel.write(sizeBuffer); + } + ByteBuffer featureBuffer = feature.getByteBuffer().duplicate(); + while (featureBuffer.hasRemaining()) { + channel.write(featureBuffer); + } + } + + public static void writeFeatureRecord( + WritableByteChannel channel, + Header header, + FlatGeoBuf.Feature feature) throws IOException { + Feature featureRecord = writeFeature(header, feature); + writeFeatureFlatGeoBuf(channel, featureRecord); + } + + public static Header asHeaderRecord(FlatGeoBuf.Header header) { + FlatBufferBuilder builder = new FlatBufferBuilder(4096); + + int[] columnsArray = header.columns().stream().mapToInt(c -> { + int nameOffset = builder.createString(c.name()); + int type = c.type().ordinal(); + return Column.createColumn( + builder, nameOffset, type, 0, 0, c.width(), c.precision(), c.scale(), c.nullable(), + c.unique(), + c.primaryKey(), 0); + }).toArray(); + int columnsOffset = + Header.createColumnsVector(builder, columnsArray); + + int envelopeOffset = 0; + if (header.envelope() != null) { + envelopeOffset = Header.createEnvelopeVector(builder, + header.envelope().stream().mapToDouble(d -> d).toArray()); + } + + int nameOffset = 0; + if (header.name() != null) { + nameOffset = builder.createString(header.name()); + } + + int crsOrgOffset = 0; + if (header.crs().org() != null) { + 
crsOrgOffset = builder.createString(header.crs().org()); + } + + int crsNameOffset = 0; + if (header.crs().name() != null) { + crsNameOffset = builder.createString(header.crs().name()); + } + + int crsDescriptionOffset = 0; + if (header.crs().description() != null) { + crsDescriptionOffset = builder.createString(header.crs().description()); + } + + int crsWktOffset = 0; + if (header.crs().wkt() != null) { + crsWktOffset = builder.createString(header.crs().wkt()); + } + + int crsCodeStringOffset = 0; + if (header.crs().codeString() != null) { + crsCodeStringOffset = builder.createString(header.crs().codeString()); + } + + Crs.startCrs(builder); + Crs.addOrg(builder, crsOrgOffset); + Crs.addCode(builder, header.crs().code()); + Crs.addName(builder, crsNameOffset); + Crs.addDescription(builder, crsDescriptionOffset); + Crs.addWkt(builder, crsWktOffset); + Crs.addCodeString(builder, crsCodeStringOffset); + int crsOffset = Crs.endCrs(builder); + + + + Header.startHeader(builder); + Header.addGeometryType(builder, header.geometryType().getValue()); + Header.addFeaturesCount(builder, header.featuresCount()); + Header.addIndexNodeSize(builder, header.indexNodeSize()); + Header.addColumns(builder, columnsOffset); + Header.addEnvelope(builder, envelopeOffset); + Header.addName(builder, nameOffset); + Header.addCrs(builder, crsOffset); + + int offset = Header.endHeader(builder); + builder.finish(offset); + + ByteBuffer buffer = builder.dataBuffer().asReadOnlyBuffer(); + return Header.getRootAsHeader(buffer); + } + + public static void writeValue(ByteBuffer buffer, Column column, Object value) { + switch (column.type()) { + case ColumnType.Bool -> buffer.put((byte) ((boolean) value ? 1 : 0)); + case ColumnType.Short -> buffer.putShort((short) value); + case ColumnType.UShort -> buffer.putShort((short) value); + case ColumnType.Int -> buffer.putInt((int) value); + case ColumnType.UInt -> buffer.putInt((int) value); + case ColumnType.Long -> buffer.putLong((long) value); + case ColumnType.ULong -> buffer.putLong((long) value); + case ColumnType.Float -> buffer.putFloat((float) value); + case ColumnType.Double -> buffer.putDouble((double) value); + case ColumnType.String -> writeColumnString(buffer, value); + case ColumnType.Json -> writeColumnJson(buffer, value); + case ColumnType.DateTime -> writeColumnDateTime(buffer, value); + case ColumnType.Binary -> writeColumnBinary(buffer, value); + } + } + + public static void writeColumnString(ByteBuffer propertiesBuffer, Object value) { + var bytes = ((String) value).getBytes(StandardCharsets.UTF_8); + propertiesBuffer.putInt(bytes.length); + propertiesBuffer.put(bytes); + } + + public static void writeColumnJson(ByteBuffer propertiesBuffer, Object value) { + throw new UnsupportedOperationException(); + } + + public static void writeColumnDateTime(ByteBuffer propertiesBuffer, Object value) { + throw new UnsupportedOperationException(); + } + + public static void writeColumnBinary(ByteBuffer propertiesBuffer, Object value) { + throw new UnsupportedOperationException(); + } + + public static Feature writeFeature(Header header, FlatGeoBuf.Feature feature) + throws IOException { + FlatBufferBuilder builder = new FlatBufferBuilder(4096); + + // Write the properties + ByteBuffer propertiesBuffer = ByteBuffer.allocate(1 << 20).order(ByteOrder.LITTLE_ENDIAN); + List<Object> properties = feature.properties(); + for (int i = 0; i < properties.size(); i++) { + var column = header.columns(i); + var value = properties.get(i); + propertiesBuffer.putShort((short) i); + 
writeValue(propertiesBuffer, column, value); + } + if (propertiesBuffer.position() > 0) { + propertiesBuffer.flip(); + } + int propertiesOffset = Feature.createPropertiesVector(builder, propertiesBuffer); + + // Write the geometry + Geometry geometry = feature.geometry(); + int geometryOffset = 0; + if (geometry != null) { + geometryOffset = + GeometryConversions.writeGeometry(builder, geometry, + (byte) header.geometryType()); + } + + // Write the feature + Feature.startFeature(builder); + Feature.addGeometry(builder, geometryOffset); + Feature.addProperties(builder, propertiesOffset); + Feature.addColumns(builder, 0); + + int offset = Feature.endFeature(builder); + builder.finish(offset); + + ByteBuffer buffer = builder.dataBuffer().asReadOnlyBuffer(); + return Feature.getRootAsFeature(buffer); + } + + public static FlatGeoBuf.Feature writeFeatureFlatGeoBuf(Header header, Feature feature) { + var values = new ArrayList<>(); + if (feature.propertiesLength() > 0) { + var propertiesBuffer = feature.propertiesAsByteBuffer(); + while (propertiesBuffer.hasRemaining()) { + var columnPosition = propertiesBuffer.getShort(); + var columnType = header.columns(columnPosition); + var columnValue = readValue(propertiesBuffer, columnType); + values.add(columnValue); + } } + return new FlatGeoBuf.Feature( + values, GeometryConversions.readGeometry(feature.geometry(), header.geometryType())); + } } diff --git a/baremaps-flatgeobuf/src/test/java/org/apache/baremaps/flatgeobuf/BufferUtilTest.java b/baremaps-flatgeobuf/src/test/java/org/apache/baremaps/flatgeobuf/BufferUtilTest.java deleted file mode 100644 index 54906543..00000000 --- a/baremaps-flatgeobuf/src/test/java/org/apache/baremaps/flatgeobuf/BufferUtilTest.java +++ /dev/null @@ -1,127 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to you under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.baremaps.flatgeobuf; - -import static org.junit.Assert.*; - -import java.io.ByteArrayInputStream; -import java.io.IOException; -import java.nio.ByteBuffer; -import java.nio.channels.Channels; -import java.nio.channels.ReadableByteChannel; -import org.junit.jupiter.api.Test; - -public class BufferUtilTest { - - @Test - void testPrepareBufferAlreadySufficient() throws IOException { - byte[] data = "Hello, World!".getBytes(); - ReadableByteChannel channel = Channels.newChannel(new ByteArrayInputStream(data)); - ByteBuffer buffer = ByteBuffer.allocate(15); - buffer.put(data); - buffer.flip(); - - ByteBuffer result = BufferUtil.readBytes(channel, buffer, 5); - assertEquals(buffer, result); - assertEquals(13, result.remaining()); - } - - @Test - void testPrepareBufferCompactAndRead() throws IOException { - byte[] data = "Hello, World!".getBytes(); - ReadableByteChannel channel = Channels.newChannel(new ByteArrayInputStream(data)); - ByteBuffer buffer = ByteBuffer.allocate(15); - buffer.put("Hello".getBytes()); - buffer.flip(); - - ByteBuffer result = BufferUtil.readBytes(channel, buffer, 10); - assertEquals(buffer, result); - assertTrue(result.remaining() >= 10); - } - - @Test - void testPrepareBufferAllocateNewBuffer() throws IOException { - byte[] data = "Hello, World!".getBytes(); - ReadableByteChannel channel = Channels.newChannel(new ByteArrayInputStream(data)); - ByteBuffer buffer = ByteBuffer.allocate(5); - buffer.put("Hi".getBytes()); - buffer.flip(); - - ByteBuffer result = BufferUtil.readBytes(channel, buffer, 10); - assertNotEquals(buffer, result); - assertTrue(result.capacity() >= 10); - assertTrue(result.remaining() >= 10); - } - - @Test - void testPrepareBufferWithExactCapacity() throws IOException { - byte[] data = "Hello, World!".getBytes(); - ReadableByteChannel channel = Channels.newChannel(new ByteArrayInputStream(data)); - ByteBuffer buffer = ByteBuffer.allocate(13); - buffer.put(data, 0, 5); - buffer.flip(); - - ByteBuffer result = BufferUtil.readBytes(channel, buffer, 10); - assertEquals(buffer, result); - assertTrue(result.remaining() >= 10); - } - - @Test - void testPrepareEndOfChannel() throws IOException { - byte[] data = "Hello".getBytes(); - ReadableByteChannel channel = Channels.newChannel(new ByteArrayInputStream(data)); - ByteBuffer buffer = ByteBuffer.allocate(10); - buffer.put("Hi".getBytes()); - buffer.flip(); - - ByteBuffer result = BufferUtil.readBytes(channel, buffer, 10); - assertEquals(buffer, result); - assertTrue(result.remaining() <= 10); - } - - @Test - void testPrepareNullChannel() { - ByteBuffer buffer = ByteBuffer.allocate(10); - IllegalArgumentException thrown = assertThrows(IllegalArgumentException.class, () -> { - BufferUtil.readBytes(null, buffer, 10); - }); - assertEquals("Channel and buffer must not be null", thrown.getMessage()); - } - - @Test - void testPrepareNullBuffer() { - byte[] data = "Hello".getBytes(); - ReadableByteChannel channel = Channels.newChannel(new ByteArrayInputStream(data)); - IllegalArgumentException thrown = assertThrows(IllegalArgumentException.class, () -> { - BufferUtil.readBytes(channel, null, 10); - }); - assertEquals("Channel and buffer must not be null", thrown.getMessage()); - } - - @Test - void testPrepareNegativeBytes() { - byte[] data = "Hello".getBytes(); - ReadableByteChannel channel = Channels.newChannel(new ByteArrayInputStream(data)); - ByteBuffer buffer = ByteBuffer.allocate(10); - IllegalArgumentException thrown = assertThrows(IllegalArgumentException.class, () -> { - 
BufferUtil.readBytes(channel, buffer, -1); - }); - assertEquals("The number of bytes to read must be non-negative", thrown.getMessage()); - } - -} diff --git a/baremaps-flatgeobuf/src/test/java/org/apache/baremaps/flatgeobuf/FlatGeoBufTest.java b/baremaps-flatgeobuf/src/test/java/org/apache/baremaps/flatgeobuf/FlatGeoBufTest.java index 7a2b6a67..fa5aa424 100644 --- a/baremaps-flatgeobuf/src/test/java/org/apache/baremaps/flatgeobuf/FlatGeoBufTest.java +++ b/baremaps-flatgeobuf/src/test/java/org/apache/baremaps/flatgeobuf/FlatGeoBufTest.java @@ -26,6 +26,7 @@ import java.nio.channels.FileChannel; import java.nio.channels.ReadableByteChannel; import java.nio.channels.WritableByteChannel; import java.nio.file.Files; +import java.nio.file.Path; import java.nio.file.StandardOpenOption; import java.util.ArrayList; import java.util.List; @@ -37,118 +38,110 @@ import org.junit.jupiter.api.Test; public class FlatGeoBufTest { @Test - void readHeader() throws IOException { - var file = TestFiles.resolve("baremaps-testing/data/samples/countries.fgb"); - try (var channel = FileChannel.open(file, StandardOpenOption.READ)) { - Header header = FlatGeoBufReader.readHeader(channel); - assertNotNull(header); - assertEquals(179, header.featuresCount()); + void readWriteFlatGeoBuf() throws IOException { + Path file = TestFiles.resolve("baremaps-testing/data/samples/countries.fgb"); + Path tempFile = Files.createTempFile("countries", ".fgb"); + + Header headerFlatGeoBuf1 = null; + FlatGeoBuf.Header headerRecord1 = null; + List<FlatGeoBuf.Feature> featureRecords = new ArrayList<>(); + + // Copy the file + try (ReadableByteChannel channel = FileChannel.open(file, StandardOpenOption.READ); + WritableByteChannel tempChannel = FileChannel.open(tempFile, StandardOpenOption.WRITE)) { + + // Copy the header + headerFlatGeoBuf1 = FlatGeoBufReader.readHeaderFlatGeoBuf(channel); + headerRecord1 = FlatGeoBufReader.asRecord(headerFlatGeoBuf1); + FlatGeoBufWriter.writeHeaderFlatGeoBuf(tempChannel, headerFlatGeoBuf1); + + // Copy the index + ByteBuffer indexBuffer = FlatGeoBufReader.readIndexBuffer(channel, headerFlatGeoBuf1); + FlatGeoBufWriter.writeIndexBuffer(tempChannel, indexBuffer); + + // Copy the features + var buffer = ByteBuffer.allocate(1 << 10).order(ByteOrder.LITTLE_ENDIAN); + for (int i = 0; i < headerFlatGeoBuf1.featuresCount(); i++) { + Feature feature = FlatGeoBufReader.readFeatureFlatGeoBuf(channel, buffer); + FlatGeoBufWriter.writeFeatureFlatGeoBuf(tempChannel, feature); + FlatGeoBuf.Feature featureRecord = FlatGeoBufWriter.writeFeatureFlatGeoBuf(headerFlatGeoBuf1, feature); + featureRecords.add(featureRecord); + } } - } - @Test - void readFeature() throws IOException { - var file = TestFiles.resolve("baremaps-testing/data/samples/countries.fgb"); - try (var channel = FileChannel.open(file, StandardOpenOption.READ)) { + // Read the copied file + try (var channel = FileChannel.open(tempFile, StandardOpenOption.READ)) { // Read the header - Header header = FlatGeoBufReader.readHeader(channel); - assertNotNull(header); - assertEquals(179, header.featuresCount()); - - FlatGeoBuf.Header headerRecord = FlatGeoBufMapper.asHeaderRecord(header); - assertNotNull(headerRecord); - assertEquals(179, headerRecord.featuresCount()); + Header headerFlatGeoBuf2 = FlatGeoBufReader.readHeaderFlatGeoBuf(channel); + FlatGeoBuf.Header headerRecord2 = FlatGeoBufReader.asRecord(headerFlatGeoBuf2); + assertNotNull(headerFlatGeoBuf2); + assertEquals(headerRecord1, headerRecord2); // Read the index - FlatGeoBufReader.skipIndex(channel, 
header); - - // Read the first feature - ByteBuffer buffer = BufferUtil.createByteBuffer(1 << 16, ByteOrder.LITTLE_ENDIAN); - List<FlatGeoBuf.Feature> featureList = new ArrayList<>(); - for (int i = 0; i < header.featuresCount(); i++) { - Feature feature = FlatGeoBufReader.readFeature(channel, buffer); - featureList.add(FlatGeoBufMapper.asFeatureRecord(header, feature)); - assertNotNull(feature); - } + FlatGeoBufReader.skipIndex(channel, headerFlatGeoBuf2); - // Check the first feature - FlatGeoBuf.Feature firstFeature = featureList.get(0); - assertNotNull(firstFeature); - assertEquals(2, firstFeature.properties().size()); - assertEquals("ATA", firstFeature.properties().get(0)); - assertEquals("Antarctica", firstFeature.properties().get(1)); - assertNotNull(firstFeature.geometry()); - assertEquals(658, firstFeature.geometry().getNumPoints()); - - // Check the last feature - FlatGeoBuf.Feature lastFeature = featureList.get(178); - assertNotNull(lastFeature); - assertEquals(2, lastFeature.properties().size()); - assertEquals("FLK", lastFeature.properties().get(0)); - assertEquals("Falkland Islands", lastFeature.properties().get(1)); - assertNotNull(lastFeature.geometry()); - assertEquals(10, lastFeature.geometry().getNumPoints()); - - assertThrows(IOException.class, () -> FlatGeoBufReader.readFeature(channel, buffer)); + // Read the features + ByteBuffer buffer = ByteBuffer.allocate(1 << 10).order(ByteOrder.LITTLE_ENDIAN); + for (int i = 0; i < headerFlatGeoBuf2.featuresCount(); i++) { + Feature featureFlatGeoBuf = FlatGeoBufReader.readFeatureFlatGeoBuf(channel, buffer); + FlatGeoBuf.Feature featureRecord = FlatGeoBufWriter.writeFeatureFlatGeoBuf(headerFlatGeoBuf2, featureFlatGeoBuf); + assertNotNull(featureRecord); + assertEquals(featureRecords.get(i), featureRecord); + } } } - @Test - void readWrite() throws IOException { - var file = TestFiles.resolve("baremaps-testing/data/samples/countries.fgb"); - var tempFile = Files.createTempFile("countries", ".fgb"); + void readWriteRecord() throws IOException { + Path file = TestFiles.resolve("baremaps-testing/data/samples/countries.fgb"); + Path tempFile = Files.createTempFile("countries", ".fgb"); - FlatGeoBuf.Header headerRecord = null; + Header headerFlatGeoBuf1 = null; + FlatGeoBuf.Header headerRecord1 = null; List<FlatGeoBuf.Feature> featureRecords = new ArrayList<>(); + // Copy the file try (ReadableByteChannel channel = FileChannel.open(file, StandardOpenOption.READ); WritableByteChannel tempChannel = FileChannel.open(tempFile, StandardOpenOption.WRITE)) { - // Read the header - Header header = FlatGeoBufReader.readHeader(channel); - headerRecord = FlatGeoBufMapper.asHeaderRecord(header); - FlatGeoBufWriter.writeHeader(tempChannel, header); + // Copy the header + headerRecord1 = FlatGeoBufReader.readHeaderRecord(channel); + headerFlatGeoBuf1 = FlatGeoBufWriter.asHeaderRecord(headerRecord1); + FlatGeoBufWriter.writeHeaderRecord(tempChannel, headerRecord1); - // Read the index - ByteBuffer indexBuffer = FlatGeoBufReader.readIndexBuffer(channel, header); + // Copy the index + ByteBuffer indexBuffer = FlatGeoBufReader.readIndexBuffer(channel, headerFlatGeoBuf1); FlatGeoBufWriter.writeIndexBuffer(tempChannel, indexBuffer); - var buffer = BufferUtil.createByteBuffer(1 << 16, ByteOrder.LITTLE_ENDIAN); - for (int i = 0; i < header.featuresCount(); i++) { - Feature feature = FlatGeoBufReader.readFeature(channel, buffer); - FlatGeoBuf.Feature featureRecord = FlatGeoBufMapper.asFeatureRecord(header, feature); - featureRecords.add(featureRecord); - 
FlatGeoBufWriter.writeFeature(tempChannel, feature); + // Copy the features + var buffer = ByteBuffer.allocate(1 << 10).order(ByteOrder.LITTLE_ENDIAN); + for (int i = 0; i < headerFlatGeoBuf1.featuresCount(); i++) { + FlatGeoBuf.Feature feature = FlatGeoBufReader.readFeatureRecord(channel, headerFlatGeoBuf1, buffer); + FlatGeoBufWriter.writeFeatureRecord(tempChannel, headerFlatGeoBuf1, feature); + featureRecords.add(feature); } } - + // Read the copied file try (var channel = FileChannel.open(tempFile, StandardOpenOption.READ)) { // Read the header - Header header = FlatGeoBufReader.readHeader(channel); - assertNotNull(header); - assertEquals(headerRecord, FlatGeoBufMapper.asHeaderRecord(header)); + FlatGeoBuf.Header headerRecord2 = FlatGeoBufReader.readHeaderRecord(channel); + Header headerFlatGeoBuf2 = FlatGeoBufWriter.asHeaderRecord(headerRecord2); + assertNotNull(headerFlatGeoBuf2); + assertEquals(headerRecord1, headerRecord2); // Read the index - FlatGeoBufReader.skipIndex(channel, header); - - // Read the first feature - ByteBuffer buffer = BufferUtil.createByteBuffer(1 << 16, ByteOrder.LITTLE_ENDIAN); + FlatGeoBufReader.skipIndex(channel, headerFlatGeoBuf2); - for (int i = 0; i < header.featuresCount(); i++) { - Feature feature = FlatGeoBufReader.readFeature(channel, buffer); - FlatGeoBuf.Feature featureRecord = FlatGeoBufMapper.asFeatureRecord(header, feature); - - System.out.println(featureRecord); - - assertNotNull(feature); + // Read the features + ByteBuffer buffer = ByteBuffer.allocate(1 << 10).order(ByteOrder.LITTLE_ENDIAN); + for (int i = 0; i < headerFlatGeoBuf2.featuresCount(); i++) { + FlatGeoBuf.Feature featureRecord = FlatGeoBufReader.readFeatureRecord(channel, headerFlatGeoBuf2, buffer); assertNotNull(featureRecord); assertEquals(featureRecords.get(i), featureRecord); } - } }
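
For reviewers who want to try the renamed API, here is a minimal reading sketch pieced together from the methods this commit adds to FlatGeoBufReader and from FlatGeoBufTest. It is not part of the commit; the class name and the file path are placeholders, and the printed output is only for illustration.

import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.channels.FileChannel;
import java.nio.file.Path;
import java.nio.file.StandardOpenOption;
import org.apache.baremaps.flatgeobuf.FlatGeoBuf;
import org.apache.baremaps.flatgeobuf.FlatGeoBufReader;
import org.apache.baremaps.flatgeobuf.generated.Header;

public class FlatGeoBufReadExample {

  public static void main(String[] args) throws Exception {
    // Placeholder path; any FlatGeoBuf file such as the countries.fgb sample works.
    Path file = Path.of("countries.fgb");
    try (FileChannel channel = FileChannel.open(file, StandardOpenOption.READ)) {
      // Read the FlatBuffers header and expose it as the FlatGeoBuf.Header record.
      Header header = FlatGeoBufReader.readHeaderFlatGeoBuf(channel);
      FlatGeoBuf.Header record = FlatGeoBufReader.asRecord(header);
      System.out.println("features: " + record.featuresCount());

      // Skip over the packed R-tree index, which is not needed for a sequential scan.
      FlatGeoBufReader.skipIndex(channel, header);

      // Reuse a single little-endian buffer across features, as FlatGeoBufTest does.
      ByteBuffer buffer = ByteBuffer.allocate(1 << 10).order(ByteOrder.LITTLE_ENDIAN);
      for (long i = 0; i < header.featuresCount(); i++) {
        FlatGeoBuf.Feature feature = FlatGeoBufReader.readFeatureRecord(channel, header, buffer);
        System.out.println(feature.properties() + " " + feature.geometry());
      }
    }
  }
}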
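And a round-trip sketch mirroring readWriteRecord in FlatGeoBufTest, copying one FlatGeoBuf file to another through the record-level reader and writer. Again a sketch rather than part of the commit: the class name and paths are placeholders, and the CREATE open option is added here only so the sketch runs against a fresh target file.

import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.channels.FileChannel;
import java.nio.channels.ReadableByteChannel;
import java.nio.channels.WritableByteChannel;
import java.nio.file.Path;
import java.nio.file.StandardOpenOption;
import org.apache.baremaps.flatgeobuf.FlatGeoBuf;
import org.apache.baremaps.flatgeobuf.FlatGeoBufReader;
import org.apache.baremaps.flatgeobuf.FlatGeoBufWriter;
import org.apache.baremaps.flatgeobuf.generated.Header;

public class FlatGeoBufCopyExample {

  public static void main(String[] args) throws Exception {
    // Placeholder paths.
    Path source = Path.of("countries.fgb");
    Path target = Path.of("countries-copy.fgb");
    try (ReadableByteChannel in = FileChannel.open(source, StandardOpenOption.READ);
        WritableByteChannel out = FileChannel.open(target,
            StandardOpenOption.CREATE, StandardOpenOption.WRITE)) {

      // Copy the header through the record representation.
      FlatGeoBuf.Header headerRecord = FlatGeoBufReader.readHeaderRecord(in);
      Header header = FlatGeoBufWriter.asHeaderRecord(headerRecord);
      FlatGeoBufWriter.writeHeaderRecord(out, headerRecord);

      // Copy the index as a raw buffer.
      ByteBuffer index = FlatGeoBufReader.readIndexBuffer(in, header);
      FlatGeoBufWriter.writeIndexBuffer(out, index);

      // Copy the features one by one, reusing the same buffer.
      ByteBuffer buffer = ByteBuffer.allocate(1 << 10).order(ByteOrder.LITTLE_ENDIAN);
      for (long i = 0; i < header.featuresCount(); i++) {
        FlatGeoBuf.Feature feature = FlatGeoBufReader.readFeatureRecord(in, header, buffer);
        FlatGeoBufWriter.writeFeatureRecord(out, header, feature);
      }
    }
  }
}

The readWriteFlatGeoBuf test in this commit shows the same flow at the FlatBuffers level, using readFeatureFlatGeoBuf and writeFeatureFlatGeoBuf instead of the record variants.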
