This is an automated email from the ASF dual-hosted git repository.
bchapuis pushed a commit to branch calcite-framework
in repository https://gitbox.apache.org/repos/asf/incubator-baremaps.git
The following commit(s) were added to refs/heads/calcite-framework by this push:
new 6e7289722 Fix issues in header
6e7289722 is described below
commit 6e72897225dd85ab175fd2df44348c7d324005c9
Author: Bertil Chapuis <[email protected]>
AuthorDate: Mon Feb 3 09:04:31 2025 +0100
Fix issues in header
---
.../baremaps/calcite/BaremapsTableFactory.java | 108 ++++---
.../{BaremapsTable.java => DataTableAdapter.java} | 6 +-
.../baremaps/calcite/BaremapsTableFactoryTest.java | 118 -------
.../org/apache/baremaps/calcite/CalciteTest.java | 15 +-
.../calcite/DataTableAdapterFactoryTest.java | 154 +++++++++
.../java/org/apache/baremaps/utils/FileUtils.java | 36 ---
.../apache/baremaps/workflow/WorkflowContext.java | 2 +-
.../integration/GeoPackageToPostgresTest.java | 4 +-
.../integration/GeoParquetToPostgresTest.java | 4 +-
.../java/org/apache/baremaps/csv/CsvDataStore.java | 2 +-
.../java/org/apache/baremaps/csv/CsvDataTable.java | 348 ++++++++++++---------
.../baremaps/csv/CsvDataTableGeonamesTest.java | 2 +-
.../org/apache/baremaps/csv/CsvDataTableTest.java | 24 +-
.../data/memory/MemoryMappedDirectory.java | 10 +-
.../baremaps/data/memory/MemoryMappedFile.java | 2 +-
.../apache/baremaps/data/store/DataStoreImpl.java | 3 +-
.../org/apache/baremaps/data/util/FileUtils.java | 5 +
.../geopackage/GeoPackageDataStoreTest.java | 4 +-
.../openstreetmap/OpenStreetMapDataTableTest.java | 4 +-
.../postgres/store/PostgresDataTableTest.java | 4 +-
.../apache/baremaps/rpsl/RsplDataTableTest.java | 4 +-
.../java/org/apache/baremaps/store/DataSchema.java | 19 +-
22 files changed, 475 insertions(+), 403 deletions(-)
diff --git
a/baremaps-calcite/src/main/java/org/apache/baremaps/calcite/BaremapsTableFactory.java
b/baremaps-calcite/src/main/java/org/apache/baremaps/calcite/BaremapsTableFactory.java
index e53de427b..7553deb57 100644
---
a/baremaps-calcite/src/main/java/org/apache/baremaps/calcite/BaremapsTableFactory.java
+++
b/baremaps-calcite/src/main/java/org/apache/baremaps/calcite/BaremapsTableFactory.java
@@ -17,10 +17,16 @@
package org.apache.baremaps.calcite;
+import java.io.ByteArrayInputStream;
+import java.io.File;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.nio.MappedByteBuffer;
+import java.nio.file.Paths;
+import java.util.Map;
import org.apache.baremaps.csv.CsvDataTable;
import org.apache.baremaps.data.collection.AppendOnlyLog;
import org.apache.baremaps.data.collection.DataCollection;
-import org.apache.baremaps.data.collection.IndexedDataList;
import org.apache.baremaps.data.memory.Memory;
import org.apache.baremaps.data.memory.MemoryMappedDirectory;
import org.apache.baremaps.data.store.DataTableImpl;
@@ -35,64 +41,62 @@ import org.apache.calcite.rel.type.RelProtoDataType;
import org.apache.calcite.schema.SchemaPlus;
import org.apache.calcite.schema.TableFactory;
-import java.io.File;
-import java.io.IOException;
-import java.nio.MappedByteBuffer;
-import java.nio.file.Path;
-import java.nio.file.Paths;
-import java.util.Map;
+public class BaremapsTableFactory implements TableFactory<DataTableAdapter> {
-public class BaremapsTableFactory implements TableFactory<BaremapsTable> {
+ public BaremapsTableFactory() {
- public BaremapsTableFactory() {
+ }
- }
+ @Override
+ public DataTableAdapter create(
+ SchemaPlus schema,
+ String name,
+ Map<String, Object> operand,
+ RelDataType rowType) {
+ final RelProtoDataType protoRowType =
+ rowType != null ? RelDataTypeImpl.proto(rowType) : null;
+ String format = (String) operand.get("format");
+ DataTable dataTable = switch (format) {
+ case "mmap" -> createMMapTable(schema, name, operand, rowType);
+ case "csv" -> createCsvTable(schema, name, operand, rowType);
+ default -> throw new RuntimeException("Unsupported format");
+ };
+ return new DataTableAdapter(dataTable, protoRowType);
+ }
- @Override
- public BaremapsTable create(
- SchemaPlus schema,
- String name,
- Map<String, Object> operand,
- RelDataType rowType) {
- final RelProtoDataType protoRowType =
- rowType != null ? RelDataTypeImpl.proto(rowType) : null;
- String format = (String) operand.get("format");
- DataTable dataTable = switch (format) {
- case "mmap" -> createMMapTable(schema, name, operand, rowType);
- case "csv" -> createCsvTable(schema, name, operand, rowType);
- default -> throw new RuntimeException("Unsupported format");
- };
- return new BaremapsTable(dataTable, protoRowType);
+ private DataTable createMMapTable(SchemaPlus schema, String name,
Map<String, Object> operand,
+ RelDataType rowType) {
+ String directory = (String) operand.get("directory");
+ if (directory == null) {
+ throw new RuntimeException("A directory should be specified");
}
-
- private DataTable createMMapTable(SchemaPlus schema, String name,
Map<String, Object> operand, RelDataType rowType) {
- String directory = (String) operand.get("directory");
- if (directory == null) {
- throw new RuntimeException("A directory should be specified");
- }
- try {
- Path directoryPath = Paths.get(directory);
- Path schemaFile = directoryPath.resolve("schema.json");
- DataSchema dataSchema = DataSchema.read(schemaFile);
- DataType<DataRow> dataType = new DataTypeImpl(dataSchema);
- Memory<MappedByteBuffer> memory = new
MemoryMappedDirectory(directoryPath);
- DataCollection<DataRow> dataCollection = new
AppendOnlyLog<>(dataType, memory);
- return new DataTableImpl(dataSchema, dataCollection);
- } catch (IOException e) {
- throw new RuntimeException(e);
- }
+ try {
+ Memory<MappedByteBuffer> memory = new
MemoryMappedDirectory(Paths.get(directory));
+ ByteBuffer header = memory.header();
+ long size = header.getLong();
+ int length = header.getInt();
+ byte[] bytes = new byte[length];
+ header.get(bytes);
+ DataSchema dataSchema = DataSchema.read(new ByteArrayInputStream(bytes));
+ DataType<DataRow> dataType = new DataTypeImpl(dataSchema);
+ DataCollection<DataRow> dataCollection = new AppendOnlyLog<>(dataType,
memory);
+ return new DataTableImpl(dataSchema, dataCollection);
+ } catch (IOException e) {
+ throw new RuntimeException(e);
}
+ }
- private DataTable createCsvTable(SchemaPlus schema, String name,
Map<String, Object> operand, RelDataType rowType) {
- String file = (String) operand.get("file");
- if (file == null) {
- throw new RuntimeException("A file should be specified");
- }
- try {
- return new CsvDataTable(name, new File(file), true, ',');
- } catch (IOException e) {
- throw new RuntimeException(e);
- }
+ private DataTable createCsvTable(SchemaPlus schema, String name, Map<String,
Object> operand,
+ RelDataType rowType) {
+ String file = (String) operand.get("file");
+ if (file == null) {
+ throw new RuntimeException("A file should be specified");
+ }
+ try {
+ return new CsvDataTable(name, new File(file), ',', true);
+ } catch (IOException e) {
+ throw new RuntimeException(e);
}
+ }
}
diff --git
a/baremaps-calcite/src/main/java/org/apache/baremaps/calcite/BaremapsTable.java
b/baremaps-calcite/src/main/java/org/apache/baremaps/calcite/DataTableAdapter.java
similarity index 95%
rename from
baremaps-calcite/src/main/java/org/apache/baremaps/calcite/BaremapsTable.java
rename to
baremaps-calcite/src/main/java/org/apache/baremaps/calcite/DataTableAdapter.java
index 1d7caf7a4..9957a117b 100644
---
a/baremaps-calcite/src/main/java/org/apache/baremaps/calcite/BaremapsTable.java
+++
b/baremaps-calcite/src/main/java/org/apache/baremaps/calcite/DataTableAdapter.java
@@ -33,7 +33,7 @@ import org.apache.calcite.schema.impl.AbstractTable;
import org.apache.calcite.sql.type.SqlTypeName;
import org.locationtech.jts.geom.*;
-public class BaremapsTable extends AbstractTable implements ScannableTable {
+public class DataTableAdapter extends AbstractTable implements ScannableTable {
private static final EnumMap<ColumnType, RelDataType> types = new
EnumMap<>(ColumnType.class);
@@ -78,12 +78,12 @@ public class BaremapsTable extends AbstractTable implements
ScannableTable {
private RelDataType rowType;
- public BaremapsTable(DataTable table) {
+ public DataTableAdapter(DataTable table) {
this.table = table;
this.protoRowType = null;
}
- public BaremapsTable(DataTable table, RelProtoDataType protoRowType) {
+ public DataTableAdapter(DataTable table, RelProtoDataType protoRowType) {
this.table = table;
this.protoRowType = protoRowType;
}
diff --git
a/baremaps-calcite/src/test/java/org/apache/baremaps/calcite/BaremapsTableFactoryTest.java
b/baremaps-calcite/src/test/java/org/apache/baremaps/calcite/BaremapsTableFactoryTest.java
deleted file mode 100644
index 1598163e7..000000000
---
a/baremaps-calcite/src/test/java/org/apache/baremaps/calcite/BaremapsTableFactoryTest.java
+++ /dev/null
@@ -1,118 +0,0 @@
-package org.apache.baremaps.calcite;
-
-import org.apache.baremaps.data.collection.AppendOnlyLog;
-import org.apache.baremaps.data.memory.MemoryMappedDirectory;
-import org.apache.baremaps.data.store.DataTableImpl;
-import org.apache.baremaps.data.type.DataTypeImpl;
-import org.apache.baremaps.data.util.FileUtils;
-import org.apache.baremaps.store.*;
-import org.apache.baremaps.store.DataColumn.Cardinality;
-import org.apache.baremaps.store.DataColumn.ColumnType;
-import org.junit.jupiter.api.Test;
-import org.locationtech.jts.geom.Coordinate;
-import org.locationtech.jts.geom.GeometryFactory;
-
-import java.io.File;
-import java.io.PrintWriter;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.sql.Connection;
-import java.sql.DriverManager;
-import java.sql.ResultSet;
-import java.util.List;
-
-class BaremapsTableFactoryTest {
-
-
- @Test
- public void createCsvTable() throws Exception {
- File file = File.createTempFile("test", ".csv");
- String csv = """
- ID,NAME,GEOM
- 1,Paris,POINT(2.3522 48.8566)
- 2,New York,POINT(-74.0060 40.7128)
- """;
- try (PrintWriter writer = new PrintWriter(file)) {
- writer.write(csv);
- }
- String model = """
- {
- version: '1.0',
- defaultSchema: 'TEST',
- schemas: [
- {
- name: 'TEST',
- tables: [
- {
- name: 'TEST',
- factory:
'org.apache.baremaps.calcite.BaremapsTableFactory',
- operand: {
- format: 'csv',
- file: '%s'
- }
- }
- ]
- }
- ]
- }
- """.formatted(file.getAbsolutePath());
- try (Connection connection =
- DriverManager.getConnection("jdbc:calcite:model=inline:"
+ model)) {
-
- ResultSet resultSet =
connection.createStatement().executeQuery("SELECT * FROM TEST.TEST");
- while (resultSet.next()) {
- System.out.println(resultSet.getString("ID") + " " +
resultSet.getString("GEOM"));
- }
- } finally {
- file.delete();
- }
- }
-
- @Test
- public void createMMapTable() throws Exception {
- Path path = Files.createTempDirectory("temp");
-
- DataSchema dataSchema = new DataSchemaImpl("test", List.of(
- new DataColumnFixed("id", Cardinality.REQUIRED,
ColumnType.INTEGER),
- new DataColumnFixed("name", Cardinality.REQUIRED,
ColumnType.STRING),
- new DataColumnFixed("geom", Cardinality.REQUIRED,
ColumnType.GEOMETRY)
- ));
-
- DataTable dataTable = new DataTableImpl(dataSchema, new
AppendOnlyLog<>(new DataTypeImpl(dataSchema), new MemoryMappedDirectory(path)));
- dataTable.add(new DataRowImpl(dataSchema, List.of(1, "a", new
GeometryFactory().createPoint(new Coordinate(1, 1)))));
- dataTable.add(new DataRowImpl(dataSchema, List.of(2, "b", new
GeometryFactory().createPoint(new Coordinate(2, 2)))));
- dataTable.close();
-
- String model = """
- {
- version: '1.0',
- defaultSchema: 'TEST',
- schemas: [
- {
- name: 'TEST',
- tables: [
- {
- name: 'TEST',
- factory:
'org.apache.baremaps.calcite.BaremapsTableFactory',
- operand: {
- format: 'mmap',
- directory: '%s'
- }
- }
- ]
- }
- ]
- }
- """.formatted(path.toAbsolutePath());
- try (Connection connection =
- DriverManager.getConnection("jdbc:calcite:model=inline:"
+ model)) {
-
- ResultSet resultSet =
connection.createStatement().executeQuery("SELECT * FROM TEST.TEST");
- while (resultSet.next()) {
- System.out.println(resultSet.getString("ID") + " " +
resultSet.getString("GEOM"));
- }
- } finally {
- FileUtils.deleteRecursively(path);
- }
- }
-}
\ No newline at end of file
diff --git
a/baremaps-calcite/src/test/java/org/apache/baremaps/calcite/CalciteTest.java
b/baremaps-calcite/src/test/java/org/apache/baremaps/calcite/CalciteTest.java
index b68d2261c..d96d5ca9b 100644
---
a/baremaps-calcite/src/test/java/org/apache/baremaps/calcite/CalciteTest.java
+++
b/baremaps-calcite/src/test/java/org/apache/baremaps/calcite/CalciteTest.java
@@ -19,7 +19,6 @@ package org.apache.baremaps.calcite;
import java.sql.*;
import java.util.*;
-
import org.apache.baremaps.data.collection.AppendOnlyLog;
import org.apache.baremaps.data.collection.IndexedDataList;
import org.apache.baremaps.data.store.DataTableImpl;
@@ -82,8 +81,8 @@ public class CalciteTest {
cityDataTable.add(new DataRowImpl(cityDataTable.schema(),
List.of(2, "New York", geometryFactory.createPoint(new
Coordinate(-74.0060, 40.7128)))));
- BaremapsTable cityBaremapsTable = new BaremapsTable(cityDataTable);
- rootSchema.add("city", cityBaremapsTable);
+ DataTableAdapter cityDataTableAdapter = new
DataTableAdapter(cityDataTable);
+ rootSchema.add("city", cityDataTableAdapter);
// Create and add 'population' table
DataSchema populationRowType = new DataSchemaImpl("population", List.of(
@@ -97,8 +96,8 @@ public class CalciteTest {
populationDataTable.add(new DataRowImpl(populationDataTable.schema(),
List.of(1, 2_161_000)));
populationDataTable.add(new DataRowImpl(populationDataTable.schema(),
List.of(2, 8_336_000)));
- BaremapsTable populationBaremapsTable = new
BaremapsTable(populationDataTable);
- rootSchema.add("population", populationBaremapsTable);
+ DataTableAdapter populationDataTableAdapter = new
DataTableAdapter(populationDataTable);
+ rootSchema.add("population", populationDataTableAdapter);
// Create view 'city_population'
String mvSql = "SELECT c.id, c.name, c.geometry, p.population " +
@@ -210,8 +209,8 @@ public class CalciteTest {
cityDataTable.add(new DataRowImpl(cityDataTable.schema(),
List.of(2, "New York", geometryFactory.createPoint(new
Coordinate(-74.0060, 40.7128)))));
- BaremapsTable cityBaremapsTable = new BaremapsTable(cityDataTable);
- rootSchema.add("CITY", cityBaremapsTable);
+ DataTableAdapter cityDataTableAdapter = new
DataTableAdapter(cityDataTable);
+ rootSchema.add("CITY", cityDataTableAdapter);
// Configure the framework
FrameworkConfig config = Frameworks.newConfigBuilder()
@@ -244,6 +243,4 @@ public class CalciteTest {
System.out.println("List A: " + listA);
System.out.println("List B (after SQL): " + listB);
}
-
-
}
diff --git
a/baremaps-calcite/src/test/java/org/apache/baremaps/calcite/DataTableAdapterFactoryTest.java
b/baremaps-calcite/src/test/java/org/apache/baremaps/calcite/DataTableAdapterFactoryTest.java
new file mode 100644
index 000000000..69b33e354
--- /dev/null
+++
b/baremaps-calcite/src/test/java/org/apache/baremaps/calcite/DataTableAdapterFactoryTest.java
@@ -0,0 +1,154 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.baremaps.calcite;
+
+import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.io.PrintWriter;
+import java.nio.ByteBuffer;
+import java.nio.MappedByteBuffer;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.ResultSet;
+import java.util.List;
+import org.apache.baremaps.data.collection.AppendOnlyLog;
+import org.apache.baremaps.data.memory.Memory;
+import org.apache.baremaps.data.memory.MemoryMappedDirectory;
+import org.apache.baremaps.data.store.DataTableImpl;
+import org.apache.baremaps.data.type.DataTypeImpl;
+import org.apache.baremaps.data.util.FileUtils;
+import org.apache.baremaps.store.*;
+import org.apache.baremaps.store.DataColumn.Cardinality;
+import org.apache.baremaps.store.DataColumn.ColumnType;
+import org.junit.jupiter.api.Test;
+import org.locationtech.jts.geom.Coordinate;
+import org.locationtech.jts.geom.GeometryFactory;
+
+class DataTableAdapterFactoryTest {
+
+
+ @Test
+ public void createCsvTable() throws Exception {
+ File file = File.createTempFile("test", ".csv");
+ String csv = """
+ ID,NAME,GEOM
+ 1,Paris,POINT(2.3522 48.8566)
+ 2,New York,POINT(-74.0060 40.7128)
+ """;
+ try (PrintWriter writer = new PrintWriter(file)) {
+ writer.write(csv);
+ }
+ String model = """
+ {
+ version: '1.0',
+ defaultSchema: 'TEST',
+ schemas: [
+ {
+ name: 'TEST',
+ tables: [
+ {
+ name: 'TEST',
+ factory:
'org.apache.baremaps.calcite.BaremapsTableFactory',
+ operand: {
+ format: 'csv',
+ file: '%s'
+ }
+ }
+ ]
+ }
+ ]
+ }
+ """.formatted(file.getAbsolutePath());
+ try (Connection connection =
+ DriverManager.getConnection("jdbc:calcite:model=inline:" + model)) {
+
+ ResultSet resultSet = connection.createStatement().executeQuery("SELECT
* FROM TEST.TEST");
+ while (resultSet.next()) {
+ System.out.println(resultSet.getString("ID") + " " +
resultSet.getString("GEOM"));
+ }
+ } finally {
+ file.delete();
+ }
+ }
+
+ @Test
+ public void createMMapTable() throws Exception {
+ Path path = Files.createTempDirectory("temp");
+
+ DataSchema dataSchema = new DataSchemaImpl("test", List.of(
+ new DataColumnFixed("id", Cardinality.REQUIRED, ColumnType.INTEGER),
+ new DataColumnFixed("name", Cardinality.REQUIRED, ColumnType.STRING),
+ new DataColumnFixed("geom", Cardinality.REQUIRED,
ColumnType.GEOMETRY)));
+
+ // Serialize the schema
+ ByteArrayOutputStream output = new ByteArrayOutputStream();
+ DataSchema.write(output, dataSchema);
+ byte[] bytes = output.toByteArray();
+
+ // Write the schema to the header of the memory mapped file
+ Memory<MappedByteBuffer> memory = new MemoryMappedDirectory(path);
+ ByteBuffer header = memory.header();
+ header.position(Long.BYTES);
+ header.putInt(bytes.length);
+ header.put(bytes);
+
+ DataTable dataTable =
+ new DataTableImpl(dataSchema, new AppendOnlyLog<>(new
DataTypeImpl(dataSchema), memory));
+ dataTable.add(new DataRowImpl(dataSchema,
+ List.of(1, "a", new GeometryFactory().createPoint(new Coordinate(1,
1)))));
+ dataTable.add(new DataRowImpl(dataSchema,
+ List.of(2, "b", new GeometryFactory().createPoint(new Coordinate(2,
2)))));
+
+ dataTable.close();
+
+
+ String model = """
+ {
+ version: '1.0',
+ defaultSchema: 'TEST',
+ schemas: [
+ {
+ name: 'TEST',
+ tables: [
+ {
+ name: 'TEST',
+ factory:
'org.apache.baremaps.calcite.BaremapsTableFactory',
+ operand: {
+ format: 'mmap',
+ directory: '%s'
+ }
+ }
+ ]
+ }
+ ]
+ }
+ """.formatted(path.toAbsolutePath());
+ try (Connection connection =
+ DriverManager.getConnection("jdbc:calcite:model=inline:" + model)) {
+
+ ResultSet resultSet = connection.createStatement().executeQuery("SELECT
* FROM TEST.TEST");
+ while (resultSet.next()) {
+ System.out.println(resultSet.getString("ID") + " " +
resultSet.getString("GEOM"));
+ }
+ } finally {
+ FileUtils.deleteRecursively(path);
+ }
+ }
+}
diff --git
a/baremaps-core/src/main/java/org/apache/baremaps/utils/FileUtils.java
b/baremaps-core/src/main/java/org/apache/baremaps/utils/FileUtils.java
deleted file mode 100644
index c98af3fb6..000000000
--- a/baremaps-core/src/main/java/org/apache/baremaps/utils/FileUtils.java
+++ /dev/null
@@ -1,36 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to you under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.baremaps.utils;
-
-
-
-import java.io.File;
-import java.io.IOException;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.util.Comparator;
-import java.util.stream.Stream;
-
-public class FileUtils {
-
- public static void deleteRecursively(Path path) throws IOException {
- try (Stream<Path> files = Files.walk(path)) {
-
files.sorted(Comparator.reverseOrder()).map(Path::toFile).forEach(File::delete);
- }
- }
-}
diff --git
a/baremaps-core/src/main/java/org/apache/baremaps/workflow/WorkflowContext.java
b/baremaps-core/src/main/java/org/apache/baremaps/workflow/WorkflowContext.java
index bf9c03c10..073c42000 100644
---
a/baremaps-core/src/main/java/org/apache/baremaps/workflow/WorkflowContext.java
+++
b/baremaps-core/src/main/java/org/apache/baremaps/workflow/WorkflowContext.java
@@ -29,8 +29,8 @@ import javax.sql.DataSource;
import org.apache.baremaps.data.collection.*;
import org.apache.baremaps.data.memory.MemoryMappedDirectory;
import org.apache.baremaps.data.type.*;
+import org.apache.baremaps.data.util.FileUtils;
import org.apache.baremaps.postgres.utils.PostgresUtils;
-import org.apache.baremaps.utils.FileUtils;
import org.locationtech.jts.geom.Coordinate;
/**
diff --git
a/baremaps-core/src/test/java/org/apache/baremaps/integration/GeoPackageToPostgresTest.java
b/baremaps-core/src/test/java/org/apache/baremaps/integration/GeoPackageToPostgresTest.java
index 5ace46975..aea373c8a 100644
---
a/baremaps-core/src/test/java/org/apache/baremaps/integration/GeoPackageToPostgresTest.java
+++
b/baremaps-core/src/test/java/org/apache/baremaps/integration/GeoPackageToPostgresTest.java
@@ -42,8 +42,8 @@ class GeoPackageToPostgresTest extends PostgresContainerTest {
// Check the table in Postgres
var postgresTable = postgresStore.get("countries");
- assertEquals("countries", postgresTable.schema().name());
- assertEquals(4, postgresTable.schema().columns().size());
+ assertEquals("countries", postgresTable.schema().getName());
+ assertEquals(4, postgresTable.schema().getColumns().size());
assertEquals(179l, postgresTable.size());
assertEquals(179l, postgresTable.stream().count());
}
diff --git
a/baremaps-core/src/test/java/org/apache/baremaps/integration/GeoParquetToPostgresTest.java
b/baremaps-core/src/test/java/org/apache/baremaps/integration/GeoParquetToPostgresTest.java
index 3e4f1588d..1d847cb26 100644
---
a/baremaps-core/src/test/java/org/apache/baremaps/integration/GeoParquetToPostgresTest.java
+++
b/baremaps-core/src/test/java/org/apache/baremaps/integration/GeoParquetToPostgresTest.java
@@ -44,8 +44,8 @@ class GeoParquetToPostgresTest extends PostgresContainerTest {
// Check the table in Postgres
var postgresTable = postgresStore.get("geoparquet");
- assertEquals("geoparquet", postgresTable.schema().name());
- assertEquals(7, postgresTable.schema().columns().size());
+ assertEquals("geoparquet", postgresTable.schema().getName());
+ assertEquals(7, postgresTable.schema().getColumns().size());
assertEquals(5L, postgresTable.size());
assertEquals(5L, postgresTable.stream().count());
}
diff --git
a/baremaps-csv/src/main/java/org/apache/baremaps/csv/CsvDataStore.java
b/baremaps-csv/src/main/java/org/apache/baremaps/csv/CsvDataStore.java
index 9bfc41aa2..355b74f4e 100644
--- a/baremaps-csv/src/main/java/org/apache/baremaps/csv/CsvDataStore.java
+++ b/baremaps-csv/src/main/java/org/apache/baremaps/csv/CsvDataStore.java
@@ -49,7 +49,7 @@ public class CsvDataStore implements DataStore {
char separator) throws IOException {
this.tableName = tableName;
this.schema = schema;
- this.dataTable = new CsvDataTable(tableName, csvFile, header, separator);
+ this.dataTable = new CsvDataTable(tableName, csvFile, separator, header);
}
/**
diff --git
a/baremaps-csv/src/main/java/org/apache/baremaps/csv/CsvDataTable.java
b/baremaps-csv/src/main/java/org/apache/baremaps/csv/CsvDataTable.java
index 03283ec8a..6ed88cc9e 100644
--- a/baremaps-csv/src/main/java/org/apache/baremaps/csv/CsvDataTable.java
+++ b/baremaps-csv/src/main/java/org/apache/baremaps/csv/CsvDataTable.java
@@ -17,162 +17,229 @@
package org.apache.baremaps.csv;
-import com.fasterxml.jackson.core.FormatSchema;
-import com.fasterxml.jackson.core.JsonParser;
-import com.fasterxml.jackson.core.JsonToken;
import com.fasterxml.jackson.databind.MappingIterator;
import com.fasterxml.jackson.dataformat.csv.CsvMapper;
import com.fasterxml.jackson.dataformat.csv.CsvSchema;
+import java.io.File;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
import org.apache.baremaps.store.*;
import org.apache.baremaps.store.DataColumn.Cardinality;
import org.apache.baremaps.store.DataColumn.ColumnType;
import org.locationtech.jts.io.WKTReader;
-import java.io.File;
-import java.io.IOException;
-import java.util.*;
-import java.util.stream.Stream;
-import java.util.stream.StreamSupport;
-
-/**
- * A DataTable implementation that reads data from a CSV file using Jackson.
- */
public class CsvDataTable implements DataTable {
- private final File file;
- private final CsvSchema csvSchema;
- private final DataSchema dataSchema;
-
- private final long size;
- private MappingIterator<Map<String, String>> csvIterator;
-
- /**
- * Constructs a CsvDataTable with the specified schema, CSV file, header
presence, and separator.
- *
- * @param name the name of the table
- * @param csvFile the CSV file to read data from
- * @param header whether the CSV file includes a header row
- * @param separator the separator used in the CSV file
- * @throws IOException if an I/O error occurs
- */
- public CsvDataTable(String name, File csvFile, boolean header, char
separator) throws IOException {
- this.file = csvFile;
-
- // Iterate over all records to infer the csv schema and calculate the
size of the table
- CsvSchema csvSchema = CsvSchema.emptySchema()
- .withUseHeader(header)
- .withColumnSeparator(separator);
-
- try (MappingIterator<Object> iterator = new CsvMapper()
- .readerFor(Object.class)
- .with(csvSchema)
- .readValues(csvFile)) {
-
- int count = 0;
- while (iterator.hasNext()) {
- iterator.next();
- count++;
- }
+ private final File file;
+ private final CsvSchema csvSchema;
+ private final DataSchema dataSchema;
+ private final long size;
+ private final boolean hasHeader;
- this.csvSchema = (CsvSchema) iterator.getParserSchema();
- this.size = count;
- } catch (IOException e) {
- throw new DataStoreException("Error reading CSV file", e);
- }
+ /**
+ * Constructor that infers the CSV file’s columns.
+ * <p>
+ * This constructor accepts a {@code hasHeader} parameter. If {@code
hasHeader} is true, the
+ * file’s header is used to pick column names; if false, the first row is
used to determine the
+ * number of columns, and generic names ("column1", "column2", etc.) are
generated.
+ *
+ * @param name the name of the table
+ * @param csvFile the CSV file to read data from
+ * @param separator the separator used in the CSV file
+ * @param hasHeader whether the CSV file includes a header row
+ * @throws IOException if an I/O error occurs
+ */
+ public CsvDataTable(String name, File csvFile, char separator, boolean
hasHeader)
+ throws IOException {
+ this(name, csvFile, separator, inferColumns(csvFile, separator,
hasHeader), hasHeader);
+ }
- // Map the csv schema to a data schema
- List<DataColumn> columns = new ArrayList<>();
- for (String columnName : this.csvSchema.getColumnNames()) {
- switch (this.csvSchema.column(columnName).getType()) {
- case STRING -> columns
- .add(new DataColumnFixed(columnName,
Cardinality.REQUIRED, ColumnType.STRING));
- case NUMBER -> columns
- .add(new DataColumnFixed(columnName,
Cardinality.REQUIRED, ColumnType.DOUBLE));
- case BOOLEAN -> columns
- .add(new DataColumnFixed(columnName,
Cardinality.REQUIRED, ColumnType.BOOLEAN));
- case ARRAY -> columns
- .add(new DataColumnFixed(columnName,
Cardinality.REPEATED, ColumnType.STRING));
- default -> throw new IllegalArgumentException(
- "Unsupported column type: " +
csvSchema.column(columnName).getType());
- }
+ /**
+ * Constructor that uses the provided column definitions.
+ *
+ * @param name the name of the table
+ * @param csvFile the CSV file to read data from
+ * @param separator the separator used in the CSV file
+ * @param columns the list of columns (data types, names, etc.)
+ * @param hasHeader indicates whether the CSV file has a header row
+ * @throws IOException if an I/O error occurs
+ */
+ public CsvDataTable(String name, File csvFile, char separator,
List<DataColumn> columns,
+ boolean hasHeader)
+ throws IOException {
+ this.file = csvFile;
+ this.hasHeader = hasHeader;
+ CsvMapper mapper = new CsvMapper();
+ CsvSchema schema = buildSchema(columns, separator, hasHeader);
+
+ int rowCount = 0;
+ try (MappingIterator<Map<String, String>> iterator = mapper
+ .readerFor(Map.class)
+ .with(schema)
+ .readValues(file)) {
+ while (iterator.hasNext()) {
+ iterator.next();
+ rowCount++;
+ }
+ // If a header is present, update the schema from the parser (if
available).
+ if (hasHeader) {
+ CsvSchema inferred = (CsvSchema) iterator.getParserSchema();
+ if (inferred != null) {
+ schema = inferred;
}
- this.dataSchema = new DataSchemaImpl(name, columns);
+ }
+ } catch (IOException e) {
+ throw new DataStoreException("Error reading CSV file", e);
}
+ this.size = rowCount;
+ this.csvSchema = schema;
+ this.dataSchema = new DataSchemaImpl(name, columns);
+ }
- @Override
- public DataSchema schema() {
- return dataSchema;
+ /**
+ * Builds a CSV schema from the given column definitions.
+ *
+ * @param columns the list of columns
+ * @param separator the column separator
+ * @param hasHeader whether the CSV file has a header row
+ * @return the CSV schema
+ */
+ private static CsvSchema buildSchema(List<DataColumn> columns, char
separator,
+ boolean hasHeader) {
+ if (hasHeader) {
+ return
CsvSchema.emptySchema().withHeader().withColumnSeparator(separator);
+ } else {
+ CsvSchema.Builder builder = CsvSchema.builder();
+ for (DataColumn col : columns) {
+ builder.addColumn(col.name());
+ }
+ return builder.setColumnSeparator(separator).build();
}
+ }
- @Override
- public boolean add(DataRow row) {
- throw new UnsupportedOperationException("Adding rows is not
supported.");
+ /**
+ * Infers columns from the CSV file.
+ * <p>
+ * If {@code hasHeader} is true, this method reads the header row to extract
column names.
+ * Otherwise, it reads the first row to determine the number of columns and
generates names.
+ *
+ * @param csvFile the CSV file
+ * @param separator the column separator
+ * @param hasHeader whether the CSV file includes a header row
+ * @return a list of columns (all of type STRING)
+ * @throws IOException if an I/O error occurs
+ */
+ private static List<DataColumn> inferColumns(File csvFile, char separator,
boolean hasHeader)
+ throws IOException {
+ CsvMapper mapper = new CsvMapper();
+ List<DataColumn> columns = new ArrayList<>();
+ if (hasHeader) {
+ // Read the header to infer column names.
+ CsvSchema baseSchema =
CsvSchema.emptySchema().withHeader().withColumnSeparator(separator);
+ try (MappingIterator<Map<String, String>> iterator =
mapper.readerFor(Map.class)
+ .with(baseSchema)
+ .readValues(csvFile)) {
+ // Consume one record so that the header is processed.
+ if (iterator.hasNext()) {
+ iterator.next();
+ }
+ CsvSchema inferred = (CsvSchema) iterator.getParserSchema();
+ if (inferred == null) {
+ throw new DataStoreException("Failed to infer CSV schema from
header.");
+ }
+ for (String colName : inferred.getColumnNames()) {
+ columns.add(new DataColumnFixed(colName, Cardinality.OPTIONAL,
ColumnType.STRING));
+ }
+ }
+ } else {
+ // No header: read the first row to determine the number of columns.
+ CsvSchema baseSchema =
CsvSchema.emptySchema().withColumnSeparator(separator);
+ try (MappingIterator<List<String>> iterator =
mapper.readerFor(List.class)
+ .with(baseSchema)
+ .readValues(csvFile)) {
+ if (iterator.hasNext()) {
+ List<String> firstRow = iterator.next();
+ int numColumns = firstRow.size();
+ for (int i = 0; i < numColumns; i++) {
+ columns.add(
+ new DataColumnFixed("column" + (i + 1), Cardinality.OPTIONAL,
ColumnType.STRING));
+ }
+ } else {
+ throw new DataStoreException("CSV file is empty; cannot infer column
names.");
+ }
+ }
}
+ return columns;
+ }
- @Override
- public void clear() {
- throw new UnsupportedOperationException("Clearing rows is not
supported.");
- }
+ @Override
+ public DataSchema schema() {
+ return dataSchema;
+ }
- @Override
- public long size() {
- return size;
- }
+ @Override
+ public boolean add(DataRow row) {
+ throw new UnsupportedOperationException("Adding rows is not supported.");
+ }
- @Override
- public Iterator<DataRow> iterator() {
- try {
+ @Override
+ public void clear() {
+ throw new UnsupportedOperationException("Clearing rows is not supported.");
+ }
- csvIterator = new CsvMapper()
- .readerFor(Map.class)
- .with(csvSchema)
- .readValues(file);
+ @Override
+ public long size() {
+ return size;
+ }
- return new Iterator<>() {
- @Override
- public boolean hasNext() {
- return csvIterator.hasNext();
- }
+ @Override
+ public java.util.Iterator<DataRow> iterator() {
+ try {
+ CsvMapper mapper = new CsvMapper();
+ MappingIterator<Map<String, String>> localIterator =
mapper.readerFor(Map.class)
+ .with(csvSchema)
+ .readValues(file);
- @Override
- public DataRow next() {
- Map<String, String> csvRow = csvIterator.next();
- DataRow dataRow = dataSchema.createRow();
-
- for (int i = 0; i < dataSchema.columns().size(); i++) {
- DataColumn column = dataSchema.columns().get(i);
- String columnName = column.name();
- String value = csvRow.get(columnName);
-
- if (value != null) {
- Object parsedValue = parseValue(column, value);
- dataRow.set(i, parsedValue);
- } else {
- dataRow.set(i, null);
- }
- }
- return dataRow;
- }
- };
+ return new java.util.Iterator<>() {
+ @Override
+ public boolean hasNext() {
+ boolean has = localIterator.hasNext();
+ if (!has) {
+ try {
+ localIterator.close();
+ } catch (IOException e) {
+ throw new DataStoreException("Error closing CSV iterator", e);
+ }
+ }
+ return has;
+ }
- } catch (IOException e) {
- throw new DataStoreException("Error reading CSV file", e);
+ @Override
+ public DataRow next() {
+ if (!hasNext()) {
+ throw new java.util.NoSuchElementException();
+ }
+ Map<String, String> csvRow = localIterator.next();
+ DataRow dataRow = dataSchema.createRow();
+ for (int i = 0; i < dataSchema.columns().size(); i++) {
+ DataColumn column = dataSchema.columns().get(i);
+ String value = csvRow.get(column.name());
+ dataRow.set(i, (value == null || value.isEmpty()) ? null :
parseValue(column, value));
+ }
+ return dataRow;
}
+ };
+
+ } catch (IOException e) {
+ throw new DataStoreException("Error reading CSV file", e);
}
+ }
- /**
- * Parses the string value from the CSV according to the column type.
- *
- * @param column the data column
- * @param value the string value from the CSV
- * @return the parsed value
- */
- private Object parseValue(DataColumn column, String value) {
+ private static Object parseValue(DataColumn column, String value) {
ColumnType type = column.type();
try {
- if (value == null || value.isEmpty()) {
- return null;
- }
return switch (type) {
case STRING -> value;
case INTEGER -> Integer.parseInt(value);
@@ -180,8 +247,7 @@ public class CsvDataTable implements DataTable {
case FLOAT -> Float.parseFloat(value);
case DOUBLE -> Double.parseDouble(value);
case BOOLEAN -> Boolean.parseBoolean(value);
- case GEOMETRY, POINT, LINESTRING, POLYGON, MULTIPOINT,
MULTILINESTRING, MULTIPOLYGON,
- GEOMETRYCOLLECTION -> {
+ case GEOMETRY, POINT, LINESTRING, POLYGON, MULTIPOINT,
MULTILINESTRING, MULTIPOLYGON, GEOMETRYCOLLECTION -> {
WKTReader reader = new WKTReader();
yield reader.read(value);
}
@@ -192,20 +258,18 @@ public class CsvDataTable implements DataTable {
}
}
- @Override
- public Spliterator<DataRow> spliterator() {
- return Spliterators.spliteratorUnknownSize(iterator(),
Spliterator.ORDERED);
- }
+ @Override
+ public java.util.Spliterator<DataRow> spliterator() {
+ return java.util.Spliterators.spliteratorUnknownSize(iterator(),
java.util.Spliterator.ORDERED);
+ }
- @Override
- public Stream<DataRow> stream() {
- return StreamSupport.stream(spliterator(), false);
- }
+ @Override
+ public java.util.stream.Stream<DataRow> stream() {
+ return java.util.stream.StreamSupport.stream(spliterator(), false);
+ }
- @Override
- public void close() throws IOException {
- if (csvIterator != null) {
- csvIterator.close();
- }
- }
+ @Override
+ public void close() throws IOException {
+ // No persistent resources to close.
+ }
}
diff --git
a/baremaps-csv/src/test/java/org/apache/baremaps/csv/CsvDataTableGeonamesTest.java
b/baremaps-csv/src/test/java/org/apache/baremaps/csv/CsvDataTableGeonamesTest.java
index 71776b724..d46dd7728 100644
---
a/baremaps-csv/src/test/java/org/apache/baremaps/csv/CsvDataTableGeonamesTest.java
+++
b/baremaps-csv/src/test/java/org/apache/baremaps/csv/CsvDataTableGeonamesTest.java
@@ -29,7 +29,7 @@ class CsvDataTableGeonamesTest {
@Test
void testGeonamesCsvDataTable() throws IOException {
- DataTable dataTable = new CsvDataTable("sample", GEONAMES_CSV.toFile(),
false, '\t');
+ DataTable dataTable = new CsvDataTable("sample", GEONAMES_CSV.toFile(),
'\t', false);
assertEquals(5, dataTable.size(), "DataTable should have 5 rows.");
diff --git
a/baremaps-csv/src/test/java/org/apache/baremaps/csv/CsvDataTableTest.java
b/baremaps-csv/src/test/java/org/apache/baremaps/csv/CsvDataTableTest.java
index bb67934df..970decb5e 100644
--- a/baremaps-csv/src/test/java/org/apache/baremaps/csv/CsvDataTableTest.java
+++ b/baremaps-csv/src/test/java/org/apache/baremaps/csv/CsvDataTableTest.java
@@ -58,7 +58,12 @@ class CsvDataTableTest {
2,PointB,"POINT(2 2)"
""";
Files.writeString(tempCsvFile.toPath(), csvContent);
- DataTable dataTable = new CsvDataTable("test_table", tempCsvFile, true,
',');
+ // Provide an explicit schema so that the id and geom fields are parsed
correctly.
+ List<DataColumn> columns = List.of(
+ new DataColumnFixed("id", Cardinality.REQUIRED, ColumnType.INTEGER),
+ new DataColumnFixed("name", Cardinality.OPTIONAL, ColumnType.STRING),
+ new DataColumnFixed("geom", Cardinality.OPTIONAL,
ColumnType.GEOMETRY));
+ DataTable dataTable = new CsvDataTable("test_table", tempCsvFile, ',',
columns, true);
assertEquals(2, dataTable.size());
int rowCount = 0;
for (DataRow row : dataTable) {
@@ -86,7 +91,7 @@ class CsvDataTableTest {
new DataColumnFixed("column1", Cardinality.REQUIRED,
ColumnType.INTEGER),
new DataColumnFixed("column2", Cardinality.OPTIONAL,
ColumnType.STRING),
new DataColumnFixed("column3", Cardinality.OPTIONAL,
ColumnType.GEOMETRY));
- DataTable dataTable = new CsvDataTable("test_table", tempCsvFile, false,
';');
+ DataTable dataTable = new CsvDataTable("test_table", tempCsvFile, ';',
columns, false);
assertEquals(2, dataTable.size());
int rowCount = 0;
for (DataRow row : dataTable) {
@@ -119,7 +124,7 @@ class CsvDataTableTest {
new DataColumnFixed("double_col", Cardinality.REQUIRED,
ColumnType.DOUBLE),
new DataColumnFixed("bool_col", Cardinality.REQUIRED,
ColumnType.BOOLEAN),
new DataColumnFixed("string_col", Cardinality.REQUIRED,
ColumnType.STRING));
- DataTable dataTable = new CsvDataTable("test_table", tempCsvFile, true,
',');
+ DataTable dataTable = new CsvDataTable("test_table", tempCsvFile, ',',
columns, true);
assertEquals(2, dataTable.size());
int rowCount = 0;
for (DataRow row : dataTable) {
@@ -148,10 +153,11 @@ class CsvDataTableTest {
List<DataColumn> columns = List.of(
new DataColumnFixed("id", Cardinality.REQUIRED, ColumnType.INTEGER),
new DataColumnFixed("name", Cardinality.OPTIONAL, ColumnType.STRING));
- DataTable dataTable = new CsvDataTable("test_table", tempCsvFile, true,
',');
+ // Pass the explicit columns so that parsing "id" as an integer is
attempted.
+ DataTable dataTable = new CsvDataTable("test_table", tempCsvFile, ',',
columns, true);
assertThrows(RuntimeException.class, () -> {
for (DataRow row : dataTable) {
- // This line should throw an exception because abc is not a valid
integer
+ // This line should throw an exception because "abc" is not a valid
integer.
row.values();
}
});
@@ -159,9 +165,10 @@ class CsvDataTableTest {
@Test
void testAddAndClearUnsupportedOperations() throws IOException {
- String csvContent = "";
+ // When using header=true the CSV file must contain at least a header row.
+ String csvContent = "col1\n";
Files.writeString(tempCsvFile.toPath(), csvContent);
- DataTable dataTable = new CsvDataTable("test_table", tempCsvFile, true,
',');
+ DataTable dataTable = new CsvDataTable("test_table", tempCsvFile, ',',
true);
assertThrows(UnsupportedOperationException.class, () ->
dataTable.add(null));
assertThrows(UnsupportedOperationException.class, dataTable::clear);
}
@@ -175,7 +182,8 @@ class CsvDataTableTest {
3,Name3
""";
Files.writeString(tempCsvFile.toPath(), csvContent);
- DataTable dataTable = new CsvDataTable("test_table", tempCsvFile, true,
',');
+ // In this test we rely on the inferred (STRING) schema.
+ DataTable dataTable = new CsvDataTable("test_table", tempCsvFile, ',',
true);
assertEquals(3, dataTable.size());
}
}
diff --git
a/baremaps-data/src/main/java/org/apache/baremaps/data/memory/MemoryMappedDirectory.java
b/baremaps-data/src/main/java/org/apache/baremaps/data/memory/MemoryMappedDirectory.java
index 1b9a18dfa..1c68017a7 100644
---
a/baremaps-data/src/main/java/org/apache/baremaps/data/memory/MemoryMappedDirectory.java
+++
b/baremaps-data/src/main/java/org/apache/baremaps/data/memory/MemoryMappedDirectory.java
@@ -50,10 +50,10 @@ public class MemoryMappedDirectory extends
Memory<MappedByteBuffer> {
* Constructs an {@link MemoryMappedDirectory} with a custom directory and a
custom segment size.
*
* @param directory the directory that stores the data
- * @param segmentBytes the size of the segments in bytes
+ * @param segmentSize the size of the segments in bytes
*/
- public MemoryMappedDirectory(Path directory, int segmentBytes) {
- super(1024, segmentBytes);
+ public MemoryMappedDirectory(Path directory, int segmentSize) {
+ super(1 << 14, segmentSize);
this.directory = directory;
}
@@ -62,8 +62,8 @@ public class MemoryMappedDirectory extends
Memory<MappedByteBuffer> {
try {
Path file = directory.resolve("header");
try (FileChannel channel = FileChannel.open(file,
StandardOpenOption.CREATE,
- StandardOpenOption.READ, StandardOpenOption.WRITE)) {
- return channel.map(MapMode.READ_WRITE, 0, 1024);
+ StandardOpenOption.READ, StandardOpenOption.WRITE)) {
+ return channel.map(MapMode.READ_WRITE, 0, headerSize());
}
} catch (IOException e) {
throw new MemoryException(e);
diff --git
a/baremaps-data/src/main/java/org/apache/baremaps/data/memory/MemoryMappedFile.java
b/baremaps-data/src/main/java/org/apache/baremaps/data/memory/MemoryMappedFile.java
index 7c2de6b8b..30b22b4fd 100644
---
a/baremaps-data/src/main/java/org/apache/baremaps/data/memory/MemoryMappedFile.java
+++
b/baremaps-data/src/main/java/org/apache/baremaps/data/memory/MemoryMappedFile.java
@@ -57,7 +57,7 @@ public class MemoryMappedFile extends
Memory<MappedByteBuffer> {
protected MappedByteBuffer allocateHeader() {
try {
try (FileChannel channel = FileChannel.open(file,
StandardOpenOption.CREATE,
- StandardOpenOption.READ, StandardOpenOption.WRITE)) {
+ StandardOpenOption.READ, StandardOpenOption.WRITE)) {
return channel.map(MapMode.READ_WRITE, 0, headerSize());
}
} catch (IOException e) {
diff --git
a/baremaps-data/src/main/java/org/apache/baremaps/data/store/DataStoreImpl.java
b/baremaps-data/src/main/java/org/apache/baremaps/data/store/DataStoreImpl.java
index c866b1b41..a8a56b6dd 100644
---
a/baremaps-data/src/main/java/org/apache/baremaps/data/store/DataStoreImpl.java
+++
b/baremaps-data/src/main/java/org/apache/baremaps/data/store/DataStoreImpl.java
@@ -29,7 +29,8 @@ public class DataStoreImpl implements DataStore {
public DataStoreImpl(List<DataTable> tables) {
this.tables =
- tables.stream().collect(Collectors.toMap(table ->
table.schema().name(), table -> table));
+ tables.stream()
+ .collect(Collectors.toMap(table -> table.schema().name(), table ->
table));
}
@Override
diff --git
a/baremaps-data/src/main/java/org/apache/baremaps/data/util/FileUtils.java
b/baremaps-data/src/main/java/org/apache/baremaps/data/util/FileUtils.java
index 8fc7469fe..35f0d68c1 100644
--- a/baremaps-data/src/main/java/org/apache/baremaps/data/util/FileUtils.java
+++ b/baremaps-data/src/main/java/org/apache/baremaps/data/util/FileUtils.java
@@ -21,6 +21,7 @@ package org.apache.baremaps.data.util;
import java.io.File;
import java.io.IOException;
+import java.nio.ByteBuffer;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Comparator;
@@ -32,6 +33,10 @@ public class FileUtils {
// Prevent instantiation
}
+ public static ByteBuffer read(Path path) throws IOException {
+ return ByteBuffer.wrap(Files.readAllBytes(path));
+ }
+
public static void deleteRecursively(Path path) throws IOException {
try (Stream<Path> files = Files.walk(path)) {
files.sorted(Comparator.reverseOrder()).map(Path::toFile).forEach(File::delete);
diff --git
a/baremaps-geopackage/src/test/java/org/apache/baremaps/geopackage/GeoPackageDataStoreTest.java
b/baremaps-geopackage/src/test/java/org/apache/baremaps/geopackage/GeoPackageDataStoreTest.java
index db27ebd1c..8b9ef47ac 100644
---
a/baremaps-geopackage/src/test/java/org/apache/baremaps/geopackage/GeoPackageDataStoreTest.java
+++
b/baremaps-geopackage/src/test/java/org/apache/baremaps/geopackage/GeoPackageDataStoreTest.java
@@ -30,8 +30,8 @@ class GeoPackageDataStoreTest {
var geoPackageStore = new GeoPackageDataStore(file);
var table = geoPackageStore.get("countries");
var rowType = table.schema();
- assertEquals("countries", rowType.name());
- assertEquals(4, rowType.columns().size());
+ assertEquals("countries", rowType.getName());
+ assertEquals(4, rowType.getColumns().size());
}
@Test
diff --git
a/baremaps-openstreetmap/src/test/java/org/apache/baremaps/openstreetmap/OpenStreetMapDataTableTest.java
b/baremaps-openstreetmap/src/test/java/org/apache/baremaps/openstreetmap/OpenStreetMapDataTableTest.java
index cee8ade94..35deb29db 100644
---
a/baremaps-openstreetmap/src/test/java/org/apache/baremaps/openstreetmap/OpenStreetMapDataTableTest.java
+++
b/baremaps-openstreetmap/src/test/java/org/apache/baremaps/openstreetmap/OpenStreetMapDataTableTest.java
@@ -33,8 +33,8 @@ class OpenStreetMapDataTableTest {
try (var inputStream = Files.newInputStream(uri)) {
var table = new OpenStreetMapDataTable(new PbfEntityReader(),
inputStream);
var rowType = table.schema();
- assertEquals(rowType.name(), "osm_data");
- assertEquals(9, rowType.columns().size());
+ assertEquals(rowType.getName(), "osm_data");
+ assertEquals(9, rowType.getColumns().size());
}
}
diff --git
a/baremaps-postgres/src/test/java/org/apache/baremaps/postgres/store/PostgresDataTableTest.java
b/baremaps-postgres/src/test/java/org/apache/baremaps/postgres/store/PostgresDataTableTest.java
index defc7348f..716c57c9b 100644
---
a/baremaps-postgres/src/test/java/org/apache/baremaps/postgres/store/PostgresDataTableTest.java
+++
b/baremaps-postgres/src/test/java/org/apache/baremaps/postgres/store/PostgresDataTableTest.java
@@ -61,8 +61,8 @@ class PostgresDataTableTest extends PostgresContainerTest {
var table = schema.get("mock");
var rowType = table.schema();
assertNotNull(rowType);
- assertEquals("mock", rowType.name());
- assertEquals(5, rowType.columns().size());
+ assertEquals("mock", rowType.getName());
+ assertEquals(5, rowType.getColumns().size());
}
@Test
diff --git
a/baremaps-rpsl/src/test/java/org/apache/baremaps/rpsl/RsplDataTableTest.java
b/baremaps-rpsl/src/test/java/org/apache/baremaps/rpsl/RsplDataTableTest.java
index 597114935..2e800a087 100644
---
a/baremaps-rpsl/src/test/java/org/apache/baremaps/rpsl/RsplDataTableTest.java
+++
b/baremaps-rpsl/src/test/java/org/apache/baremaps/rpsl/RsplDataTableTest.java
@@ -42,8 +42,8 @@ class RpslDataTableTest {
@Test
void schema() {
DataSchema schema = dataTable.schema();
- assertEquals("RpslObject", schema.name());
- List<DataColumn> columns = schema.columns();
+ assertEquals("RpslObject", schema.getName());
+ List<DataColumn> columns = schema.getColumns();
assertTrue(columns.stream().anyMatch(c -> c.name().equals("type")));
assertTrue(columns.stream().anyMatch(c -> c.name().equals("id")));
assertTrue(columns.stream().anyMatch(c -> c.name().equals("inetnum")));
diff --git
a/baremaps-store/src/main/java/org/apache/baremaps/store/DataSchema.java
b/baremaps-store/src/main/java/org/apache/baremaps/store/DataSchema.java
index a2dcfecd8..eceed6c38 100644
--- a/baremaps-store/src/main/java/org/apache/baremaps/store/DataSchema.java
+++ b/baremaps-store/src/main/java/org/apache/baremaps/store/DataSchema.java
@@ -26,10 +26,8 @@ import com.fasterxml.jackson.databind.jsontype.NamedType;
import com.fasterxml.jackson.databind.module.SimpleModule;
import com.fasterxml.jackson.databind.node.ObjectNode;
import java.io.IOException;
-import java.io.Serializable;
-import java.nio.ByteBuffer;
-import java.nio.charset.StandardCharsets;
-import java.nio.file.Path;
+import java.io.InputStream;
+import java.io.OutputStream;
import java.util.ArrayList;
import java.util.List;
import org.apache.baremaps.store.DataColumn.Cardinality;
@@ -102,19 +100,14 @@ public interface DataSchema {
return mapper;
}
- static DataSchema read(ByteBuffer buffer) throws IOException {
+ static DataSchema read(InputStream inputStream) throws IOException {
var mapper = configureObjectMapper();
- int length = buffer.getInt();
- byte[] bytes = new byte[length];
- buffer.get(bytes);
- return mapper.readValue(bytes, DataSchema.class);
+ return mapper.readValue(inputStream, DataSchema.class);
}
- static void write(ByteBuffer buffer, DataSchema schema) throws IOException {
+ static void write(OutputStream outputStream, DataSchema schema) throws
IOException {
var mapper = configureObjectMapper();
- var bytes =
mapper.writerWithDefaultPrettyPrinter().writeValueAsBytes(schema);
- buffer.putInt(bytes.length);
- buffer.put(bytes);
+ mapper.writeValue(outputStream, schema);
}
}