This is an automated email from the ASF dual-hosted git repository.
bchapuis pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/incubator-baremaps.git
The following commit(s) were added to refs/heads/main by this push:
new 7b97e5d2 Improve workflow package (#802)
7b97e5d2 is described below
commit 7b97e5d25bf8d9a48ff5dece4bd44e2d25803990
Author: Bertil Chapuis <[email protected]>
AuthorDate: Wed Dec 13 14:10:24 2023 +0100
Improve workflow package (#802)
* Replace task records by classes
* Make task argument names more uniform (source, target)
* Introduce a single decompression task
* Remove unused code and tasks
* Initialize lists in the context of the workflow
* Improve javadoc
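As a sketch of the record-to-class change described above (the task name is hypothetical, not from this commit), each task now exposes a no-argument constructor so that Jackson can instantiate it when deserializing a workflow, with the former record components becoming fields:

    // Hypothetical task before the change
    public record MyTask(Path source, Path target) implements Task { /* execute */ }

    // Hypothetical task after the change
    public class MyTask implements Task {
      private Path source;
      private Path target;
      public MyTask() {} // required for Jackson
      public MyTask(Path source, Path target) {
        this.source = source;
        this.target = target;
      }
    }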
---
.../apache/baremaps/cli/database/ImportOsm.java | 2 -
.../apache/baremaps/cli/database/UpdateOsm.java | 6 +-
.../java/org/apache/baremaps/cli/map/Export.java | 10 +-
.../org/apache/baremaps/cli/workflow/Execute.java | 2 +-
.../org/apache/baremaps/utils/GeometryUtils.java | 10 +-
.../java/org/apache/baremaps/workflow/Task.java | 40 +++----
.../apache/baremaps/workflow/WorkflowContext.java | 65 +++++++++++
.../apache/baremaps/workflow/WorkflowExecutor.java | 33 +++---
.../{LogMessage.java => CleanContextCache.java} | 17 ++-
.../{LogMessage.java => CleanContextData.java} | 14 ++-
.../tasks/CreateGeocoderOpenStreetMap.java | 72 ++++--------
.../workflow/tasks/CreateGeonamesIndex.java | 26 ++++-
.../baremaps/workflow/tasks/CreateIplocIndex.java | 32 +++++-
.../baremaps/workflow/tasks/DecompressBZip2.java | 46 --------
.../baremaps/workflow/tasks/DecompressFile.java | 125 +++++++++++++++------
.../baremaps/workflow/tasks/DownloadUrl.java | 49 ++++++--
.../baremaps/workflow/tasks/ExecuteCommand.java | 27 ++++-
.../apache/baremaps/workflow/tasks/ExecuteSql.java | 34 +++++-
.../baremaps/workflow/tasks/ExecuteSqlScript.java | 34 +++++-
.../baremaps/workflow/tasks/ExportVectorTiles.java | 39 +++++--
.../workflow/tasks/ImportDaylightFeatures.java | 43 +++++--
.../workflow/tasks/ImportDaylightTranslations.java | 34 +++++-
.../baremaps/workflow/tasks/ImportGeoPackage.java | 39 ++++++-
.../baremaps/workflow/tasks/ImportOsmOsc.java | 91 +++++++--------
.../baremaps/workflow/tasks/ImportOsmPbf.java | 113 ++++++++++---------
.../baremaps/workflow/tasks/ImportShapefile.java | 39 ++++++-
.../apache/baremaps/workflow/tasks/LogMessage.java | 26 ++++-
.../apache/baremaps/workflow/tasks/UngzipFile.java | 51 ---------
.../apache/baremaps/workflow/tasks/UnzipFile.java | 66 -----------
.../baremaps/workflow/tasks/UpdateOsmDatabase.java | 51 ++++++++-
.../apache/baremaps/workflow/ObjectMapperTest.java | 4 +-
.../org/apache/baremaps/workflow/WorkflowTest.java | 62 +++++-----
.../baremaps/workflow/tasks/DownloadUrlTest.java | 6 +-
.../workflow/tasks/ImportGeoPackageTest.java | 2 +-
.../{ImportPbfTest.java => ImportOsmPbfTest.java} | 2 +-
.../workflow/tasks/ImportShapefileTest.java | 17 +--
.../baremaps/workflow/tasks/UngzipFileTest.java | 40 -------
.../baremaps/workflow/tasks/UnzipFileTest.java | 40 -------
baremaps-core/src/test/resources/workflow.json | 4 +-
basemap/import.js | 64 +++--------
daylight/workflow.js | 62 +++++-----
examples/extrusion/workflow.json | 4 +-
examples/geocoding/workflow.js | 4 +-
examples/ip-to-location/workflow.js | 22 ++--
examples/naturalearth/workflow.json | 15 +--
examples/openstreetmap/workflow.json | 4 +-
examples/shadedrelief/workflow.json | 11 +-
pom.xml | 11 ++
scripts/test-basemap.sh | 2 +-
49 files changed, 910 insertions(+), 702 deletions(-)
diff --git a/baremaps-cli/src/main/java/org/apache/baremaps/cli/database/ImportOsm.java b/baremaps-cli/src/main/java/org/apache/baremaps/cli/database/ImportOsm.java
index 2f3ede49..ad962fcc 100644
--- a/baremaps-cli/src/main/java/org/apache/baremaps/cli/database/ImportOsm.java
+++ b/baremaps-cli/src/main/java/org/apache/baremaps/cli/database/ImportOsm.java
@@ -49,8 +49,6 @@ public class ImportOsm implements Callable<Integer> {
public Integer call() throws Exception {
new org.apache.baremaps.workflow.tasks.ImportOsmPbf(
file.toAbsolutePath(),
- null,
- true,
database,
srid,
true).execute(new WorkflowContext());
diff --git a/baremaps-cli/src/main/java/org/apache/baremaps/cli/database/UpdateOsm.java b/baremaps-cli/src/main/java/org/apache/baremaps/cli/database/UpdateOsm.java
index 886fd0a6..11fb8d6c 100644
--- a/baremaps-cli/src/main/java/org/apache/baremaps/cli/database/UpdateOsm.java
+++ b/baremaps-cli/src/main/java/org/apache/baremaps/cli/database/UpdateOsm.java
@@ -41,9 +41,13 @@ public class UpdateOsm implements Callable<Integer> {
description = "The projection used by the database.")
private int srid = 3857;
+ @Option(names = {"--replication-url"}, paramLabel = "REPLICATION_URL",
+ description = "The replication url of the OpenStreetMap server.")
+ private String replicationUrl = "https://planet.osm.org/replication/hour";
+
@Override
public Integer call() throws Exception {
- new UpdateOsmDatabase(database, srid)
+ new UpdateOsmDatabase(database, srid, replicationUrl)
.execute(new WorkflowContext());
return 0;
}
diff --git a/baremaps-cli/src/main/java/org/apache/baremaps/cli/map/Export.java b/baremaps-cli/src/main/java/org/apache/baremaps/cli/map/Export.java
index 2c13a09b..661fb81f 100644
--- a/baremaps-cli/src/main/java/org/apache/baremaps/cli/map/Export.java
+++ b/baremaps-cli/src/main/java/org/apache/baremaps/cli/map/Export.java
@@ -50,14 +50,6 @@ public class Export implements Callable<Integer> {
@Option(names = {"--tiles"}, paramLabel = "TILES", description = "The tiles
to export.")
private URI tiles;
- @Option(names = {"--batch-array-size"}, paramLabel = "BATCH_ARRAY_SIZE",
- description = "The size of the batch array.")
- private int batchArraySize = 1;
-
- @Option(names = {"--batch-array-index"}, paramLabel = "READER",
- description = "The index of the batch in the array.")
- private int batchArrayIndex = 0;
-
@Option(names = {"--format"}, paramLabel = "FORMAT",
description = "The format of the repository.")
private ExportVectorTiles.Format format = ExportVectorTiles.Format.file;
@@ -65,7 +57,7 @@ public class Export implements Callable<Integer> {
@Override
public Integer call() throws Exception {
new ExportVectorTiles(tileset.toAbsolutePath(),
- repository.toAbsolutePath(), batchArraySize, batchArrayIndex, format)
+ repository.toAbsolutePath(), format)
.execute(new WorkflowContext());
return 0;
}
diff --git a/baremaps-cli/src/main/java/org/apache/baremaps/cli/workflow/Execute.java b/baremaps-cli/src/main/java/org/apache/baremaps/cli/workflow/Execute.java
index b19c70aa..6e67645e 100644
--- a/baremaps-cli/src/main/java/org/apache/baremaps/cli/workflow/Execute.java
+++ b/baremaps-cli/src/main/java/org/apache/baremaps/cli/workflow/Execute.java
@@ -50,7 +50,7 @@ public class Execute implements Callable<Integer> {
var configReader = new ConfigReader();
var workflow = mapper.readValue(configReader.read(file.toAbsolutePath()), Workflow.class);
try (var executor = new WorkflowExecutor(workflow)) {
- executor.execute().get();
+ executor.execute();
}
logger.info("Finished executing the workflow {}", file);
return 0;
diff --git a/baremaps-core/src/main/java/org/apache/baremaps/utils/GeometryUtils.java b/baremaps-core/src/main/java/org/apache/baremaps/utils/GeometryUtils.java
index 62df8210..c0d95a3b 100644
--- a/baremaps-core/src/main/java/org/apache/baremaps/utils/GeometryUtils.java
+++ b/baremaps-core/src/main/java/org/apache/baremaps/utils/GeometryUtils.java
@@ -73,16 +73,16 @@ public class GeometryUtils {
/**
* Creates a coordinate transform with the provided SRIDs.
*
- * @param sourceSRID the source SRID
- * @param targetSRID the target SRID
+ * @param sourceSrid the source SRID
+ * @param targetSrid the target SRID
* @return the coordinate transform
*/
- public static CoordinateTransform coordinateTransform(Integer sourceSRID, Integer targetSRID) {
+ public static CoordinateTransform coordinateTransform(Integer sourceSrid, Integer targetSrid) {
CRSFactory crsFactory = new CRSFactory();
CoordinateReferenceSystem sourceCRS =
- crsFactory.createFromName(String.format("EPSG:%d", sourceSRID));
+ crsFactory.createFromName(String.format("EPSG:%d", sourceSrid));
CoordinateReferenceSystem targetCRS =
- crsFactory.createFromName(String.format("EPSG:%d", targetSRID));
+ crsFactory.createFromName(String.format("EPSG:%d", targetSrid));
CoordinateTransformFactory coordinateTransformFactory = new CoordinateTransformFactory();
return coordinateTransformFactory.createTransform(sourceCRS, targetCRS);
}
diff --git a/baremaps-core/src/main/java/org/apache/baremaps/workflow/Task.java b/baremaps-core/src/main/java/org/apache/baremaps/workflow/Task.java
index 06fdad9e..1baedefe 100644
--- a/baremaps-core/src/main/java/org/apache/baremaps/workflow/Task.java
+++ b/baremaps-core/src/main/java/org/apache/baremaps/workflow/Task.java
@@ -21,6 +21,7 @@ package org.apache.baremaps.workflow;
import com.fasterxml.jackson.annotation.JsonAutoDetect;
import com.fasterxml.jackson.annotation.JsonSubTypes;
+import com.fasterxml.jackson.annotation.JsonSubTypes.Type;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import com.fasterxml.jackson.annotation.JsonTypeInfo.Id;
import com.fasterxml.jackson.databind.annotation.JsonSerialize;
@@ -32,26 +33,25 @@ import org.apache.baremaps.workflow.tasks.*;
@JsonSerialize
@JsonAutoDetect(fieldVisibility = JsonAutoDetect.Visibility.ANY)
@JsonTypeInfo(use = Id.NAME, property = "type")
-@JsonSubTypes({@JsonSubTypes.Type(value = DownloadUrl.class, name = "DownloadUrl"),
- @JsonSubTypes.Type(value = ExecuteCommand.class, name = "ExecuteCommand"),
- @JsonSubTypes.Type(value = ExecuteSql.class, name = "ExecuteSql"),
- @JsonSubTypes.Type(value = ExecuteSqlScript.class, name = "ExecuteSqlScript"),
- @JsonSubTypes.Type(value = ExportVectorTiles.class, name = "ExportVectorTiles"),
- @JsonSubTypes.Type(value = ImportGeoPackage.class, name = "ImportGeoPackage"),
- @JsonSubTypes.Type(value = ImportOsmPbf.class, name = "ImportOsmPbf"),
- @JsonSubTypes.Type(value = ImportOsmOsc.class, name = "ImportOsmOsc"),
- @JsonSubTypes.Type(value = ImportShapefile.class, name = "ImportShapefile"),
- @JsonSubTypes.Type(value = LogMessage.class, name = "LogMessage"),
- @JsonSubTypes.Type(value = UnzipFile.class, name = "UnzipFile"),
- @JsonSubTypes.Type(value = UngzipFile.class, name = "UngzipFile"),
- @JsonSubTypes.Type(value = DecompressBZip2.class, name = "DecompressBZip2"),
- @JsonSubTypes.Type(value = DecompressFile.class, name = "DecompressFile"),
- @JsonSubTypes.Type(value = UpdateOsmDatabase.class, name = "UpdateOsmDatabase"),
- @JsonSubTypes.Type(value = CreateGeonamesIndex.class, name = "CreateGeonamesIndex"),
- @JsonSubTypes.Type(value = CreateIplocIndex.class, name = "CreateIplocIndex"),
- @JsonSubTypes.Type(value = ImportDaylightTranslations.class,
- name = "ImportDaylightTranslations"),
- @JsonSubTypes.Type(value = ImportDaylightFeatures.class, name = "ImportDaylightFeatures")
+@JsonSubTypes({
+ @Type(value = CleanContextCache.class, name = "CleanContextCache"),
+ @Type(value = CleanContextData.class, name = "CleanContextData"),
+ @Type(value = CreateGeonamesIndex.class, name = "CreateGeonamesIndex"),
+ @Type(value = CreateIplocIndex.class, name = "CreateIplocIndex"),
+ @Type(value = DecompressFile.class, name = "DecompressFile"),
+ @Type(value = DownloadUrl.class, name = "DownloadUrl"),
+ @Type(value = ExecuteCommand.class, name = "ExecuteCommand"),
+ @Type(value = ExecuteSql.class, name = "ExecuteSql"),
+ @Type(value = ExecuteSqlScript.class, name = "ExecuteSqlScript"),
+ @Type(value = ExportVectorTiles.class, name = "ExportVectorTiles"),
+ @Type(value = ImportDaylightFeatures.class, name = "ImportDaylightFeatures"),
+ @Type(value = ImportDaylightTranslations.class, name = "ImportDaylightTranslations"),
+ @Type(value = ImportGeoPackage.class, name = "ImportGeoPackage"),
+ @Type(value = ImportOsmOsc.class, name = "ImportOsmOsc"),
+ @Type(value = ImportOsmPbf.class, name = "ImportOsmPbf"),
+ @Type(value = ImportShapefile.class, name = "ImportShapefile"),
+ @Type(value = LogMessage.class, name = "LogMessage"),
+ @Type(value = UpdateOsmDatabase.class, name = "UpdateOsmDatabase"),
})
public interface Task {
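The names registered above are the values of the "type" property in workflow files. A minimal sketch of the resulting polymorphic deserialization, assuming a plain Jackson ObjectMapper (not code from this commit):

    // Deserialize a task by its registered type name
    ObjectMapper mapper = new ObjectMapper();
    Task task = mapper.readValue(
        "{\"type\": \"LogMessage\", \"message\": \"Hello\"}", Task.class);
    task.execute(new WorkflowContext());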
diff --git a/baremaps-core/src/main/java/org/apache/baremaps/workflow/WorkflowContext.java b/baremaps-core/src/main/java/org/apache/baremaps/workflow/WorkflowContext.java
index f4a720f7..d991d071 100644
--- a/baremaps-core/src/main/java/org/apache/baremaps/workflow/WorkflowContext.java
+++ b/baremaps-core/src/main/java/org/apache/baremaps/workflow/WorkflowContext.java
@@ -19,16 +19,40 @@ package org.apache.baremaps.workflow;
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import javax.sql.DataSource;
+import org.apache.baremaps.database.collection.*;
+import org.apache.baremaps.database.memory.MemoryMappedDirectory;
+import org.apache.baremaps.database.type.*;
+import org.apache.baremaps.database.type.geometry.LonLatDataType;
+import org.apache.baremaps.utils.FileUtils;
import org.apache.baremaps.utils.PostgresUtils;
+import org.locationtech.jts.geom.Coordinate;
/**
* A context that is passed to the tasks of a workflow and used to share data between tasks.
*/
public class WorkflowContext {
+ private final Path dataDir;
+
+ private final Path cacheDir;
+
+ public WorkflowContext() {
+ this(Paths.get("./data"), Paths.get("./cache"));
+ }
+
+ public WorkflowContext(Path dataDir, Path cacheDir) {
+ this.dataDir = dataDir;
+ this.cacheDir = cacheDir;
+ }
+
private Map<Object, DataSource> dataSources = new ConcurrentHashMap<>() {};
/**
@@ -41,4 +65,45 @@ public class WorkflowContext {
return dataSources.computeIfAbsent(database, PostgresUtils::createDataSourceFromObject);
}
+ public DataMap<Long, Coordinate> getCoordinateMap(Path path) throws IOException {
+ if (Files.size(path) > 1 << 30) {
+ return getMemoryAlignedDataMap("coordinates", new LonLatDataType());
+ } else {
+ return getMonotonicDataMap("coordinates", new LonLatDataType());
+ }
+ }
+
+ public DataMap<Long, List<Long>> getReferenceMap(Path path) throws IOException {
+ return getMonotonicDataMap("references", new LongListDataType());
+ }
+
+ public <T> DataMap<Long, T> getMemoryAlignedDataMap(String name, FixedSizeDataType<T> dataType)
+ throws IOException {
+ var coordinateDir = Files.createDirectories(cacheDir.resolve(name));
+ return new MemoryAlignedDataMap<>(
+ dataType,
+ new MemoryMappedDirectory(coordinateDir));
+ }
+
+ public <T> DataMap<Long, T> getMonotonicDataMap(String name, DataType<T> dataType)
+ throws IOException {
+ var mapDir = Files.createDirectories(cacheDir.resolve(name));
+ var keysDir = Files.createDirectories(mapDir.resolve("keys"));
+ var valuesDir = Files.createDirectories(mapDir.resolve("values"));
+ return new MonotonicDataMap<>(
+ new MemoryAlignedDataList<>(
+ new PairDataType<>(new LongDataType(), new LongDataType()),
+ new MemoryMappedDirectory(keysDir)),
+ new AppendOnlyBuffer<>(
+ dataType,
+ new MemoryMappedDirectory(valuesDir)));
+ }
+
+ public void cleanCache() throws IOException {
+ FileUtils.deleteRecursively(cacheDir);
+ }
+
+ public void cleanData() throws IOException {
+ FileUtils.deleteRecursively(dataDir);
+ }
}
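With list and map initialization moved into the context, a task that needs the OpenStreetMap caches can now obtain them in one call each. A hedged sketch of the pattern the import tasks below follow (the file field is assumed):

    @Override
    public void execute(WorkflowContext context) throws Exception {
      var path = file.toAbsolutePath();
      // The context picks a memory-aligned or monotonic map based on the file size
      var coordinateMap = context.getCoordinateMap(path);
      var referenceMap = context.getReferenceMap(path);
      // ... import entities, then clean up with CleanContextCache/CleanContextData tasks
    }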
diff --git a/baremaps-core/src/main/java/org/apache/baremaps/workflow/WorkflowExecutor.java b/baremaps-core/src/main/java/org/apache/baremaps/workflow/WorkflowExecutor.java
index 43de015c..36d3c38b 100644
--- a/baremaps-core/src/main/java/org/apache/baremaps/workflow/WorkflowExecutor.java
+++ b/baremaps-core/src/main/java/org/apache/baremaps/workflow/WorkflowExecutor.java
@@ -105,17 +105,24 @@ public class WorkflowExecutor implements AutoCloseable {
/**
* Executes the workflow.
*/
- public CompletableFuture<Void> execute() {
+ public void execute() {
+ try {
+ executeAsync().join();
+ logStepMeasures();
+ } catch (Exception e) {
+ logger.error("Error while executing the workflow", e);
+ }
+ }
+
+ public CompletableFuture<Void> executeAsync() {
// Create futures for each end step
var endSteps = graph.nodes().stream()
.filter(this::isEndStep)
- .map(this::getFutureStep)
+ .map(this::getStep)
.toArray(CompletableFuture[]::new);
- // Create a future that logs the stepMeasures when all the futures are completed
- var future = CompletableFuture.allOf(endSteps).thenRun(this::logStepMeasures);
-
- return future;
+ // Wait for all the end steps to complete
+ return CompletableFuture.allOf(endSteps);
}
/**
@@ -125,8 +132,8 @@ public class WorkflowExecutor implements AutoCloseable {
* @param step the step id
* @return the future step
*/
- private CompletableFuture<Void> getFutureStep(String step) {
- return futures.computeIfAbsent(step, this::createFutureStep);
+ private CompletableFuture<Void> getStep(String step) {
+ return futures.computeIfAbsent(step, this::createStep);
}
/**
@@ -135,10 +142,10 @@ public class WorkflowExecutor implements AutoCloseable {
* @param stepId the step id
* @return the future step
*/
- private CompletableFuture<Void> createFutureStep(String stepId) {
+ private CompletableFuture<Void> createStep(String stepId) {
// Initialize the future step with the previous future step
// as it depends on its completion.
- var future = getPreviousFutureStep(stepId);
+ var future = getPreviousStep(stepId);
// Time the execution of the tasks
var measures = new ArrayList<TaskMeasure>();
@@ -182,7 +189,7 @@ public class WorkflowExecutor implements AutoCloseable {
* @param stepId the step id
* @return the future step
*/
- private CompletableFuture<Void> getPreviousFutureStep(String stepId) {
+ private CompletableFuture<Void> getPreviousStep(String stepId) {
var predecessors = graph.predecessors(stepId).stream().toList();
// If the step has no predecessor,
@@ -194,13 +201,13 @@ public class WorkflowExecutor implements AutoCloseable {
// If the step has one predecessor,
// return the future step associated to it.
if (predecessors.size() == 1) {
- return getFutureStep(predecessors.get(0));
+ return getStep(predecessors.get(0));
}
// If the step has multiple predecessors,
// return a future step that completes when all the predecessors complete.
var futurePredecessors = predecessors.stream()
- .map(this::getFutureStep)
+ .map(this::getStep)
.toArray(CompletableFuture[]::new);
return CompletableFuture.allOf(futurePredecessors);
}
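Callers now choose between the blocking and the asynchronous entry point. Both usages, sketched under the assumption of an already parsed Workflow instance:

    try (var executor = new WorkflowExecutor(workflow)) {
      executor.execute(); // blocks, logs step measures, and logs any error
    }

    try (var executor = new WorkflowExecutor(workflow)) {
      executor.executeAsync().join(); // wait explicitly on the returned future
    }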
diff --git a/baremaps-core/src/main/java/org/apache/baremaps/workflow/tasks/LogMessage.java b/baremaps-core/src/main/java/org/apache/baremaps/workflow/tasks/CleanContextCache.java
similarity index 81%
copy from baremaps-core/src/main/java/org/apache/baremaps/workflow/tasks/LogMessage.java
copy to baremaps-core/src/main/java/org/apache/baremaps/workflow/tasks/CleanContextCache.java
index a3f6b852..6da297d3 100644
--- a/baremaps-core/src/main/java/org/apache/baremaps/workflow/tasks/LogMessage.java
+++ b/baremaps-core/src/main/java/org/apache/baremaps/workflow/tasks/CleanContextCache.java
@@ -19,15 +19,22 @@ package org.apache.baremaps.workflow.tasks;
import org.apache.baremaps.workflow.Task;
import org.apache.baremaps.workflow.WorkflowContext;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-public record LogMessage(String message) implements Task {
+/**
+ * Clean the context cache.
+ */
+public class CleanContextCache implements Task {
- private static final Logger logger = LoggerFactory.getLogger(LogMessage.class);
+ /**
+ * Constructs a {@code CleanContextCache}.
+ */
+ public CleanContextCache() {}
+ /**
+ * {@inheritDoc}
+ */
@Override
public void execute(WorkflowContext context) throws Exception {
- logger.info(message);
+ context.cleanCache();
}
}
diff --git a/baremaps-core/src/main/java/org/apache/baremaps/workflow/tasks/LogMessage.java b/baremaps-core/src/main/java/org/apache/baremaps/workflow/tasks/CleanContextData.java
similarity index 82%
copy from baremaps-core/src/main/java/org/apache/baremaps/workflow/tasks/LogMessage.java
copy to baremaps-core/src/main/java/org/apache/baremaps/workflow/tasks/CleanContextData.java
index a3f6b852..c3b4539e 100644
--- a/baremaps-core/src/main/java/org/apache/baremaps/workflow/tasks/LogMessage.java
+++ b/baremaps-core/src/main/java/org/apache/baremaps/workflow/tasks/CleanContextData.java
@@ -19,15 +19,19 @@ package org.apache.baremaps.workflow.tasks;
import org.apache.baremaps.workflow.Task;
import org.apache.baremaps.workflow.WorkflowContext;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-public record LogMessage(String message) implements Task {
+/**
+ * Clean the context data.
+ */
+public class CleanContextData implements Task {
- private static final Logger logger = LoggerFactory.getLogger(LogMessage.class);
+ /**
+ * Constructs a {@code CleanContextData}.
+ */
+ public CleanContextData() {}
@Override
public void execute(WorkflowContext context) throws Exception {
- logger.info(message);
+ context.cleanData();
}
}
diff --git a/baremaps-core/src/main/java/org/apache/baremaps/workflow/tasks/CreateGeocoderOpenStreetMap.java b/baremaps-core/src/main/java/org/apache/baremaps/workflow/tasks/CreateGeocoderOpenStreetMap.java
index 055ed5cb..1ef192f1 100644
--- a/baremaps-core/src/main/java/org/apache/baremaps/workflow/tasks/CreateGeocoderOpenStreetMap.java
+++ b/baremaps-core/src/main/java/org/apache/baremaps/workflow/tasks/CreateGeocoderOpenStreetMap.java
@@ -20,23 +20,12 @@ package org.apache.baremaps.workflow.tasks;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
-import java.nio.file.Paths;
import java.util.List;
-import org.apache.baremaps.database.collection.AppendOnlyBuffer;
import org.apache.baremaps.database.collection.DataMap;
-import org.apache.baremaps.database.collection.MemoryAlignedDataList;
-import org.apache.baremaps.database.collection.MemoryAlignedDataMap;
-import org.apache.baremaps.database.collection.MonotonicDataMap;
-import org.apache.baremaps.database.memory.MemoryMappedDirectory;
-import org.apache.baremaps.database.type.LongDataType;
-import org.apache.baremaps.database.type.LongListDataType;
-import org.apache.baremaps.database.type.PairDataType;
-import org.apache.baremaps.database.type.geometry.LonLatDataType;
import org.apache.baremaps.geocoder.GeocoderConstants;
import org.apache.baremaps.geocoderosm.GeocoderOsmConsumerEntity;
import org.apache.baremaps.openstreetmap.pbf.PbfEntityReader;
import org.apache.baremaps.stream.StreamUtils;
-import org.apache.baremaps.utils.FileUtils;
import org.apache.baremaps.workflow.Task;
import org.apache.baremaps.workflow.WorkflowContext;
import org.apache.lucene.index.IndexWriter;
@@ -48,52 +37,38 @@ import org.slf4j.LoggerFactory;
/**
* Experimental feature.
- *
+ *
* @see org.apache.baremaps.geocoderosm
*/
-public record CreateGeocoderOpenStreetMap(Path file, Path indexDirectory)
- implements
- Task {
+public class CreateGeocoderOpenStreetMap implements Task {
private static final Logger logger = LoggerFactory.getLogger(CreateGeocoderOpenStreetMap.class);
+ private Path file;
+ private Path indexDirectory;
+
+ /**
+ * Constructs a {@code CreateGeocoderOpenStreetMap}.
+ */
+ public CreateGeocoderOpenStreetMap() {}
+
+ /**
+ * Constructs a {@code CreateGeocoderOpenStreetMap}.
+ *
+ * @param file the OSM PBF file
+ * @param indexDirectory the index directory
+ */
+ public CreateGeocoderOpenStreetMap(Path file, Path indexDirectory) {
+ this.file = file;
+ this.indexDirectory = indexDirectory;
+ }
+
@Override
public void execute(WorkflowContext context) throws Exception {
-
var path = file.toAbsolutePath();
-
- var cacheDir = Files.createTempDirectory(Paths.get("."), "cache_");
-
- DataMap<Long, Coordinate> coordinateMap;
- if (Files.size(path) > 1 << 30) {
- var coordinateDir = Files.createDirectories(cacheDir.resolve("coordinate_keys"));
- coordinateMap = new MemoryAlignedDataMap<>(
- new LonLatDataType(),
- new MemoryMappedDirectory(coordinateDir));
- } else {
- var coordinateKeysDir = Files.createDirectories(cacheDir.resolve("coordinate_keys"));
- var coordinateValuesDir = Files.createDirectories(cacheDir.resolve("coordinate_vals"));
- coordinateMap =
- new MonotonicDataMap<>(
- new MemoryAlignedDataList<>(
- new PairDataType<>(new LongDataType(), new LongDataType()),
- new MemoryMappedDirectory(coordinateKeysDir)),
- new AppendOnlyBuffer<>(
- new LonLatDataType(),
- new MemoryMappedDirectory(coordinateValuesDir)));
- }
-
- var referenceKeysDir = Files.createDirectory(cacheDir.resolve("reference_keys"));
- var referenceValuesDir = Files.createDirectory(cacheDir.resolve("reference_vals"));
- var referenceMap =
- new MonotonicDataMap<>(
- new MemoryAlignedDataList<>(
- new PairDataType<>(new LongDataType(), new LongDataType()),
- new MemoryMappedDirectory(referenceKeysDir)),
- new AppendOnlyBuffer<>(
- new LongListDataType(),
- new MemoryMappedDirectory(referenceValuesDir)));
+ var coordinateMap = context.getCoordinateMap(path);
+ var referenceMap = context.getReferenceMap(path);
var directory = FSDirectory.open(indexDirectory);
var config = new IndexWriterConfig(GeocoderConstants.ANALYZER);
@@ -106,7 +81,6 @@ public record CreateGeocoderOpenStreetMap(Path file, Path indexDirectory)
referenceMap,
importer);
}
- FileUtils.deleteRecursively(cacheDir);
}
public static void execute(
diff --git a/baremaps-core/src/main/java/org/apache/baremaps/workflow/tasks/CreateGeonamesIndex.java b/baremaps-core/src/main/java/org/apache/baremaps/workflow/tasks/CreateGeonamesIndex.java
index 79ed1158..6ceaa504 100644
--- a/baremaps-core/src/main/java/org/apache/baremaps/workflow/tasks/CreateGeonamesIndex.java
+++ b/baremaps-core/src/main/java/org/apache/baremaps/workflow/tasks/CreateGeonamesIndex.java
@@ -35,10 +35,34 @@ import org.slf4j.LoggerFactory;
/**
* A task that creates a geonames index.
*/
-public record CreateGeonamesIndex(Path dataFile, Path indexDirectory) implements Task {
+public class CreateGeonamesIndex implements Task {
private static final Logger logger = LoggerFactory.getLogger(CreateGeonamesIndex.class);
+ private Path dataFile;
+ private Path indexDirectory;
+
+ /**
+ * Constructs a {@code CreateGeonamesIndex}.
+ */
+ public CreateGeonamesIndex() {
+
+ }
+
+ /**
+ * Constructs a {@code CreateGeonamesIndex}.
+ *
+ * @param dataFile the path to the data file
+ * @param indexDirectory the path to the index directory
+ */
+ public CreateGeonamesIndex(Path dataFile, Path indexDirectory) {
+ this.dataFile = dataFile;
+ this.indexDirectory = indexDirectory;
+ }
+
+ /**
+ * {@inheritDoc}
+ */
@Override
public void execute(WorkflowContext context) throws Exception {
var directory = MMapDirectory.open(indexDirectory);
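A construction sketch for the class-based geonames task (the file locations are hypothetical):

    var createGeonamesIndex = new CreateGeonamesIndex(
        Path.of("downloads/allCountries.txt"),
        Path.of("indexes/geonames"));
    createGeonamesIndex.execute(new WorkflowContext());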
diff --git a/baremaps-core/src/main/java/org/apache/baremaps/workflow/tasks/CreateIplocIndex.java b/baremaps-core/src/main/java/org/apache/baremaps/workflow/tasks/CreateIplocIndex.java
index bfbd5d9b..3ca50274 100644
--- a/baremaps-core/src/main/java/org/apache/baremaps/workflow/tasks/CreateIplocIndex.java
+++ b/baremaps-core/src/main/java/org/apache/baremaps/workflow/tasks/CreateIplocIndex.java
@@ -36,13 +36,37 @@ import org.slf4j.LoggerFactory;
import org.sqlite.SQLiteConfig;
import org.sqlite.SQLiteDataSource;
-public record CreateIplocIndex(
- Path geonamesIndexPath,
- List<Path> nicPaths,
- Path targetIplocIndexPath) implements Task {
+public class CreateIplocIndex implements Task {
private static final Logger logger = LoggerFactory.getLogger(CreateIplocIndex.class);
+ private Path geonamesIndexPath;
+ private List<Path> nicPaths;
+ private Path targetIplocIndexPath;
+
+ /**
+ * Constructs a {@code CreateIplocIndex}.
+ */
+ public CreateIplocIndex() {
+
+ }
+
+ /**
+ * Constructs a {@code CreateIplocIndex}.
+ *
+ * @param geonamesIndexPath the path to the geonames index
+ * @param nicPaths the paths to the nic files
+ * @param targetIplocIndexPath the path to the target iploc index
+ */
+ public CreateIplocIndex(Path geonamesIndexPath, List<Path> nicPaths, Path targetIplocIndexPath) {
+ this.geonamesIndexPath = geonamesIndexPath;
+ this.nicPaths = nicPaths;
+ this.targetIplocIndexPath = targetIplocIndexPath;
+ }
+
+ /**
+ * {@inheritDoc}
+ */
@Override
public void execute(WorkflowContext context) throws Exception {
try (var directory = MMapDirectory.open(geonamesIndexPath);
diff --git a/baremaps-core/src/main/java/org/apache/baremaps/workflow/tasks/DecompressBZip2.java b/baremaps-core/src/main/java/org/apache/baremaps/workflow/tasks/DecompressBZip2.java
deleted file mode 100644
index 9ed96630..00000000
--- a/baremaps-core/src/main/java/org/apache/baremaps/workflow/tasks/DecompressBZip2.java
+++ /dev/null
@@ -1,46 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to you under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.baremaps.workflow.tasks;
-
-import java.io.BufferedInputStream;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.nio.file.StandardCopyOption;
-import org.apache.baremaps.workflow.Task;
-import org.apache.baremaps.workflow.WorkflowContext;
-import org.apache.baremaps.workflow.WorkflowException;
-import org.apache.commons.compress.compressors.bzip2.BZip2CompressorInputStream;
-
-public record DecompressBZip2(Path source, Path target) implements Task {
-
- @Override
- public void execute(WorkflowContext context) throws Exception {
- var sourcePath = source.toAbsolutePath();
- try (var bufferedInputStream = new BufferedInputStream(Files.newInputStream(sourcePath));
- var compressedInputStream = new BZip2CompressorInputStream(bufferedInputStream)) {
- var targetPath = target.toAbsolutePath();
- if (!Files.exists(targetPath)) {
- Files.createDirectories(targetPath.getParent());
- Files.createFile(targetPath);
- }
- Files.copy(compressedInputStream, targetPath, StandardCopyOption.REPLACE_EXISTING);
- } catch (Exception e) {
- throw new WorkflowException(e);
- }
- }
-}
diff --git a/baremaps-core/src/main/java/org/apache/baremaps/workflow/tasks/DecompressFile.java b/baremaps-core/src/main/java/org/apache/baremaps/workflow/tasks/DecompressFile.java
index 4aa1bb38..8ac8b76c 100644
--- a/baremaps-core/src/main/java/org/apache/baremaps/workflow/tasks/DecompressFile.java
+++ b/baremaps-core/src/main/java/org/apache/baremaps/workflow/tasks/DecompressFile.java
@@ -32,8 +32,17 @@ import org.apache.commons.compress.compressors.bzip2.BZip2CompressorInputStream;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-public record DecompressFile(Path source, Path target, Compression compression) implements Task {
+/**
+ * Decompresses a file based on a given compression format. The supported formats are zip, targz,
+ * tarbz2, gzip and bzip2.
+ */
+public class DecompressFile implements Task {
+
+ private static final Logger logger = LoggerFactory.getLogger(DecompressFile.class);
+ /**
+ * The compression format.
+ */
public enum Compression {
zip,
targz,
@@ -42,8 +51,33 @@ public record DecompressFile(Path source, Path target, Compression compression)
bzip2;
}
- private static final Logger logger = LoggerFactory.getLogger(UngzipFile.class);
+ private Path source;
+ private Path target;
+ private Compression compression;
+
+ /**
+ * Constructs a {@code DecompressFile}.
+ */
+ public DecompressFile() {
+
+ }
+
+ /**
+ * Constructs a {@code DecompressFile}.
+ *
+ * @param source the source file
+ * @param target the target file
+ * @param compression the compression format (zip, targz, tarbz2, gzip or bzip2)
+ */
+ public DecompressFile(Path source, Path target, Compression compression) {
+ this.source = source;
+ this.target = target;
+ this.compression = compression;
+ }
+ /**
+ * {@inheritDoc}
+ */
@Override
public void execute(WorkflowContext context) throws Exception {
var sourcePath = source.toAbsolutePath();
@@ -57,83 +91,108 @@ public record DecompressFile(Path source, Path target, Compression compression)
}
}
- public static void decompressBzip2(Path sourcePath, Path targetPath) throws IOException {
- try (var bufferedInputStream = new BufferedInputStream(Files.newInputStream(sourcePath));
+ /**
+ * Decompresses a bzip2 file.
+ *
+ * @param source the source file
+ * @param target the target file
+ * @throws IOException if an I/O error occurs
+ */
+ protected static void decompressBzip2(Path source, Path target) throws IOException {
+ try (var bufferedInputStream = new BufferedInputStream(Files.newInputStream(source));
var bzip2InputStream = new BZip2CompressorInputStream(bufferedInputStream)) {
- Files.copy(bzip2InputStream, targetPath, StandardCopyOption.REPLACE_EXISTING);
+ Files.copy(bzip2InputStream, target, StandardCopyOption.REPLACE_EXISTING);
}
}
- public static void decompressGzip(Path sourcePath, Path targetPath) throws IOException {
- try (var zis = new GZIPInputStream(new BufferedInputStream(Files.newInputStream(sourcePath)))) {
- Files.copy(zis, targetPath, StandardCopyOption.REPLACE_EXISTING);
+ /**
+ * Decompresses a gzip file.
+ *
+ * @param source the source file
+ * @param target the target file
+ * @throws IOException if an I/O error occurs
+ */
+ protected static void decompressGzip(Path source, Path target) throws IOException {
+ try (var bufferedInputStream = new BufferedInputStream(Files.newInputStream(source));
+ var zis = new GZIPInputStream(bufferedInputStream)) {
+ Files.copy(zis, target, StandardCopyOption.REPLACE_EXISTING);
}
}
- public static void decompressTarGz(Path sourcePath, Path targetPath) throws IOException {
- try (var bufferedInputStream = new BufferedInputStream(Files.newInputStream(sourcePath));
+ /**
+ * Decompresses a tar.gz file.
+ *
+ * @param source the source file
+ * @param target the target file
+ * @throws IOException if an I/O error occurs
+ */
+ protected static void decompressTarGz(Path source, Path target) throws IOException {
+ try (var bufferedInputStream = new BufferedInputStream(Files.newInputStream(source));
var gzipInputStream = new GZIPInputStream(bufferedInputStream);
var tarInputStream = new TarArchiveInputStream(gzipInputStream)) {
TarArchiveEntry entry;
while ((entry = (TarArchiveEntry) tarInputStream.getNextEntry()) != null) {
- var path = targetPath.resolve(entry.getName());
+ var path = target.resolve(entry.getName());
if (entry.isDirectory()) {
Files.createDirectories(path);
} else {
Files.createDirectories(path.getParent());
try (BufferedOutputStream outputStream =
new BufferedOutputStream(Files.newOutputStream(path))) {
- int bytesRead;
- byte[] buffer = new byte[4096];
- while ((bytesRead = tarInputStream.read(buffer)) != -1) {
- outputStream.write(buffer, 0, bytesRead);
- }
+ tarInputStream.transferTo(outputStream);
}
}
}
}
}
- public static void decompressTarBz2(Path sourcePath, Path targetPath) throws IOException {
- try (var bufferedInputStream = new BufferedInputStream(Files.newInputStream(sourcePath));
+ /**
+ * Decompresses a tar.bz2 file.
+ *
+ * @param source the source file
+ * @param target the target file
+ * @throws IOException if an I/O error occurs
+ */
+ protected static void decompressTarBz2(Path source, Path target) throws IOException {
+ try (var bufferedInputStream = new BufferedInputStream(Files.newInputStream(source));
var bzip2InputStream = new BZip2CompressorInputStream(bufferedInputStream);
var tarInputStream = new TarArchiveInputStream(bzip2InputStream)) {
TarArchiveEntry entry;
while ((entry = (TarArchiveEntry) tarInputStream.getNextEntry()) != null) {
- var path = targetPath.resolve(entry.getName());
+ var path = target.resolve(entry.getName());
if (entry.isDirectory()) {
Files.createDirectories(path);
} else {
Files.createDirectories(path.getParent());
try (BufferedOutputStream outputStream =
new BufferedOutputStream(Files.newOutputStream(path))) {
- int bytesRead;
- byte[] buffer = new byte[4096];
- while ((bytesRead = tarInputStream.read(buffer)) != -1) {
- outputStream.write(buffer, 0, bytesRead);
- }
+ tarInputStream.transferTo(outputStream);
}
}
}
}
}
- public static void decompressZip(Path sourcePath, Path targetPath) throws IOException {
- try (var zipFile = new ZipFile(sourcePath.toFile())) {
+ /**
+ * Decompresses a zip file.
+ *
+ * @param source the source file
+ * @param target the target file
+ * @throws IOException if an I/O error occurs
+ */
+ protected static void decompressZip(Path source, Path target) throws IOException {
+ try (var zipFile = new ZipFile(source.toFile())) {
var entries = zipFile.entries();
while (entries.hasMoreElements()) {
var entry = entries.nextElement();
- var path = targetPath.resolve(entry.getName());
+ var path = target.resolve(entry.getName());
Files.createDirectories(path.getParent());
- Files.write(path, new byte[] {}, StandardOpenOption.CREATE,
+ Files.write(path, new byte[] {},
+ StandardOpenOption.CREATE,
StandardOpenOption.TRUNCATE_EXISTING);
try (var input = new BufferedInputStream(zipFile.getInputStream(entry));
var output = new BufferedOutputStream(new FileOutputStream(path.toFile()))) {
- int nBytes = -1;
- byte[] buffer = new byte[4096];
- while ((nBytes = input.read(buffer)) > 0) {
- output.write(buffer, 0, nBytes);
- }
+ input.transferTo(output);
}
}
}
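The single DecompressFile task replaces UnzipFile, UngzipFile, and DecompressBZip2. A hedged construction example (the paths are hypothetical):

    var decompress = new DecompressFile(
        Path.of("downloads/planet.osm.pbf.bz2"),
        Path.of("data/planet.osm.pbf"),
        DecompressFile.Compression.bzip2);
    decompress.execute(new WorkflowContext());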
diff --git a/baremaps-core/src/main/java/org/apache/baremaps/workflow/tasks/DownloadUrl.java b/baremaps-core/src/main/java/org/apache/baremaps/workflow/tasks/DownloadUrl.java
index c6362ea5..3d31aa05 100644
--- a/baremaps-core/src/main/java/org/apache/baremaps/workflow/tasks/DownloadUrl.java
+++ b/baremaps-core/src/main/java/org/apache/baremaps/workflow/tasks/DownloadUrl.java
@@ -29,36 +29,63 @@ import org.apache.baremaps.workflow.WorkflowContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-public record DownloadUrl(String url, Path path, boolean replaceExisting) implements Task {
+/**
+ * Downloads a file from a URL.
+ */
+public class DownloadUrl implements Task {
+
+ private static final Logger logger = LoggerFactory.getLogger(DownloadUrl.class);
private static final String PROTOCOL_FTP = "ftp";
+
private static final String PROTOCOL_HTTP = "http";
+
private static final String PROTOCOL_HTTPS = "https";
- public DownloadUrl(String url, Path path) {
- this(url, path, false);
+ private String source;
+
+ private Path target;
+
+ private boolean replaceExisting = true;
+
+ /**
+ * Constructs a {@code DownloadUrl}.
+ */
+ public DownloadUrl() {
+
}
- private static final Logger logger = LoggerFactory.getLogger(DownloadUrl.class);
+ /**
+ * Constructs a {@code DownloadUrl}.
+ *
+ * @param source the url
+ * @param target the path
+ * @param replaceExisting whether to replace existing files
+ */
+ public DownloadUrl(String source, Path target, boolean replaceExisting) {
+ this.source = source;
+ this.target = target;
+ this.replaceExisting = replaceExisting;
+ }
@Override
public void execute(WorkflowContext context) throws Exception {
- var targetUrl = new URL(url);
- var targetPath = path.toAbsolutePath();
+ var sourceURL = new URL(source);
+ var targetPath = target.toAbsolutePath();
if (Files.exists(targetPath) && !replaceExisting) {
- logger.info("Skipping download of {} to {}", url, path);
+ logger.info("Skipping download of {} to {}", source, target);
return;
}
- if (isHttp(targetUrl)) {
- var get = (HttpURLConnection) targetUrl.openConnection();
+ if (isHttp(sourceURL)) {
+ var get = (HttpURLConnection) sourceURL.openConnection();
get.setInstanceFollowRedirects(true);
get.setRequestMethod("GET");
urlDownloadToFile(get, targetPath);
get.disconnect();
- } else if (isFtp(targetUrl)) {
- urlDownloadToFile(targetUrl.openConnection(), targetPath);
+ } else if (isFtp(sourceURL)) {
+ urlDownloadToFile(sourceURL.openConnection(), targetPath);
} else {
throw new IllegalArgumentException("Unsupported URL protocol (supported: http(s)/ftp)");
}
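With the uniform source/target naming, a download step can be constructed as follows (the URL and paths are placeholders):

    var download = new DownloadUrl(
        "https://download.geofabrik.de/europe/liechtenstein-latest.osm.pbf",
        Path.of("downloads/liechtenstein.osm.pbf"),
        false); // keep an already downloaded file
    download.execute(new WorkflowContext());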
diff --git a/baremaps-core/src/main/java/org/apache/baremaps/workflow/tasks/ExecuteCommand.java b/baremaps-core/src/main/java/org/apache/baremaps/workflow/tasks/ExecuteCommand.java
index 4cb4218b..6f1f8f27 100644
--- a/baremaps-core/src/main/java/org/apache/baremaps/workflow/tasks/ExecuteCommand.java
+++ b/baremaps-core/src/main/java/org/apache/baremaps/workflow/tasks/ExecuteCommand.java
@@ -22,12 +22,37 @@ import org.apache.baremaps.workflow.WorkflowContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-public record ExecuteCommand(String command) implements Task {
+/**
+ * Executes a shell command.
+ */
+public class ExecuteCommand implements Task {
private static final Logger logger = LoggerFactory.getLogger(ExecuteCommand.class);
+ private String command;
+
+ /**
+ * Constructs an {@code ExecuteCommand}.
+ */
+ public ExecuteCommand() {
+
+ }
+
+ /**
+ * Constructs an {@code ExecuteCommand}.
+ *
+ * @param command the bash command
+ */
+ public ExecuteCommand(String command) {
+ this.command = command;
+ }
+
+ /**
+ * {@inheritDoc}
+ */
@Override
public void execute(WorkflowContext context) throws Exception {
new ProcessBuilder().command("/bin/sh", "-c", command).start().waitFor();
}
+
}
diff --git a/baremaps-core/src/main/java/org/apache/baremaps/workflow/tasks/ExecuteSql.java b/baremaps-core/src/main/java/org/apache/baremaps/workflow/tasks/ExecuteSql.java
index 25058c8d..0e42adfd 100644
--- a/baremaps-core/src/main/java/org/apache/baremaps/workflow/tasks/ExecuteSql.java
+++ b/baremaps-core/src/main/java/org/apache/baremaps/workflow/tasks/ExecuteSql.java
@@ -29,10 +29,42 @@ import org.apache.baremaps.workflow.WorkflowException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-public record ExecuteSql(Object database, Path file, boolean parallel) implements Task {
+/**
+ * Execute a SQL query (single statement).
+ */
+public class ExecuteSql implements Task {
private static final Logger logger = LoggerFactory.getLogger(ExecuteSql.class);
+ private Object database;
+
+ private Path file;
+
+ private boolean parallel = false;
+
+ /**
+ * Constructs an {@code ExecuteSql}.
+ */
+ public ExecuteSql() {
+
+ }
+
+ /**
+ * Constructs an {@code ExecuteSql}.
+ *
+ * @param database the database
+ * @param file the SQL file
+ * @param parallel whether to execute the queries in parallel
+ */
+ public ExecuteSql(Object database, Path file, boolean parallel) {
+ this.database = database;
+ this.file = file;
+ this.parallel = parallel;
+ }
+
+ /**
+ * {@inheritDoc}
+ */
@Override
public void execute(WorkflowContext context) throws Exception {
var script = clean(Files.readString(file));
diff --git a/baremaps-core/src/main/java/org/apache/baremaps/workflow/tasks/ExecuteSqlScript.java b/baremaps-core/src/main/java/org/apache/baremaps/workflow/tasks/ExecuteSqlScript.java
index 50341a47..03134a8b 100644
--- a/baremaps-core/src/main/java/org/apache/baremaps/workflow/tasks/ExecuteSqlScript.java
+++ b/baremaps-core/src/main/java/org/apache/baremaps/workflow/tasks/ExecuteSqlScript.java
@@ -23,9 +23,41 @@ import java.sql.SQLException;
import org.apache.baremaps.workflow.Task;
import org.apache.baremaps.workflow.WorkflowContext;
import org.apache.baremaps.workflow.WorkflowException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
-public record ExecuteSqlScript(Object database, Path file) implements Task {
+/**
+ * Execute a SQL script (multiple statements).
+ */
+public class ExecuteSqlScript implements Task {
+
+ private static final Logger logger = LoggerFactory.getLogger(ExecuteSqlScript.class);
+
+ private Object database;
+
+ private Path file;
+
+ /**
+ * Constructs an {@code ExecuteSqlScript}.
+ */
+ public ExecuteSqlScript() {
+
+ }
+
+ /**
+ * Constructs an {@code ExecuteSqlScript}.
+ *
+ * @param database the database
+ * @param file the SQL file
+ */
+ public ExecuteSqlScript(Object database, Path file) {
+ this.database = database;
+ this.file = file;
+ }
+ /**
+ * {@inheritDoc}
+ */
@Override
public void execute(WorkflowContext context) throws Exception {
var script = Files.readString(file);
diff --git a/baremaps-core/src/main/java/org/apache/baremaps/workflow/tasks/ExportVectorTiles.java b/baremaps-core/src/main/java/org/apache/baremaps/workflow/tasks/ExportVectorTiles.java
index 93a27a2c..8e277ef8 100644
--- a/baremaps-core/src/main/java/org/apache/baremaps/workflow/tasks/ExportVectorTiles.java
+++ b/baremaps-core/src/main/java/org/apache/baremaps/workflow/tasks/ExportVectorTiles.java
@@ -44,12 +44,12 @@ import org.locationtech.jts.geom.Envelope;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-public record ExportVectorTiles(
- Path tileset,
- Path repository,
- int batchArraySize,
- int batchArrayIndex,
- Format format) implements Task {
+/**
+ * Export vector tiles from a tileset.
+ */
+public class ExportVectorTiles implements Task {
+
+ private static final Logger logger = LoggerFactory.getLogger(ExportVectorTiles.class);
public enum Format {
file,
@@ -57,8 +57,33 @@ public record ExportVectorTiles(
pmtiles
}
- private static final Logger logger = LoggerFactory.getLogger(ExportVectorTiles.class);
+ private Path tileset;
+ private Path repository;
+ private Format format;
+
+ /**
+ * Constructs an {@code ExportVectorTiles}.
+ */
+ public ExportVectorTiles() {
+
+ }
+
+ /**
+ * Constructs an {@code ExportVectorTiles}.
+ *
+ * @param tileset the tileset
+ * @param repository the repository
+ * @param format the format
+ */
+ public ExportVectorTiles(Path tileset, Path repository, Format format) {
+ this.tileset = tileset;
+ this.repository = repository;
+ this.format = format;
+ }
+ /**
+ * {@inheritDoc}
+ */
@Override
public void execute(WorkflowContext context) throws Exception {
var configReader = new ConfigReader();
diff --git a/baremaps-core/src/main/java/org/apache/baremaps/workflow/tasks/ImportDaylightFeatures.java b/baremaps-core/src/main/java/org/apache/baremaps/workflow/tasks/ImportDaylightFeatures.java
index 8422d8fc..1930f7d3 100644
--- a/baremaps-core/src/main/java/org/apache/baremaps/workflow/tasks/ImportDaylightFeatures.java
+++ b/baremaps-core/src/main/java/org/apache/baremaps/workflow/tasks/ImportDaylightFeatures.java
@@ -28,8 +28,15 @@ import org.apache.baremaps.openstreetmap.postgres.PostgresRelationRepository;
import org.apache.baremaps.openstreetmap.postgres.PostgresWayRepository;
import org.apache.baremaps.workflow.Task;
import org.apache.baremaps.workflow.WorkflowContext;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
-public record ImportDaylightFeatures(Path file, Object database) implements Task {
+/**
+ * Import daylight features.
+ */
+public class ImportDaylightFeatures implements Task {
+
+ private static final Logger logger = LoggerFactory.getLogger(ImportDaylightFeatures.class);
record Feature(
@JsonProperty("osm_type") String type,
@@ -39,6 +46,31 @@ public record ImportDaylightFeatures(Path file, Object database) implements Task
@JsonProperty("category") String category) {
}
+ private Path file;
+
+ private Object database;
+
+ /**
+ * Constructs an {@code ImportDaylightFeatures}.
+ */
+ public ImportDaylightFeatures() {
+
+ }
+
+ /**
+ * Constructs an {@code ImportDaylightFeatures}.
+ *
+ * @param file the daylight file
+ * @param database the database
+ */
+ public ImportDaylightFeatures(Path file, Object database) {
+ this.file = file;
+ this.database = database;
+ }
+
+ /**
+ * {@inheritDoc}
+ */
@Override
public void execute(WorkflowContext context) throws Exception {
var datasource = context.getDataSource(database);
@@ -60,42 +92,33 @@ public record ImportDaylightFeatures(Path file, Object database) implements Task
case "node" -> {
var node = nodeRepository.get(feature.id());
if (node != null) {
- // Merge the tags
var tags = new HashMap<>(feature.tags());
if (node.getTags() != null) {
tags.putAll(node.getTags());
}
node.setTags(tags);
-
- // Update the node
nodeRepository.put(node);
}
}
case "way" -> {
var way = wayRepository.get(feature.id());
if (way != null) {
- // Merge the tags
var tags = new HashMap<>(feature.tags());
if (way.getTags() != null) {
tags.putAll(way.getTags());
}
way.setTags(tags);
-
- // Update the way
wayRepository.put(way);
}
}
case "relation" -> {
var relation = relationRepository.get(feature.id());
if (relation != null) {
- // Merge the tags
var tags = new HashMap<>(feature.tags());
if (relation.getTags() != null) {
tags.putAll(relation.getTags());
}
relation.setTags(tags);
-
- // Update the relation
relationRepository.put(relation);
}
}
diff --git a/baremaps-core/src/main/java/org/apache/baremaps/workflow/tasks/ImportDaylightTranslations.java b/baremaps-core/src/main/java/org/apache/baremaps/workflow/tasks/ImportDaylightTranslations.java
index c5e9d86a..c4142d52 100644
--- a/baremaps-core/src/main/java/org/apache/baremaps/workflow/tasks/ImportDaylightTranslations.java
+++ b/baremaps-core/src/main/java/org/apache/baremaps/workflow/tasks/ImportDaylightTranslations.java
@@ -26,8 +26,15 @@ import org.apache.baremaps.openstreetmap.postgres.PostgresRelationRepository;
import org.apache.baremaps.openstreetmap.postgres.PostgresWayRepository;
import org.apache.baremaps.workflow.Task;
import org.apache.baremaps.workflow.WorkflowContext;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
-public record ImportDaylightTranslations(Path file, Object database) implements Task {
+/**
+ * Import daylight translations.
+ */
+public class ImportDaylightTranslations implements Task {
+
+ private static final Logger logger = LoggerFactory.getLogger(ImportDaylightTranslations.class);
record Group(String type, Long id, String name) {
@@ -50,6 +57,31 @@ public record ImportDaylightTranslations(Path file, Object database) implements
}
}
+ private Path file;
+
+ private Object database;
+
+ /**
+ * Constructs an {@code ImportDaylightTranslations}.
+ */
+ public ImportDaylightTranslations() {
+
+ }
+
+ /**
+ * Constructs an {@code ImportDaylightTranslations}.
+ *
+ * @param file the daylight file
+ * @param database the database
+ */
+ public ImportDaylightTranslations(Path file, Object database) {
+ this.file = file;
+ this.database = database;
+ }
+
+ /**
+ * {@inheritDoc}
+ */
@Override
public void execute(WorkflowContext context) throws Exception {
var datasource = context.getDataSource(database);
diff --git a/baremaps-core/src/main/java/org/apache/baremaps/workflow/tasks/ImportGeoPackage.java b/baremaps-core/src/main/java/org/apache/baremaps/workflow/tasks/ImportGeoPackage.java
index a8a6b52c..bdfc982b 100644
--- a/baremaps-core/src/main/java/org/apache/baremaps/workflow/tasks/ImportGeoPackage.java
+++ b/baremaps-core/src/main/java/org/apache/baremaps/workflow/tasks/ImportGeoPackage.java
@@ -29,12 +29,43 @@ import org.apache.baremaps.workflow.WorkflowException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-public record ImportGeoPackage(Path file, Object database, Integer sourceSRID, Integer targetSRID)
- implements
- Task {
+/**
+ * Import a GeoPackage into a database.
+ */
+public class ImportGeoPackage implements Task {
private static final Logger logger = LoggerFactory.getLogger(ImportGeoPackage.class);
+ private Path file;
+ private Integer fileSrid;
+ private Object database;
+ private Integer databaseSrid;
+
+ /**
+ * Constructs an {@code ImportGeoPackage}.
+ */
+ public ImportGeoPackage() {
+
+ }
+
+ /**
+ * Constructs an {@code ImportGeoPackage}.
+ *
+ * @param file the GeoPackage file
+ * @param fileSrid the source SRID
+ * @param database the database
+ * @param databaseSrid the target SRID
+ */
+ public ImportGeoPackage(Path file, Integer fileSrid, Object database, Integer databaseSrid) {
+ this.file = file;
+ this.fileSrid = fileSrid;
+ this.database = database;
+ this.databaseSrid = databaseSrid;
+ }
+
+ /**
+ * {@inheritDoc}
+ */
@Override
public void execute(WorkflowContext context) throws Exception {
var path = file.toAbsolutePath();
@@ -43,7 +74,7 @@ public record ImportGeoPackage(Path file, Object database, Integer sourceSRID, I
var postgresDataStore = new PostgresDataSchema(dataSource);
for (var name : geoPackageDataStore.list()) {
var geoPackageTable = geoPackageDataStore.get(name);
- var projectionTransformer = new ProjectionTransformer(sourceSRID, targetSRID);
+ var projectionTransformer = new ProjectionTransformer(fileSrid, databaseSrid);
var rowTransformer =
new DataTableGeometryTransformer(geoPackageTable, projectionTransformer);
var transformedDataTable =
diff --git a/baremaps-core/src/main/java/org/apache/baremaps/workflow/tasks/ImportOsmOsc.java b/baremaps-core/src/main/java/org/apache/baremaps/workflow/tasks/ImportOsmOsc.java
index b16fff32..429a0d51 100644
--- a/baremaps-core/src/main/java/org/apache/baremaps/workflow/tasks/ImportOsmOsc.java
+++ b/baremaps-core/src/main/java/org/apache/baremaps/workflow/tasks/ImportOsmOsc.java
@@ -22,79 +22,70 @@ import static org.apache.baremaps.stream.ConsumerUtils.consumeThenReturn;
import java.io.BufferedInputStream;
import java.nio.file.Files;
import java.nio.file.Path;
-import java.nio.file.Paths;
-import org.apache.baremaps.database.collection.*;
-import org.apache.baremaps.database.memory.MemoryMappedDirectory;
-import org.apache.baremaps.database.type.LongDataType;
-import org.apache.baremaps.database.type.LongListDataType;
-import org.apache.baremaps.database.type.PairDataType;
-import org.apache.baremaps.database.type.geometry.LonLatDataType;
import org.apache.baremaps.openstreetmap.function.*;
import org.apache.baremaps.openstreetmap.postgres.*;
import org.apache.baremaps.openstreetmap.repository.CopyChangeImporter;
import org.apache.baremaps.openstreetmap.xml.XmlChangeReader;
import org.apache.baremaps.utils.Compression;
-import org.apache.baremaps.utils.FileUtils;
import org.apache.baremaps.workflow.Task;
import org.apache.baremaps.workflow.WorkflowContext;
-import org.locationtech.jts.geom.Coordinate;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-public record ImportOsmOsc(
- Path file,
- Path cache,
- Object database,
- Integer srid,
- Compression compression) implements Task {
+/**
+ * Import an OSM OSC file into a database.
+ */
+public class ImportOsmOsc implements Task {
private static final Logger logger = LoggerFactory.getLogger(ImportOsmOsc.class);
- @Override
- public void execute(WorkflowContext context) throws Exception {
- var datasource = context.getDataSource(database);
- var path = file.toAbsolutePath();
+ private Path file;
+ private Compression compression;
+ private Object database;
+ private Integer databaseSrid;
- var cacheDir = cache != null ? cache : Files.createTempDirectory(Paths.get("."), "cache_");
+ /**
+ * Constructs an {@code ImportOsmOsc}.
+ */
+ public ImportOsmOsc() {
- DataMap<Long, Coordinate> coordinateMap;
- if (Files.size(path) > 1 << 30) {
- var coordinateDir = Files.createDirectories(cacheDir.resolve("coordinates"));
- coordinateMap = new MemoryAlignedDataMap<>(
- new LonLatDataType(),
- new MemoryMappedDirectory(coordinateDir));
- } else {
- var coordinateKeysDir = Files.createDirectories(cacheDir.resolve("coordinate_keys"));
- var coordinateValuesDir = Files.createDirectories(cacheDir.resolve("coordinate_vals"));
- coordinateMap =
- new MonotonicDataMap<>(
- new MemoryAlignedDataList<>(
- new PairDataType<>(new LongDataType(), new LongDataType()),
- new MemoryMappedDirectory(coordinateKeysDir)),
- new AppendOnlyBuffer<>(
- new LonLatDataType(),
- new MemoryMappedDirectory(coordinateValuesDir)));
- }
+ }
- var referenceKeysDir = Files.createDirectories(cacheDir.resolve("reference_keys"));
- var referenceValuesDir = Files.createDirectories(cacheDir.resolve("reference_vals"));
- var referenceMap =
- new MonotonicDataMap<>(
- new MemoryAlignedDataList<>(
- new PairDataType<>(new LongDataType(), new LongDataType()),
- new MemoryMappedDirectory(referenceKeysDir)),
- new AppendOnlyBuffer<>(
- new LongListDataType(),
- new MemoryMappedDirectory(referenceValuesDir)));
+ /**
+ * Constructs an {@code ImportOsmOsc}.
+ *
+ * @param file the OSM OSC file
+ * @param compression the compression
+ * @param database the database
+ * @param databaseSrid the database SRID
+ */
+ public ImportOsmOsc(Path file, Compression compression, Object database, Integer databaseSrid) {
+ this.file = file;
+ this.compression = compression;
+ this.database = database;
+ this.databaseSrid = databaseSrid;
+ }
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public void execute(WorkflowContext context) throws Exception {
+ var path = file.toAbsolutePath();
+
+ // Initialize the repositories
+ var datasource = context.getDataSource(database);
var nodeRepository = new PostgresNodeRepository(datasource);
var wayRepository = new PostgresWayRepository(datasource);
var relationRepository = new PostgresRelationRepository(datasource);
+ var coordinateMap = context.getCoordinateMap(path);
+ var referenceMap = context.getReferenceMap(path);
+
var coordinateMapBuilder = new CoordinateMapBuilder(coordinateMap);
var referenceMapBuilder = new ReferenceMapBuilder(referenceMap);
var buildGeometry = new EntityGeometryBuilder(coordinateMap, referenceMap);
- var reprojectGeometry = new EntityProjectionTransformer(4326, srid);
+ var reprojectGeometry = new EntityProjectionTransformer(4326,
databaseSrid);
var prepareGeometries = coordinateMapBuilder
.andThen(referenceMapBuilder)
.andThen(buildGeometry)
@@ -106,7 +97,5 @@ public record ImportOsmOsc(
new
BufferedInputStream(compression.decompress(Files.newInputStream(path)))) {
new
XmlChangeReader().stream(changeInputStream).map(prepareChange).forEach(importChange);
}
-
- FileUtils.deleteRecursively(cacheDir);
}
}
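For reference, a minimal usage sketch of the reworked ImportOsmOsc task. The
JDBC URL is the placeholder used elsewhere in this commit, and the name of the
Compression constant is an assumption (the JSON workflows below spell it
"gzip"):

import java.nio.file.Paths;
import org.apache.baremaps.utils.Compression;
import org.apache.baremaps.workflow.WorkflowContext;
import org.apache.baremaps.workflow.tasks.ImportOsmOsc;

public class ImportOsmOscExample {
  public static void main(String[] args) throws Exception {
    // Uniform (file, compression, database, databaseSrid) argument order
    // introduced by this commit.
    var task = new ImportOsmOsc(
        Paths.get("data/roads.osc.gz"),
        Compression.GZIP, // assumed constant name
        "jdbc:postgresql://localhost:5432/baremaps?&user=baremaps&password=baremaps",
        3857);
    task.execute(new WorkflowContext());
  }
}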
diff --git
a/baremaps-core/src/main/java/org/apache/baremaps/workflow/tasks/ImportOsmPbf.java
b/baremaps-core/src/main/java/org/apache/baremaps/workflow/tasks/ImportOsmPbf.java
index 499d244f..01297716 100644
---
a/baremaps-core/src/main/java/org/apache/baremaps/workflow/tasks/ImportOsmPbf.java
+++
b/baremaps-core/src/main/java/org/apache/baremaps/workflow/tasks/ImportOsmPbf.java
@@ -20,14 +20,8 @@ package org.apache.baremaps.workflow.tasks;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
-import java.nio.file.Paths;
import java.util.List;
import org.apache.baremaps.database.collection.*;
-import org.apache.baremaps.database.memory.MemoryMappedDirectory;
-import org.apache.baremaps.database.type.LongDataType;
-import org.apache.baremaps.database.type.LongListDataType;
-import org.apache.baremaps.database.type.PairDataType;
-import org.apache.baremaps.database.type.geometry.LonLatDataType;
import org.apache.baremaps.openstreetmap.model.Node;
import org.apache.baremaps.openstreetmap.model.Relation;
import org.apache.baremaps.openstreetmap.model.Way;
@@ -39,75 +33,77 @@ import
org.apache.baremaps.openstreetmap.postgres.PostgresWayRepository;
import org.apache.baremaps.openstreetmap.repository.*;
import org.apache.baremaps.openstreetmap.repository.BlockImporter;
import org.apache.baremaps.stream.StreamUtils;
-import org.apache.baremaps.utils.FileUtils;
import org.apache.baremaps.workflow.Task;
import org.apache.baremaps.workflow.WorkflowContext;
import org.locationtech.jts.geom.Coordinate;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-public record ImportOsmPbf(
- Path file,
- Path cache,
- Boolean cleanCache,
- Object database,
- Integer databaseSrid,
- Boolean replaceExisting) implements Task {
+/**
+ * Import an OSM PBF file into a database.
+ */
+public class ImportOsmPbf implements Task {
private static final Logger logger =
LoggerFactory.getLogger(ImportOsmPbf.class);
+ private Path file;
+ private Object database;
+ private Integer databaseSrid;
+ private Boolean replaceExisting;
+
+ /**
+ * Constructs an {@code ImportOsmPbf}.
+ */
+ public ImportOsmPbf() {
+
+ }
+
+ /**
+ * Constructs an {@code ImportOsmPbf}.
+ *
+ * @param file the OSM PBF file
+ * @param database the database
+ * @param databaseSrid the database SRID
+ * @param replaceExisting whether to replace the existing tables
+ */
+ public ImportOsmPbf(Path file, Object database,
+ Integer databaseSrid, Boolean replaceExisting) {
+ this.file = file;
+ this.database = database;
+ this.databaseSrid = databaseSrid;
+ this.replaceExisting = replaceExisting;
+ }
+
+ /**
+ * {@inheritDoc}
+ */
@Override
public void execute(WorkflowContext context) throws Exception {
- var dataSource = context.getDataSource(database);
var path = file.toAbsolutePath();
- var headerRepository = new PostgresHeaderRepository(dataSource);
- var nodeRepository = new PostgresNodeRepository(dataSource);
- var wayRepository = new PostgresWayRepository(dataSource);
- var relationRepository = new PostgresRelationRepository(dataSource);
+ // Initialize the repositories
+ var datasource = context.getDataSource(database);
+ var headerRepository = new PostgresHeaderRepository(datasource);
+ var nodeRepository = new PostgresNodeRepository(datasource);
+ var wayRepository = new PostgresWayRepository(datasource);
+ var relationRepository = new PostgresRelationRepository(datasource);
if (replaceExisting) {
+ // Drop the existing tables
headerRepository.drop();
nodeRepository.drop();
wayRepository.drop();
relationRepository.drop();
+
+ // Create the new tables
headerRepository.create();
nodeRepository.create();
wayRepository.create();
relationRepository.create();
}
- var cacheDir = cache != null ? cache :
Files.createTempDirectory(Paths.get("."), "cache_");
-
- DataMap<Long, Coordinate> coordinateMap;
- if (Files.size(path) > 1 << 30) {
- var coordinateDir =
Files.createDirectories(cacheDir.resolve("coordinates"));
- coordinateMap = new MemoryAlignedDataMap<>(
- new LonLatDataType(),
- new MemoryMappedDirectory(coordinateDir));
- } else {
- var coordinateKeysDir =
Files.createDirectories(cacheDir.resolve("coordinate_keys"));
- var coordinateValuesDir =
Files.createDirectories(cacheDir.resolve("coordinate_vals"));
- coordinateMap =
- new MonotonicDataMap<>(
- new MemoryAlignedDataList<>(
- new PairDataType<>(new LongDataType(), new LongDataType()),
- new MemoryMappedDirectory(coordinateKeysDir)),
- new AppendOnlyBuffer<>(
- new LonLatDataType(),
- new MemoryMappedDirectory(coordinateValuesDir)));
- }
-
- var referenceKeysDir =
Files.createDirectories(cacheDir.resolve("reference_keys"));
- var referenceValuesDir =
Files.createDirectories(cacheDir.resolve("reference_vals"));
- var referenceMap =
- new MonotonicDataMap<>(
- new MemoryAlignedDataList<>(
- new PairDataType<>(new LongDataType(), new LongDataType()),
- new MemoryMappedDirectory(referenceKeysDir)),
- new AppendOnlyBuffer<>(
- new LongListDataType(),
- new MemoryMappedDirectory(referenceValuesDir)));
+ var coordinateMap = context.getCoordinateMap(path);
+ var referenceMap = context.getReferenceMap(path);
execute(
path,
@@ -118,12 +114,21 @@ public record ImportOsmPbf(
wayRepository,
relationRepository,
databaseSrid);
-
- if (cleanCache) {
- FileUtils.deleteRecursively(cacheDir);
- }
}
+ /**
+ * Imports an OSM PBF file into a database.
+ *
+ * @param path the OSM PBF file
+ * @param coordinateMap the coordinate map
+ * @param referenceMap the reference map
+ * @param headerRepository the header repository
+ * @param nodeRepository the node repository
+ * @param wayRepository the way repository
+ * @param relationRepository the relation repository
+ * @param databaseSrid the database SRID
+ * @throws IOException if an I/O error occurs
+ */
public static void execute(
Path path,
DataMap<Long, Coordinate> coordinateMap,
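The cache and cleanCache parameters are gone: the coordinate and reference
maps now come from the WorkflowContext. A usage sketch matching the updated
ImportOsmPbfTest below (the no-arg constructor presumably exists for JSON
deserialization):

import java.nio.file.Paths;
import org.apache.baremaps.workflow.WorkflowContext;
import org.apache.baremaps.workflow.tasks.ImportOsmPbf;

public class ImportOsmPbfExample {
  public static void main(String[] args) throws Exception {
    var task = new ImportOsmPbf(
        Paths.get("data/data.osm.pbf"),
        "jdbc:postgresql://localhost:5432/baremaps?&user=baremaps&password=baremaps",
        3857,  // databaseSrid
        true); // replaceExisting: drop and recreate the tables
    task.execute(new WorkflowContext());
  }
}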
diff --git
a/baremaps-core/src/main/java/org/apache/baremaps/workflow/tasks/ImportShapefile.java
b/baremaps-core/src/main/java/org/apache/baremaps/workflow/tasks/ImportShapefile.java
index b37f56b7..0449b8d7 100644
---
a/baremaps-core/src/main/java/org/apache/baremaps/workflow/tasks/ImportShapefile.java
+++
b/baremaps-core/src/main/java/org/apache/baremaps/workflow/tasks/ImportShapefile.java
@@ -29,12 +29,43 @@ import org.apache.baremaps.workflow.WorkflowException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-public record ImportShapefile(Path file, Object database, Integer sourceSRID,
Integer targetSRID)
- implements
- Task {
+/**
+ * Import a shapefile into a database.
+ */
+public class ImportShapefile implements Task {
private static final Logger logger =
LoggerFactory.getLogger(ImportShapefile.class);
+ private Path file;
+ private Integer fileSrid;
+ private Object database;
+ private Integer databaseSrid;
+
+ /**
+ * Constructs an {@code ImportShapefile}.
+ */
+ public ImportShapefile() {
+
+ }
+
+ /**
+ * Constructs an {@code ImportShapefile}.
+ *
+ * @param file the shapefile
+ * @param fileSrid the file SRID
+ * @param database the database
+ * @param databaseSrid the database SRID
+ */
+ public ImportShapefile(Path file, Integer fileSrid, Object database, Integer
databaseSrid) {
+ this.file = file;
+ this.fileSrid = fileSrid;
+ this.database = database;
+ this.databaseSrid = databaseSrid;
+ }
+
+ /**
+ * {@inheritDoc}
+ */
@Override
public void execute(WorkflowContext context) throws Exception {
var path = file.toAbsolutePath();
@@ -43,7 +74,7 @@ public record ImportShapefile(Path file, Object database,
Integer sourceSRID, In
var dataSource = context.getDataSource(database);
var postgresDataStore = new PostgresDataSchema(dataSource);
var rowTransformer = new DataTableGeometryTransformer(shapefileDataTable,
- new ProjectionTransformer(sourceSRID, targetSRID));
+ new ProjectionTransformer(fileSrid, databaseSrid));
var transformedDataTable = new DataTableAdapter(shapefileDataTable,
rowTransformer);
postgresDataStore.add(transformedDataTable);
} catch (Exception e) {
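A usage sketch of the renamed parameters (fileSrid, databaseSrid), mirroring
the updated ImportShapefileTest below; the path and JDBC URL are placeholders
taken from the workflows in this commit:

import java.nio.file.Paths;
import org.apache.baremaps.workflow.WorkflowContext;
import org.apache.baremaps.workflow.tasks.ImportShapefile;

public class ImportShapefileExample {
  public static void main(String[] args) throws Exception {
    var task = new ImportShapefile(
        Paths.get("data/water-polygons-split-3857/water_polygons.shp"),
        3857, // fileSrid
        "jdbc:postgresql://localhost:5432/baremaps?&user=baremaps&password=baremaps",
        3857); // databaseSrid
    task.execute(new WorkflowContext());
  }
}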
diff --git
a/baremaps-core/src/main/java/org/apache/baremaps/workflow/tasks/LogMessage.java
b/baremaps-core/src/main/java/org/apache/baremaps/workflow/tasks/LogMessage.java
index a3f6b852..e6320570 100644
---
a/baremaps-core/src/main/java/org/apache/baremaps/workflow/tasks/LogMessage.java
+++
b/baremaps-core/src/main/java/org/apache/baremaps/workflow/tasks/LogMessage.java
@@ -22,10 +22,34 @@ import org.apache.baremaps.workflow.WorkflowContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-public record LogMessage(String message) implements Task {
+/**
+ * Log a message.
+ */
+public class LogMessage implements Task {
private static final Logger logger =
LoggerFactory.getLogger(LogMessage.class);
+ private String message;
+
+ /**
+ * Constructs a {@code LogMessage}.
+ */
+ public LogMessage() {
+
+ }
+
+ /**
+ * Constructs a {@code LogMessage}.
+ *
+ * @param message the message
+ */
+ public LogMessage(String message) {
+ this.message = message;
+ }
+
+ /**
+ * {@inheritDoc}
+ */
@Override
public void execute(WorkflowContext context) throws Exception {
logger.info(message);
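The same record-to-class conversion applied to the simplest task; a minimal
sketch:

import org.apache.baremaps.workflow.WorkflowContext;
import org.apache.baremaps.workflow.tasks.LogMessage;

public class LogMessageExample {
  public static void main(String[] args) throws Exception {
    // Logs the message through the task's slf4j logger.
    new LogMessage("Workflow started").execute(new WorkflowContext());
  }
}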
diff --git
a/baremaps-core/src/main/java/org/apache/baremaps/workflow/tasks/UngzipFile.java
b/baremaps-core/src/main/java/org/apache/baremaps/workflow/tasks/UngzipFile.java
deleted file mode 100644
index d55d7662..00000000
---
a/baremaps-core/src/main/java/org/apache/baremaps/workflow/tasks/UngzipFile.java
+++ /dev/null
@@ -1,51 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to you under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.baremaps.workflow.tasks;
-
-import java.io.BufferedInputStream;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.nio.file.StandardCopyOption;
-import java.util.zip.GZIPInputStream;
-import org.apache.baremaps.workflow.Task;
-import org.apache.baremaps.workflow.WorkflowContext;
-import org.apache.baremaps.workflow.WorkflowException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public record UngzipFile(Path file, Path directory) implements Task {
-
- private static final Logger logger =
LoggerFactory.getLogger(UngzipFile.class);
-
- @Override
- public void execute(WorkflowContext context) throws Exception {
- var filePath = file.toAbsolutePath();
- var directoryPath = directory.toAbsolutePath();
- try (var zis = new GZIPInputStream(new
BufferedInputStream(Files.newInputStream(filePath)))) {
- var decompressed =
directoryPath.resolve(filePath.getFileName().toString().substring(0,
- filePath.getFileName().toString().length() - 3));
- if (!Files.exists(decompressed)) {
- Files.createDirectories(decompressed.getParent());
- Files.createFile(decompressed);
- }
- Files.copy(zis, decompressed, StandardCopyOption.REPLACE_EXISTING);
- } catch (Exception e) {
- throw new WorkflowException(e);
- }
- }
-}
diff --git
a/baremaps-core/src/main/java/org/apache/baremaps/workflow/tasks/UnzipFile.java
b/baremaps-core/src/main/java/org/apache/baremaps/workflow/tasks/UnzipFile.java
deleted file mode 100644
index dc89d9c5..00000000
---
a/baremaps-core/src/main/java/org/apache/baremaps/workflow/tasks/UnzipFile.java
+++ /dev/null
@@ -1,66 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to you under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.baremaps.workflow.tasks;
-
-import java.io.*;
-import java.nio.file.*;
-import java.util.zip.ZipFile;
-import org.apache.baremaps.workflow.Task;
-import org.apache.baremaps.workflow.WorkflowContext;
-
-public record UnzipFile(Path file, Path directory) implements Task {
-
- @Override
- public void execute(WorkflowContext context) throws Exception {
- var filePath = file.toAbsolutePath();
- var directoryPath = directory.toAbsolutePath();
-
- try (var zipFile = new ZipFile(filePath.toFile())) {
- var entries = zipFile.entries();
-
- while (entries.hasMoreElements()) {
- var ze = entries.nextElement();
- if (ze.isDirectory()) {
- continue;
- }
-
- var path = directoryPath.resolve(ze.getName());
-
- var file = path.toFile().getCanonicalFile();
- var directory = directoryPath.toFile().getCanonicalFile();
- if (!file.toPath().startsWith(directory.toPath())) {
- throw new IOException("Entry is outside of the target directory");
- }
-
- Files.createDirectories(path.getParent());
- Files.write(path, new byte[] {}, StandardOpenOption.CREATE,
- StandardOpenOption.TRUNCATE_EXISTING);
-
- try (var input = new BufferedInputStream(zipFile.getInputStream(ze));
- var output = new BufferedOutputStream(new
FileOutputStream(path.toFile()))) {
-
- int nBytes;
- byte[] buffer = new byte[4096];
- while ((nBytes = input.read(buffer)) > 0) {
- output.write(buffer, 0, nBytes);
- }
- }
- }
- }
- }
-}
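The DecompressFile task that replaces UnzipFile and UngzipFile is not shown in
this excerpt. Below is a minimal sketch of how a single task can dispatch on a
compression enum, reusing the zip-slip guard from the deleted UnzipFile above;
all names here are assumptions, not the actual DecompressFile implementation:

import java.io.BufferedInputStream;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;
import java.util.zip.GZIPInputStream;
import java.util.zip.ZipFile;

public class DecompressSketch {

  public enum Compression { zip, gzip }

  /** Dispatches to the decompression routine for the given format. */
  public static void decompress(Path source, Path target, Compression compression)
      throws IOException {
    switch (compression) {
      case zip -> unzip(source, target);
      case gzip -> ungzip(source, target);
    }
  }

  /** Gunzips source into target, stripping a trailing .gz from the file name. */
  private static void ungzip(Path source, Path target) throws IOException {
    var name = source.getFileName().toString();
    var stripped = name.endsWith(".gz") ? name.substring(0, name.length() - 3) : name;
    var destination = target.resolve(stripped);
    Files.createDirectories(destination.getParent());
    try (var input = new GZIPInputStream(
        new BufferedInputStream(Files.newInputStream(source)))) {
      Files.copy(input, destination, StandardCopyOption.REPLACE_EXISTING);
    }
  }

  /** Extracts every file entry of a zip archive into target. */
  private static void unzip(Path source, Path target) throws IOException {
    var root = target.toAbsolutePath().normalize();
    try (var zipFile = new ZipFile(source.toFile())) {
      var entries = zipFile.entries();
      while (entries.hasMoreElements()) {
        var entry = entries.nextElement();
        if (entry.isDirectory()) {
          continue;
        }
        var destination = root.resolve(entry.getName()).normalize();
        // Zip-slip guard, as in the deleted UnzipFile task.
        if (!destination.startsWith(root)) {
          throw new IOException("Entry is outside of the target directory");
        }
        Files.createDirectories(destination.getParent());
        try (var input = zipFile.getInputStream(entry)) {
          Files.copy(input, destination, StandardCopyOption.REPLACE_EXISTING);
        }
      }
    }
  }
}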
diff --git
a/baremaps-core/src/main/java/org/apache/baremaps/workflow/tasks/UpdateOsmDatabase.java
b/baremaps-core/src/main/java/org/apache/baremaps/workflow/tasks/UpdateOsmDatabase.java
index 1a2005d2..154024d3 100644
---
a/baremaps-core/src/main/java/org/apache/baremaps/workflow/tasks/UpdateOsmDatabase.java
+++
b/baremaps-core/src/main/java/org/apache/baremaps/workflow/tasks/UpdateOsmDatabase.java
@@ -46,15 +46,39 @@ import org.locationtech.jts.geom.Coordinate;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-public record UpdateOsmDatabase(Object database, Integer databaseSrid,
- String replicationUrl) implements Task {
+/**
+ * Update an OSM database based on the header data stored in the database.
+ */
+public class UpdateOsmDatabase implements Task {
private static final Logger logger =
LoggerFactory.getLogger(UpdateOsmDatabase.class);
- public UpdateOsmDatabase(Object database, Integer databaseSrid) {
- this(database, databaseSrid, null);
+ private Object database;
+ private Integer databaseSrid;
+ private String replicationUrl;
+
+ /**
+ * Constructs a {@code UpdateOsmDatabase}.
+ */
+ public UpdateOsmDatabase() {
+
}
+ /**
+ * Constructs a {@code UpdateOsmDatabase}.
+ *
+ * @param database the database
+ * @param databaseSrid the database SRID
+ * @param replicationUrl the replication URL
+ */
+ public UpdateOsmDatabase(Object database, Integer databaseSrid, String
replicationUrl) {
+ this.database = database;
+ this.databaseSrid = databaseSrid;
+ this.replicationUrl = replicationUrl;
+ }
+
+ /**
+ * {@inheritDoc}
+ */
@Override
public void execute(WorkflowContext context) throws Exception {
var datasource = context.getDataSource(database);
@@ -75,6 +99,18 @@ public record UpdateOsmDatabase(Object database, Integer
databaseSrid,
replicationUrl);
}
+ /**
+ * Executes the task.
+ *
+ * @param coordinateMap the coordinate map
+ * @param referenceMap the reference map
+ * @param headerRepository the header repository
+ * @param nodeRepository the node repository
+ * @param wayRepository the way repository
+ * @param relationRepository the relation repository
+ * @param databaseSrid the database SRID
+ * @param replicationUrl the replication URL
+ * @throws Exception if something went wrong
+ */
public static void execute(DataMap<Long, Coordinate> coordinateMap,
DataMap<Long, List<Long>> referenceMap,
HeaderRepository headerRepository, Repository<Long, Node> nodeRepository,
@@ -82,6 +118,7 @@ public record UpdateOsmDatabase(Object database, Integer
databaseSrid,
Integer databaseSrid,
String replicationUrl) throws Exception {
+ // Get the latest header from the database
var header = headerRepository.selectLatest();
// If the replicationUrl is not provided, use the one from the latest
header.
@@ -89,6 +126,7 @@ public record UpdateOsmDatabase(Object database, Integer
databaseSrid,
replicationUrl = header.getReplicationUrl();
}
+ // Get the sequence number of the latest header
var stateReader = new StateReader(replicationUrl, true);
var sequenceNumber = header.getReplicationSequenceNumber();
@@ -101,21 +139,23 @@ public record UpdateOsmDatabase(Object database, Integer
databaseSrid,
}
}
+ // Increment the sequence number and get the changeset URL
var nextSequenceNumber = sequenceNumber + 1;
var changeUrl = stateReader.getUrl(replicationUrl, nextSequenceNumber,
"osc.gz");
logger.info("Updating the database with the changeset: {}", changeUrl);
+ // Process the changeset and update the database
var createGeometry = new EntityGeometryBuilder(coordinateMap,
referenceMap);
var reprojectGeometry = new EntityProjectionTransformer(4326,
databaseSrid);
var prepareGeometries = new
ChangeEntitiesHandler(createGeometry.andThen(reprojectGeometry));
var prepareChange = consumeThenReturn(prepareGeometries);
var importChange = new PutChangeImporter(nodeRepository, wayRepository,
relationRepository);
-
try (var changeInputStream =
new GZIPInputStream(new BufferedInputStream(changeUrl.openStream()))) {
new
XmlChangeReader().stream(changeInputStream).map(prepareChange).forEach(importChange);
}
+ // Add the new header to the database
var stateUrl = stateReader.getUrl(replicationUrl, nextSequenceNumber,
"state.txt");
try (var stateInputStream = new
BufferedInputStream(stateUrl.openStream())) {
var state = new StateReader().readState(stateInputStream);
@@ -123,5 +163,4 @@ public record UpdateOsmDatabase(Object database, Integer
databaseSrid,
header.getReplicationUrl(), header.getSource(),
header.getWritingProgram()));
}
}
-
}
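A usage sketch of the new three-argument constructor; passing null for
replicationUrl falls back to the URL stored in the latest header, per the
comment in the task above. The JDBC URL is a placeholder:

import org.apache.baremaps.workflow.WorkflowContext;
import org.apache.baremaps.workflow.tasks.UpdateOsmDatabase;

public class UpdateOsmDatabaseExample {
  public static void main(String[] args) throws Exception {
    var task = new UpdateOsmDatabase(
        "jdbc:postgresql://localhost:5432/baremaps?&user=baremaps&password=baremaps",
        3857,
        null); // use the replication URL recorded in the latest header
    task.execute(new WorkflowContext());
  }
}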
diff --git
a/baremaps-core/src/test/java/org/apache/baremaps/workflow/ObjectMapperTest.java
b/baremaps-core/src/test/java/org/apache/baremaps/workflow/ObjectMapperTest.java
index 0bfd1dab..09f1d4a8 100644
---
a/baremaps-core/src/test/java/org/apache/baremaps/workflow/ObjectMapperTest.java
+++
b/baremaps-core/src/test/java/org/apache/baremaps/workflow/ObjectMapperTest.java
@@ -39,11 +39,9 @@ public class ObjectMapperTest {
new Step("download", List.of(),
List.of(new DownloadUrl(
"https://download.geofabrik.de/europe/liechtenstein-latest.osm.pbf",
- Paths.get("liechtenstein-latest.osm.pbf")))),
+ Paths.get("liechtenstein-latest.osm.pbf"), false))),
new Step("import", List.of("download"),
List.of(new
ImportOsmPbf(Paths.get("liechtenstein-latest.osm.pbf"),
- null,
- true,
"jdbc:postgresql://localhost:5432/baremaps?&user=baremaps&password=baremaps",
3857, true)))));
var json = mapper.writeValueAsString(workflow1);
diff --git
a/baremaps-core/src/test/java/org/apache/baremaps/workflow/WorkflowTest.java
b/baremaps-core/src/test/java/org/apache/baremaps/workflow/WorkflowTest.java
index 4fb5b32d..dbc0c855 100644
--- a/baremaps-core/src/test/java/org/apache/baremaps/workflow/WorkflowTest.java
+++ b/baremaps-core/src/test/java/org/apache/baremaps/workflow/WorkflowTest.java
@@ -22,11 +22,8 @@ package org.apache.baremaps.workflow;
import java.nio.file.Paths;
import java.util.List;
import org.apache.baremaps.testing.PostgresContainerTest;
-import org.apache.baremaps.workflow.tasks.DownloadUrl;
-import org.apache.baremaps.workflow.tasks.ImportGeoPackage;
-import org.apache.baremaps.workflow.tasks.ImportOsmPbf;
-import org.apache.baremaps.workflow.tasks.ImportShapefile;
-import org.apache.baremaps.workflow.tasks.UnzipFile;
+import org.apache.baremaps.workflow.tasks.*;
+import org.apache.baremaps.workflow.tasks.DecompressFile.Compression;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;
@@ -37,13 +34,13 @@ class WorkflowTest extends PostgresContainerTest {
void naturalearthGeoPackage() {
var workflow = new Workflow(List.of(new Step("fetch-geopackage",
List.of(), List.of(
new
DownloadUrl("https://naciscdn.org/naturalearth/packages/natural_earth_vector.gpkg.zip",
- Paths.get("natural_earth_vector.gpkg.zip")),
- new UnzipFile(Paths.get("natural_earth_vector.gpkg.zip"),
- Paths.get("natural_earth_vector")),
+ Paths.get("natural_earth_vector.gpkg.zip"), false),
+ new DecompressFile(Paths.get("natural_earth_vector.gpkg.zip"),
+ Paths.get("natural_earth_vector"), Compression.zip),
new
ImportGeoPackage(Paths.get("natural_earth_vector/packages/natural_earth_vector.gpkg"),
- jdbcUrl(),
- 4326, 3857)))));
- new WorkflowExecutor(workflow).execute().join();
+ 4326, jdbcUrl(),
+ 3857)))));
+ new WorkflowExecutor(workflow).execute();
}
@Test
@@ -52,13 +49,13 @@ class WorkflowTest extends PostgresContainerTest {
var workflow = new Workflow(List.of(new Step("fetch-geopackage", List.of(),
List.of(
new
DownloadUrl("https://osmdata.openstreetmap.de/download/coastlines-split-4326.zip",
- Paths.get("coastlines-split-4326.zip")),
- new UnzipFile(Paths.get("coastlines-split-4326.zip"),
- Paths.get("coastlines-split-4326")),
+ Paths.get("coastlines-split-4326.zip"), false),
+ new DecompressFile(Paths.get("coastlines-split-4326.zip"),
+ Paths.get("coastlines-split-4326"), Compression.zip),
new
ImportShapefile(Paths.get("coastlines-split-4326/coastlines-split-4326/lines.shp"),
- jdbcUrl(),
- 4326, 3857)))));
- new WorkflowExecutor(workflow).execute().join();
+ 4326, jdbcUrl(),
+ 3857)))));
+ new WorkflowExecutor(workflow).execute();
}
@Test
@@ -74,9 +71,9 @@ class WorkflowTest extends PostgresContainerTest {
new ImportShapefile(
Paths.get(
"simplified-water-polygons-split-3857/simplified-water-polygons-split-3857/simplified_water_polygons.shp"),
-
"jdbc:postgresql://localhost:5432/baremaps?&user=baremaps&password=baremaps",
3857,
+ 3857,
"jdbc:postgresql://localhost:5432/baremaps?&user=baremaps&password=baremaps",
3857)))));
- new WorkflowExecutor(workflow).execute().join();
+ new WorkflowExecutor(workflow).execute();
}
@Test
@@ -84,9 +81,9 @@ class WorkflowTest extends PostgresContainerTest {
void workflow() {
var workflow = new Workflow(List.of(new Step("fetch-geopackage",
List.of(), List.of(
new
DownloadUrl("https://naciscdn.org/naturalearth/packages/natural_earth_vector.gpkg.zip",
- Paths.get("downloads/import_db.gpkg")),
- new ImportShapefile(Paths.get("downloads/import_db.gpkg"), jdbcUrl(),
4326, 3857)))));
- new WorkflowExecutor(workflow).execute().join();
+ Paths.get("downloads/import_db.gpkg"), false),
+ new ImportShapefile(Paths.get("downloads/import_db.gpkg"), 4326,
jdbcUrl(), 3857)))));
+ new WorkflowExecutor(workflow).execute();
}
@Test
@@ -95,33 +92,34 @@ class WorkflowTest extends PostgresContainerTest {
var workflow = new Workflow(List.of(
new Step("fetch-geopackage", List.of(),
List.of(new
DownloadUrl("https://tiles.baremaps.com/samples/import_db.gpkg",
- Paths.get("downloads/import_db.gpkg")))),
+ Paths.get("downloads/import_db.gpkg"), false))),
new Step("import-geopackage", List.of("fetch-geopackage"),
- List.of(new
ImportGeoPackage(Paths.get("downloads/import_db.gpkg"), jdbcUrl(), 4326,
+ List.of(new
ImportGeoPackage(Paths.get("downloads/import_db.gpkg"), 4326, jdbcUrl(),
3857))),
new Step("fetch-osmpbf", List.of(),
List.of(new
DownloadUrl("https://tiles.baremaps.com/samples/liechtenstein.osm.pbf",
- Paths.get("downloads/liechtenstein.osm.pbf")))),
+ Paths.get("downloads/liechtenstein.osm.pbf"), false))),
new Step("import-osmpbf", List.of("fetch-osmpbf"),
- List.of(new
ImportOsmPbf(Paths.get("downloads/liechtenstein.osm.pbf"), null, true,
+ List.of(new
ImportOsmPbf(Paths.get("downloads/liechtenstein.osm.pbf"),
jdbcUrl(),
3857, true))),
new Step("fetch-shapefile", List.of(), List.of(new DownloadUrl(
"https://osmdata.openstreetmap.de/download/simplified-water-polygons-split-3857.zip",
- Paths.get("downloads/simplified-water-polygons-split-3857.zip")))),
+ Paths.get("downloads/simplified-water-polygons-split-3857.zip"),
false))),
new Step("unzip-shapefile", List.of("fetch-shapefile"),
List.of(
- new
UnzipFile(Paths.get("downloads/simplified-water-polygons-split-3857.zip"),
- Paths.get("archives")))),
+ new
DecompressFile(Paths.get("downloads/simplified-water-polygons-split-3857.zip"),
+ Paths.get("archives"), Compression.zip))),
new Step("fetch-projection", List.of("unzip-shapefile"),
List.of(new
DownloadUrl("https://spatialreference.org/ref/sr-org/epsg3857/prj/",
Paths.get(
-
"archives/simplified-water-polygons-split-3857/simplified_water_polygons.prj")))),
+
"archives/simplified-water-polygons-split-3857/simplified_water_polygons.prj"),
+ false))),
new Step("import-shapefile", List.of("fetch-projection"),
List.of(new ImportShapefile(
Paths.get(
"archives/simplified-water-polygons-split-3857/simplified_water_polygons.shp"),
- jdbcUrl(), 3857, 3857)))));
- new WorkflowExecutor(workflow).execute().join();
+ 3857, jdbcUrl(), 3857)))));
+ new WorkflowExecutor(workflow).execute();
}
}
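Every test now calls execute() without the trailing join(); a minimal sketch
of the updated call shape, assuming the executor now blocks until the workflow
completes:

import java.util.List;
import org.apache.baremaps.workflow.Step;
import org.apache.baremaps.workflow.Workflow;
import org.apache.baremaps.workflow.WorkflowExecutor;

public class WorkflowExecutorExample {
  public static void main(String[] args) throws Exception {
    // An empty workflow; steps and tasks are added as in the tests above.
    var workflow = new Workflow(List.<Step>of());
    new WorkflowExecutor(workflow).execute();
  }
}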
diff --git
a/baremaps-core/src/test/java/org/apache/baremaps/workflow/tasks/DownloadUrlTest.java
b/baremaps-core/src/test/java/org/apache/baremaps/workflow/tasks/DownloadUrlTest.java
index ac8c68b7..1c2526f1 100644
---
a/baremaps-core/src/test/java/org/apache/baremaps/workflow/tasks/DownloadUrlTest.java
+++
b/baremaps-core/src/test/java/org/apache/baremaps/workflow/tasks/DownloadUrlTest.java
@@ -49,7 +49,7 @@ class DownloadUrlTest {
var file = directory.resolve("file");
// TODO: do not use a 3rd party server; replace the test URL with a baremaps-owned test resource.
var task = new DownloadUrl("ftp://whois.in.bell.ca/bell.db.gz",
- file);
+ file, false);
task.execute(new WorkflowContext());
assertTrue(file.toFile().length() > 50, "file is less than 50 bytes");
FileUtils.deleteRecursively(directory);
@@ -62,7 +62,7 @@ class DownloadUrlTest {
var file = directory.resolve("file");
assertThrows(IllegalArgumentException.class, () -> {
var task = new DownloadUrl("file://not-existing-file-243jhks",
- file);
+ file, false);
task.execute(new WorkflowContext());
}, "Unsupported protocol throws IOException");
FileUtils.deleteRecursively(directory);
@@ -74,7 +74,7 @@ class DownloadUrlTest {
var directory = Files.createTempDirectory("tmp_");
var file = directory.resolve("README.md");
var task = new
DownloadUrl("https://raw.githubusercontent.com/baremaps/baremaps/main/README.md",
- file.toAbsolutePath());
+ file.toAbsolutePath(), false);
task.execute(new WorkflowContext());
assertTrue(Files.readString(file).contains("Baremaps"));
FileUtils.deleteRecursively(directory);
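The new third constructor argument appears to be replaceExisting: the
geocoding workflow below renames "force" to "replaceExisting" for the same
task. A usage sketch:

import java.nio.file.Paths;
import org.apache.baremaps.workflow.WorkflowContext;
import org.apache.baremaps.workflow.tasks.DownloadUrl;

public class DownloadUrlExample {
  public static void main(String[] args) throws Exception {
    var task = new DownloadUrl(
        "https://tiles.baremaps.com/samples/import_db.gpkg",
        Paths.get("downloads/import_db.gpkg"),
        false); // do not replace an existing download
    task.execute(new WorkflowContext());
  }
}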
diff --git
a/baremaps-core/src/test/java/org/apache/baremaps/workflow/tasks/ImportGeoPackageTest.java
b/baremaps-core/src/test/java/org/apache/baremaps/workflow/tasks/ImportGeoPackageTest.java
index c8473c46..e633a13e 100644
---
a/baremaps-core/src/test/java/org/apache/baremaps/workflow/tasks/ImportGeoPackageTest.java
+++
b/baremaps-core/src/test/java/org/apache/baremaps/workflow/tasks/ImportGeoPackageTest.java
@@ -31,7 +31,7 @@ class ImportGeoPackageTest extends PostgresContainerTest {
@Tag("integration")
void execute() throws Exception {
var task =
- new ImportGeoPackage(TestFiles.resolve("data.gpkg"), jdbcUrl(), 4326,
3857);
+ new ImportGeoPackage(TestFiles.resolve("data.gpkg"), 4326, jdbcUrl(),
3857);
task.execute(new WorkflowContext());
}
}
diff --git
a/baremaps-core/src/test/java/org/apache/baremaps/workflow/tasks/ImportPbfTest.java
b/baremaps-core/src/test/java/org/apache/baremaps/workflow/tasks/ImportOsmPbfTest.java
similarity index 94%
rename from
baremaps-core/src/test/java/org/apache/baremaps/workflow/tasks/ImportPbfTest.java
rename to
baremaps-core/src/test/java/org/apache/baremaps/workflow/tasks/ImportOsmPbfTest.java
index 9684c6df..4a913bc5 100644
---
a/baremaps-core/src/test/java/org/apache/baremaps/workflow/tasks/ImportPbfTest.java
+++
b/baremaps-core/src/test/java/org/apache/baremaps/workflow/tasks/ImportOsmPbfTest.java
@@ -33,7 +33,7 @@ class ImportOsmPbfTest extends PostgresContainerTest {
var file = TestFiles.resolve("data.osm.pbf");
var jdbcUrl = jdbcUrl();
var srid = 3857;
- var task = new ImportOsmPbf(file, null, true, jdbcUrl, srid, true);
+ var task = new ImportOsmPbf(file, jdbcUrl, srid, true);
task.execute(new WorkflowContext());
}
}
diff --git
a/baremaps-core/src/test/java/org/apache/baremaps/workflow/tasks/ImportShapefileTest.java
b/baremaps-core/src/test/java/org/apache/baremaps/workflow/tasks/ImportShapefileTest.java
index d611efb0..443e829e 100644
---
a/baremaps-core/src/test/java/org/apache/baremaps/workflow/tasks/ImportShapefileTest.java
+++
b/baremaps-core/src/test/java/org/apache/baremaps/workflow/tasks/ImportShapefileTest.java
@@ -24,6 +24,7 @@ import org.apache.baremaps.testing.PostgresContainerTest;
import org.apache.baremaps.testing.TestFiles;
import org.apache.baremaps.utils.FileUtils;
import org.apache.baremaps.workflow.WorkflowContext;
+import org.apache.baremaps.workflow.tasks.DecompressFile.Compression;
import org.junit.jupiter.api.Tag;
import org.junit.jupiter.api.Test;
@@ -32,13 +33,13 @@ class ImportShapefileTest extends PostgresContainerTest {
@Test
@Tag("integration")
void execute() throws Exception {
- var zip = TestFiles.resolve("monaco-shapefile.zip");
- var directory = Files.createTempDirectory("tmp_");
- var unzip = new UnzipFile(zip, directory);
- unzip.execute(new WorkflowContext());
- var task = new
ImportShapefile(directory.resolve("gis_osm_buildings_a_free_1.shp"),
- jdbcUrl(), 4326, 3857);
- task.execute(new WorkflowContext());
- FileUtils.deleteRecursively(directory);
+ var source = TestFiles.resolve("monaco-shapefile.zip");
+ var target = Files.createTempDirectory("tmp_");
+ var decompressFile = new DecompressFile(source, target, Compression.zip);
+ decompressFile.execute(new WorkflowContext());
+ var importShapefile = new
ImportShapefile(target.resolve("gis_osm_buildings_a_free_1.shp"),
+ 4326, jdbcUrl(), 3857);
+ importShapefile.execute(new WorkflowContext());
+ FileUtils.deleteRecursively(target);
}
}
diff --git
a/baremaps-core/src/test/java/org/apache/baremaps/workflow/tasks/UngzipFileTest.java
b/baremaps-core/src/test/java/org/apache/baremaps/workflow/tasks/UngzipFileTest.java
deleted file mode 100644
index 78ab3504..00000000
---
a/baremaps-core/src/test/java/org/apache/baremaps/workflow/tasks/UngzipFileTest.java
+++ /dev/null
@@ -1,40 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to you under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.baremaps.workflow.tasks;
-
-
-
-import java.nio.file.Files;
-import org.apache.baremaps.testing.TestFiles;
-import org.apache.baremaps.utils.FileUtils;
-import org.apache.baremaps.workflow.WorkflowContext;
-import org.junit.jupiter.api.Tag;
-import org.junit.jupiter.api.Test;
-
-class UngzipFileTest {
-
- @Test
- @Tag("integration")
- void run() throws Exception {
- var gzip = TestFiles.resolve("ripe/sample.txt.gz");
- var directory = Files.createTempDirectory("tmp_");
- var task = new UngzipFile(gzip, directory);
- task.execute(new WorkflowContext());
- FileUtils.deleteRecursively(directory);
- }
-}
diff --git
a/baremaps-core/src/test/java/org/apache/baremaps/workflow/tasks/UnzipFileTest.java
b/baremaps-core/src/test/java/org/apache/baremaps/workflow/tasks/UnzipFileTest.java
deleted file mode 100644
index a9350139..00000000
---
a/baremaps-core/src/test/java/org/apache/baremaps/workflow/tasks/UnzipFileTest.java
+++ /dev/null
@@ -1,40 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to you under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.baremaps.workflow.tasks;
-
-
-
-import java.nio.file.Files;
-import org.apache.baremaps.testing.TestFiles;
-import org.apache.baremaps.utils.FileUtils;
-import org.apache.baremaps.workflow.WorkflowContext;
-import org.junit.jupiter.api.Tag;
-import org.junit.jupiter.api.Test;
-
-class UnzipFileTest {
-
- @Test
- @Tag("integration")
- void execute() throws Exception {
- var zip = TestFiles.resolve("monaco-shapefile.zip");
- var directory = Files.createTempDirectory("tmp_");
- var task = new UnzipFile(zip, directory);
- task.execute(new WorkflowContext());
- FileUtils.deleteRecursively(directory);
- }
-}
diff --git a/baremaps-core/src/test/resources/workflow.json
b/baremaps-core/src/test/resources/workflow.json
index 3bfb651f..3f68cb8f 100644
--- a/baremaps-core/src/test/resources/workflow.json
+++ b/baremaps-core/src/test/resources/workflow.json
@@ -13,8 +13,8 @@
"username": "%s",
"password": "%s"
},
- "sourceSRID": 4326,
- "targetSRID": 3857
+ "fileSrid": 4326,
+ "databaseSrid": 3857
}
]
}
\ No newline at end of file
diff --git a/basemap/import.js b/basemap/import.js
index 75715991..93054a34 100644
--- a/basemap/import.js
+++ b/basemap/import.js
@@ -34,55 +34,27 @@ export default {
},
]
},
- {
- "id": "natural-earth",
- "needs": [],
- "tasks": [
- {
- "type": "DownloadUrl",
- "url":
"https://naciscdn.org/naturalearth/packages/natural_earth_vector.gpkg.zip",
- "path": "data/natural_earth_vector.gpkg.zip"
- },
- {
- "type": "UnzipFile",
- "file": "data/natural_earth_vector.gpkg.zip",
- "directory": "data/natural_earth_vector"
- },
- {
- "type": "ImportGeoPackage",
- "file":
"data/natural_earth_vector/packages/natural_earth_vector.gpkg",
- "database": config.database,
- "sourceSRID": 4326,
- "targetSRID": 3857
- },
- {
- "type": "ExecuteSql",
- "file": "queries/ne_index.sql",
- "database": config.database,
- "parallel": true,
- }
- ]
- },
{
"id": "openstreetmap-water-polygons",
"needs": [],
"tasks": [
{
"type": "DownloadUrl",
- "url":
"https://osmdata.openstreetmap.de/download/water-polygons-split-3857.zip",
- "path": "data/water-polygons-split-3857.zip"
+ "source":
"https://osmdata.openstreetmap.de/download/water-polygons-split-3857.zip",
+ "target": "data/water-polygons-split-3857.zip"
},
{
- "type": "UnzipFile",
- "file": "data/water-polygons-split-3857.zip",
- "directory": "data"
+ "type": "DecompressFile",
+ "source": "data/water-polygons-split-3857.zip",
+ "target": "data",
+ "compression": "zip"
},
{
"type": "ImportShapefile",
"file": "data/water-polygons-split-3857/water_polygons.shp",
"database": config.database,
- "sourceSRID": 3857,
- "targetSRID": 3857
+ "fileSrid": 3857,
+ "databaseSrid": 3857
},
]
},
@@ -92,20 +64,21 @@ export default {
"tasks": [
{
"type": "DownloadUrl",
- "url":
"https://osmdata.openstreetmap.de/download/simplified-water-polygons-split-3857.zip",
- "path": "data/simplified-water-polygons-split-3857.zip"
+ "source":
"https://osmdata.openstreetmap.de/download/simplified-water-polygons-split-3857.zip",
+ "target": "data/simplified-water-polygons-split-3857.zip"
},
{
- "type": "UnzipFile",
- "file": "data/simplified-water-polygons-split-3857.zip",
- "directory": "data"
+ "type": "DecompressFile",
+ "source": "data/simplified-water-polygons-split-3857.zip",
+ "target": "data",
+ "compression": "zip"
},
{
"type": "ImportShapefile",
"file":
"data/simplified-water-polygons-split-3857/simplified_water_polygons.shp",
"database": config.database,
- "sourceSRID": 3857,
- "targetSRID": 3857
+ "fileSrid": 3857,
+ "databaseSrid": 3857
},
]
},
@@ -139,8 +112,8 @@ export default {
"tasks": [
{
"type": "DownloadUrl",
- "url": config.osmPbfUrl,
- "path": "data/data.osm.pbf"
+ "source": config.osmPbfUrl,
+ "target": "data/data.osm.pbf"
},
{
"type": "ImportOsmPbf",
@@ -148,7 +121,6 @@ export default {
"database": config.database,
"databaseSrid": 3857,
"replaceExisting": true,
- "cleanCache": true,
},
]
},
diff --git a/daylight/workflow.js b/daylight/workflow.js
index c256c87b..9df4a0de 100644
--- a/daylight/workflow.js
+++ b/daylight/workflow.js
@@ -20,64 +20,58 @@ export default {
"tasks": [
{
"type": "DownloadUrl",
- "url":
"https://daylight-map-distribution.s3.us-west-1.amazonaws.com/release/v1.33/planet-v1.33.osm.pbf",
- "path": "data/data.osm.pbf"
+ "source":
"https://daylight-map-distribution.s3.us-west-1.amazonaws.com/release/v1.33/planet-v1.33.osm.pbf",
+ "target": "data/data.osm.pbf"
},
{
"type": "ImportOsmPbf",
"file": "data/data.osm.pbf",
"cache": "cache/",
- "cleanCache": false,
"database": config.database,
"databaseSrid": 3857,
"replaceExisting": true,
},
{
"type": "DownloadUrl",
- "url":
"https://daylight-map-distribution.s3.us-west-1.amazonaws.com/release/v1.33/ml-buildings-v1.33.osm.pbf",
- "path": "data/buildings.osm.pbf"
+ "source":
"https://daylight-map-distribution.s3.us-west-1.amazonaws.com/release/v1.33/ml-buildings-v1.33.osm.pbf",
+ "target": "data/buildings.osm.pbf"
},
{
"type": "ImportOsmPbf",
"file": "data/buildings.osm.pbf",
"cache": "building_cache/",
- "cleanCache": true,
"database": config.database,
"databaseSrid": 3857,
"replaceExisting": false,
},
{
"type": "DownloadUrl",
- "url":
"https://daylight-map-distribution.s3.us-west-1.amazonaws.com/release/v1.33/fb-ml-roads-v1.33.osc.gz",
- "path": "data/roads.osc.gz"
+ "source":
"https://daylight-map-distribution.s3.us-west-1.amazonaws.com/release/v1.33/fb-ml-roads-v1.33.osc.gz",
+ "target": "data/roads.osc.gz"
},
{
"type": "ImportOsmOsc",
"file": "data/roads.osc.gz",
- "cache": "cache/",
- "cleanCache": false,
"compression": "gzip",
"database": config.database,
- "srid": 3857
+ "databaseSrid": 3857
},
{
"type": "DownloadUrl",
- "url":
"https://daylight-map-distribution.s3.us-west-1.amazonaws.com/release/v1.33/admin-v1.33.osc.gz",
- "path": "data/admin.osc.gz"
+ "source":
"https://daylight-map-distribution.s3.us-west-1.amazonaws.com/release/v1.33/admin-v1.33.osc.gz",
+ "target": "data/admin.osc.gz"
},
{
"type": "ImportOsmOsc",
"file": "data/admin.osc.gz",
- "cache": "cache/",
- "cleanCache": false,
"compression": "gzip",
"database": config.database,
- "srid": 3857
+ "databaseSrid": 3857
},
{
"type": "DownloadUrl",
- "url":
"https://daylight-map-distribution.s3.us-west-1.amazonaws.com/release/v1.33/coastlines-v1.33.tgz",
- "path": "data/coastlines.tgz"
+ "source":
"https://daylight-map-distribution.s3.us-west-1.amazonaws.com/release/v1.33/coastlines-v1.33.tgz",
+ "target": "data/coastlines.tgz"
},
{
"type": "DecompressFile",
@@ -89,8 +83,8 @@ export default {
"type": "ImportShapefile",
"file": "data/coastlines/water_polygons.shp",
"database": config.database,
- "sourceSRID": 4326,
- "targetSRID": 3857
+ "fileSrid": 4326,
+ "databaseSrid": 3857
},
{
"type": "ExecuteSql",
@@ -99,8 +93,8 @@ export default {
},
{
"type": "DownloadUrl",
- "url":
"https://daylight-map-distribution.s3.us-west-1.amazonaws.com/release/v1.33/preferred-localization-v1.33.tsv",
- "path": "data/preferred-localization.tsv"
+ "source":
"https://daylight-map-distribution.s3.us-west-1.amazonaws.com/release/v1.33/preferred-localization-v1.33.tsv",
+ "target": "data/preferred-localization.tsv"
},
{
"type": "ImportDaylightTranslations",
@@ -109,8 +103,8 @@ export default {
},
{
"type": "DownloadUrl",
- "url":
"https://daylight-map-distribution.s3.us-west-1.amazonaws.com/release/v1.33/important-features-v1.33.json",
- "path": "data/important-features.json"
+ "source":
"https://daylight-map-distribution.s3.us-west-1.amazonaws.com/release/v1.33/important-features-v1.33.json",
+ "target": "data/important-features.json"
},
{
"type": "ImportDaylightFeatures",
@@ -119,30 +113,30 @@ export default {
},
{
"type": "DownloadUrl",
- "url":
"https://daylight-openstreetmap.s3.us-west-2.amazonaws.com/landcover/low.shp",
- "path": "data/landcover/low.shp"
+ "source":
"https://daylight-openstreetmap.s3.us-west-2.amazonaws.com/landcover/low.shp",
+ "target": "data/landcover/low.shp"
},
{
"type": "DownloadUrl",
- "url":
"https://daylight-openstreetmap.s3.us-west-2.amazonaws.com/landcover/low.dbf",
- "path": "data/landcover/low.dbf"
+ "source":
"https://daylight-openstreetmap.s3.us-west-2.amazonaws.com/landcover/low.dbf",
+ "target": "data/landcover/low.dbf"
},
{
"type": "DownloadUrl",
- "url":
"https://daylight-openstreetmap.s3.us-west-2.amazonaws.com/landcover/low.prj",
- "path": "data/landcover/low.prj"
+ "source":
"https://daylight-openstreetmap.s3.us-west-2.amazonaws.com/landcover/low.prj",
+ "target": "data/landcover/low.prj"
},
{
"type": "DownloadUrl",
- "url":
"https://daylight-openstreetmap.s3.us-west-2.amazonaws.com/landcover/low.shx",
- "path": "data/landcover/low.shx"
+ "source":
"https://daylight-openstreetmap.s3.us-west-2.amazonaws.com/landcover/low.shx",
+ "target": "data/landcover/low.shx"
},
{
"type": "ImportShapefile",
"file": "data/landcover/low.shp",
"database": config.database,
- "sourceSRID": 4326,
- "targetSRID": 3857
+ "fileSrid": 4326,
+ "databaseSrid": 3857
},
]
},
diff --git a/examples/extrusion/workflow.json b/examples/extrusion/workflow.json
index 039ad0d1..c555df65 100644
--- a/examples/extrusion/workflow.json
+++ b/examples/extrusion/workflow.json
@@ -6,8 +6,8 @@
"tasks": [
{
"type": "DownloadUrl",
- "url":
"https://download.geofabrik.de/europe/great-britain/england/greater-london-latest.osm.pbf",
- "path": "greater-london-latest.osm.pbf"
+ "source":
"https://download.geofabrik.de/europe/great-britain/england/greater-london-latest.osm.pbf",
+ "target": "greater-london-latest.osm.pbf"
}
]
},
diff --git a/examples/geocoding/workflow.js b/examples/geocoding/workflow.js
index bee197f8..f6ad29bc 100644
--- a/examples/geocoding/workflow.js
+++ b/examples/geocoding/workflow.js
@@ -18,8 +18,8 @@ const geonamesUrl =
"https://download.geonames.org/export/dump/allCountries.zip"
// Fetch and unzip Geonames
const FetchAndUnzipGeonames = {id: "fetch-geonames-allcountries", needs: [],
tasks: [
- {type: "DownloadUrl", url: geonamesUrl, path:
"downloads/geonames-allcountries.zip", force: true},
- {type: "UnzipFile", file: "downloads/geonames-allcountries.zip",
directory: "archives"}
+ {type: "DownloadUrl", source: geonamesUrl, target:
"downloads/geonames-allcountries.zip", replaceExisting: true},
+ {type: "DecompressFile", source: "downloads/geonames-allcountries.zip",
target: "archives", compression: "zip"}
]};
// Create the Geocoder index
diff --git a/examples/ip-to-location/workflow.js
b/examples/ip-to-location/workflow.js
index c262e179..cdbad8a6 100644
--- a/examples/ip-to-location/workflow.js
+++ b/examples/ip-to-location/workflow.js
@@ -61,25 +61,27 @@ export default {"steps": [
...nics.flatMap(nic => [
{
type: "DownloadUrl",
- url: nic.url,
- path: `downloads/${nic.filename}.gz`
+ source: nic.url,
+ target: `downloads/${nic.filename}.gz`
},
{
- type: "UngzipFile",
- file: `downloads/${nic.filename}.gz`,
- directory: "archives"
+ type: "DecompressFile",
+ source: `downloads/${nic.filename}.gz`,
+ target: "archives",
+ compression: "gzip"
}
]),
{
type: "DownloadUrl",
- url:
"https://download.geonames.org/export/dump/allCountries.zip",
- path: "downloads/geonames-allcountries.zip",
+ source:
"https://download.geonames.org/export/dump/allCountries.zip",
+ target: "downloads/geonames-allcountries.zip",
force: true
},
{
- type: "UnzipFile",
- file: "downloads/geonames-allcountries.zip",
- directory: "archives"
+ type: "DecompressFile",
+ source: "downloads/geonames-allcountries.zip",
+ target: "archives",
+ compression: "zip"
},
{
type: "CreateGeonamesIndex",
diff --git a/examples/naturalearth/workflow.json
b/examples/naturalearth/workflow.json
index bd9d6444..d3b6ee04 100644
--- a/examples/naturalearth/workflow.json
+++ b/examples/naturalearth/workflow.json
@@ -6,20 +6,21 @@
"tasks": [
{
"type": "DownloadUrl",
- "url":
"https://naciscdn.org/naturalearth/packages/natural_earth_vector.gpkg.zip",
- "path": "natural_earth_vector.gpkg.zip"
+ "source":
"https://naciscdn.org/naturalearth/packages/natural_earth_vector.gpkg.zip",
+ "target": "natural_earth_vector.gpkg.zip"
},
{
- "type": "UnzipFile",
- "file": "natural_earth_vector.gpkg.zip",
- "directory": "natural_earth_vector"
+ "type": "DecompressFile",
+ "source": "natural_earth_vector.gpkg.zip",
+ "target": "natural_earth_vector",
+ "compression": "zip"
},
{
"type": "ImportGeoPackage",
"file": "natural_earth_vector/packages/natural_earth_vector.gpkg",
"database":
"jdbc:postgresql://localhost:5432/baremaps?&user=baremaps&password=baremaps",
- "sourceSRID": 4326,
- "targetSRID": 3857
+ "fileSrid": 4326,
+ "databaseSrid": 3857
},
{
"type": "ExecuteSql",
diff --git a/examples/openstreetmap/workflow.json
b/examples/openstreetmap/workflow.json
index d488cb4b..ca62ac08 100644
--- a/examples/openstreetmap/workflow.json
+++ b/examples/openstreetmap/workflow.json
@@ -6,8 +6,8 @@
"tasks": [
{
"type": "DownloadUrl",
- "url":
"https://download.geofabrik.de/europe/liechtenstein-latest.osm.pbf",
- "path": "liechtenstein-latest.osm.pbf"
+ "source":
"https://download.geofabrik.de/europe/liechtenstein-latest.osm.pbf",
+ "target": "liechtenstein-latest.osm.pbf"
}
]
},
diff --git a/examples/shadedrelief/workflow.json
b/examples/shadedrelief/workflow.json
index 48407d52..b530fac6 100644
--- a/examples/shadedrelief/workflow.json
+++ b/examples/shadedrelief/workflow.json
@@ -6,13 +6,14 @@
"tasks": [
{
"type": "DownloadUrl",
- "url":
"http://www.shadedrelief.com/ne-draft/World-Base-Map-Shapefiles.zip",
- "path": "shadedrelief.zip"
+ "source":
"http://www.shadedrelief.com/ne-draft/World-Base-Map-Shapefiles.zip",
+ "target": "shadedrelief.zip"
},
{
- "type": "UnzipFile",
- "file": "shadedrelief.zip",
- "directory": "shadedrelief"
+ "type": "DecompressFile",
+ "source": "shadedrelief.zip",
+ "target": "shadedrelief",
+ "compression": "zip"
}
]
}
diff --git a/pom.xml b/pom.xml
index 7bb72aff..54a75195 100644
--- a/pom.xml
+++ b/pom.xml
@@ -91,6 +91,7 @@ limitations under the License.
<version.lib.graalvm>23.0.2</version.lib.graalvm>
<version.lib.guava>32.1.3-jre</version.lib.guava>
<version.lib.hikari>5.0.1</version.lib.hikari>
+ <version.lib.hk2>2.6.1</version.lib.hk2>
<version.lib.ipresource>1.52</version.lib.ipresource>
<version.lib.jackson>2.13.0</version.lib.jackson>
<version.lib.jakarta>2.1.6</version.lib.jakarta>
@@ -342,6 +343,16 @@ limitations under the License.
<version>${version.lib.awaitability}</version>
<scope>test</scope>
</dependency>
+ <dependency>
+ <groupId>org.glassfish.hk2</groupId>
+ <artifactId>hk2-api</artifactId>
+ <version>${version.lib.hk2}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.glassfish.hk2</groupId>
+ <artifactId>hk2-locator</artifactId>
+ <version>${version.lib.hk2}</version>
+ </dependency>
<dependency>
<groupId>org.glassfish.jersey</groupId>
<artifactId>jersey-bom</artifactId>
diff --git a/scripts/test-basemap.sh b/scripts/test-basemap.sh
index d9ae8a7f..46554bcc 100755
--- a/scripts/test-basemap.sh
+++ b/scripts/test-basemap.sh
@@ -29,7 +29,7 @@ echo ""
rm -fr data tiles tiles.mbtiles
-baremaps workflow execute --file workflow.js
+baremaps workflow execute --file import.js
echo ""
echo "--------------------------------------------------------------------"