github-advanced-security[bot] commented on code in PR #956:
URL:
https://github.com/apache/incubator-baremaps/pull/956#discussion_r2039164878
##########
baremaps-core/src/main/java/org/apache/baremaps/tasks/ImportGeoPackage.java:
##########
@@ -70,22 +76,138 @@
*/
@Override
public void execute(WorkflowContext context) throws Exception {
+ // Validate required parameters
+ if (file == null) {
+ throw new WorkflowException("GeoPackage file path cannot be null");
+ }
+ if (fileSrid == null) {
+ throw new WorkflowException("Source SRID cannot be null");
+ }
+ if (database == null) {
+ throw new WorkflowException("Database connection cannot be null");
+ }
+ if (databaseSrid == null) {
+ throw new WorkflowException("Target SRID cannot be null");
+ }
+
var path = file.toAbsolutePath();
- try (var geoPackageDataStore = new GeoPackageDataStore(path)) {
- var dataSource = context.getDataSource(database);
- var postgresDataStore = new PostgresDataStore(dataSource);
- for (var name : geoPackageDataStore.list()) {
- var geoPackageTable = geoPackageDataStore.get(name);
- var projectionTransformer = new ProjectionTransformer(fileSrid,
databaseSrid);
- var rowTransformer =
- new DataTableGeometryMapper(geoPackageTable,
projectionTransformer);
- var transformedDataTable =
- new DataTableMapper(geoPackageDataStore.get(name), rowTransformer);
- postgresDataStore.add(transformedDataTable);
+ logger.info("Importing GeoPackage from: {}", path);
+
+ var dataSource = context.getDataSource(database);
+
+ // Set ThreadLocal DataSource for PostgresDdlExecutor to use
+ PostgresDdlExecutor.setThreadLocalDataSource(dataSource);
+
+ try {
+ // Setup Calcite connection properties
+ Properties info = new Properties();
+ info.setProperty("lex", "MYSQL");
+ info.setProperty("caseSensitive", "false");
+ info.setProperty("unquotedCasing", "TO_LOWER");
+ info.setProperty("quotedCasing", "TO_LOWER");
+ info.setProperty("parserFactory", PostgresDdlExecutor.class.getName() +
"#PARSER_FACTORY");
+
+ // Create a GeoPackageSchema instance
+ GeoPackageSchema geoPackageSchema = new GeoPackageSchema(path.toFile());
+
+ // Create a temporary schema name for the GeoPackage data
+ String schemaName = "geopackage_schema_" + System.currentTimeMillis();
+
+ try (Connection connection =
DriverManager.getConnection("jdbc:calcite:", info)) {
+ CalciteConnection calciteConnection =
connection.unwrap(CalciteConnection.class);
+ SchemaPlus rootSchema = calciteConnection.getRootSchema();
+
+ // Register the GeoPackage schema in the Calcite schema
+ rootSchema.add(schemaName, geoPackageSchema);
+
+ // Debug logging to check schema registration
+ Schema registeredSchema = rootSchema.getSubSchema(schemaName);
+ logger.info("Registered schema class: {}",
+ registeredSchema != null ? registeredSchema.getClass().getName() :
"null");
+ logger.info("Is GeoPackageSchema: {}", registeredSchema instanceof
GeoPackageSchema);
+
+ // Get the list of tables in the GeoPackage
+ List<String> tables = new ArrayList<>();
+
+ // Get the tables directly from the GeoPackage file
+ GeoPackage geoPackage = GeoPackageManager.open(file.toFile());
+ tables.addAll(geoPackage.getFeatureTables());
+ geoPackage.close();
+
+ if (tables.isEmpty()) {
+ logger.warn("No tables found in GeoPackage: {}", path);
+ return;
+ }
+
+ // Import each table
+ for (String tableName : tables) {
+ // Sanitize table name to prevent SQL injection
+ String sanitizedTableName = sanitizeTableName(tableName);
+ logger.info("Importing table: {} to: {}", tableName,
sanitizedTableName);
+
+ // Create a table in PostgreSQL by selecting from the GeoPackage
table
+ String createTableSql = "CREATE TABLE " + sanitizedTableName + " AS
" +
+ "SELECT * FROM " + schemaName + "." + tableName;
+
+ logger.info("Executing SQL: {}", createTableSql);
+
+ // Execute the DDL statement to create the table
+ try (Statement statement = connection.createStatement()) {
+ statement.execute(createTableSql);
+ }
+
+ // Set SRID on geometry column if specified
+ try (Connection pgConnection = dataSource.getConnection();
+ Statement stmt = pgConnection.createStatement()) {
+ stmt.execute(String.format(
+ "SELECT UpdateGeometrySRID('%s', 'geom', %d)",
+ sanitizedTableName, databaseSrid));
+ }
+
+ // Verify that the table was created in PostgreSQL
+ try (Connection pgConnection = dataSource.getConnection();
+ Statement statement = pgConnection.createStatement();
+ ResultSet resultSet = statement.executeQuery(
+ "SELECT EXISTS (SELECT 1 FROM information_schema.tables
WHERE table_name = '" +
+ sanitizedTableName + "')")) {
Review Comment:
## Query built by concatenation with a possibly-untrusted string
Query built by concatenation with [this expression](1), which may be
untrusted.
[Show more
details](https://github.com/apache/incubator-baremaps/security/code-scanning/1731)
##########
baremaps-calcite/src/main/java/org/apache/baremaps/calcite/BaremapsDdlExecutor.java:
##########
@@ -0,0 +1,688 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.baremaps.calcite;
+
+import static com.google.common.base.Preconditions.checkArgument;
+import static java.util.Objects.requireNonNull;
+import static org.apache.calcite.util.Static.RESOURCE;
+
+import com.google.common.collect.ImmutableList;
+import java.io.Reader;
+import java.sql.PreparedStatement;
+import java.sql.SQLException;
+import java.util.*;
+import org.apache.baremaps.calcite.data.DataMaterializedView;
+import org.apache.baremaps.calcite.data.DataModifiableTable;
+import org.apache.calcite.adapter.java.JavaTypeFactory;
+import org.apache.calcite.adapter.jdbc.JdbcSchema;
+import org.apache.calcite.avatica.AvaticaUtils;
+import org.apache.calcite.jdbc.CalcitePrepare;
+import org.apache.calcite.jdbc.CalciteSchema;
+import org.apache.calcite.jdbc.ContextSqlValidator;
+import org.apache.calcite.linq4j.Ord;
+import org.apache.calcite.materialize.MaterializationKey;
+import org.apache.calcite.materialize.MaterializationService;
+import org.apache.calcite.model.JsonSchema;
+import org.apache.calcite.plan.RelOptTable;
+import org.apache.calcite.rel.RelRoot;
+import org.apache.calcite.rel.type.RelDataType;
+import org.apache.calcite.rel.type.RelDataTypeFactory;
+import org.apache.calcite.rel.type.RelDataTypeField;
+import org.apache.calcite.rel.type.RelDataTypeImpl;
+import org.apache.calcite.rex.RexNode;
+import org.apache.calcite.schema.*;
+import org.apache.calcite.schema.impl.AbstractSchema;
+import org.apache.calcite.schema.impl.ViewTable;
+import org.apache.calcite.schema.impl.ViewTableMacro;
+import org.apache.calcite.server.DdlExecutor;
+import org.apache.calcite.server.DdlExecutorImpl;
+import org.apache.calcite.sql.*;
+import org.apache.calcite.sql.ddl.*;
+import org.apache.calcite.sql.dialect.CalciteSqlDialect;
+import org.apache.calcite.sql.fun.SqlStdOperatorTable;
+import org.apache.calcite.sql.parser.SqlAbstractParserImpl;
+import org.apache.calcite.sql.parser.SqlParseException;
+import org.apache.calcite.sql.parser.SqlParserImplFactory;
+import org.apache.calcite.sql.parser.SqlParserPos;
+import org.apache.calcite.sql.parser.ddl.SqlDdlParserImpl;
+import org.apache.calcite.sql.pretty.SqlPrettyWriter;
+import org.apache.calcite.sql.validate.SqlValidator;
+import org.apache.calcite.sql2rel.InitializerContext;
+import org.apache.calcite.sql2rel.InitializerExpressionFactory;
+import org.apache.calcite.sql2rel.NullInitializerExpressionFactory;
+import org.apache.calcite.tools.*;
+import org.apache.calcite.util.NlsString;
+import org.apache.calcite.util.Pair;
+import org.apache.calcite.util.Util;
+import org.checkerframework.checker.nullness.qual.Nullable;
+
+/**
+ * Executes DDL commands.
+ *
+ * <p>
+ * Given a DDL command that is a sub-class of {@link SqlNode}, dispatches the
command to an
+ * appropriate {@code execute} method. For example, "CREATE TABLE" ({@link
SqlCreateTable}) is
+ * dispatched to {@link #execute(SqlCreateTable, CalcitePrepare.Context)}.
+ */
+public class BaremapsDdlExecutor extends DdlExecutorImpl {
+ /** Singleton instance. */
+ public static final BaremapsDdlExecutor INSTANCE = new BaremapsDdlExecutor();
+
+ /** Parser factory. */
+ @SuppressWarnings("unused") // used via reflection
+ public static final SqlParserImplFactory PARSER_FACTORY =
+ new SqlParserImplFactory() {
+ @Override
+ public SqlAbstractParserImpl getParser(Reader stream) {
+ return SqlDdlParserImpl.FACTORY.getParser(stream);
+ }
+
+ @Override
+ public DdlExecutor getDdlExecutor() {
+ return BaremapsDdlExecutor.INSTANCE;
+ }
+ };
+
+ /**
+   * Creates a BaremapsDdlExecutor. Protected only to allow sub-classing; use
{@link #INSTANCE} where
+ * possible.
+ */
+ protected BaremapsDdlExecutor() {}
+
+ /**
+ * Returns the schema in which to create an object; the left part is null if
the schema does not
+ * exist.
+ */
+ static Pair<@Nullable CalciteSchema, String> schema(
+ CalcitePrepare.Context context, boolean mutable, SqlIdentifier id) {
+ final String name;
+ final List<String> path;
+ if (id.isSimple()) {
+ path = context.getDefaultSchemaPath();
+ name = id.getSimple();
+ } else {
+ path = Util.skipLast(id.names);
+ name = Util.last(id.names);
+ }
+ CalciteSchema schema =
+ mutable ? context.getMutableRootSchema()
+ : context.getRootSchema();
+ for (String p : path) {
+ @Nullable
+ CalciteSchema subSchema = schema.getSubSchema(p, true);
+ if (subSchema == null) {
+ return Pair.of(null, name);
+ }
+ schema = subSchema;
+ }
+ return Pair.of(schema, name);
+ }
+
+ /**
+ * Returns the SqlValidator with the given {@code context} schema and type
factory.
+ */
+ static SqlValidator validator(CalcitePrepare.Context context,
+ boolean mutable) {
+ return new ContextSqlValidator(context, mutable);
+ }
+
+ /**
+ * Wraps a query to rename its columns. Used by CREATE VIEW and CREATE
MATERIALIZED VIEW.
+ */
+ static SqlNode renameColumns(@Nullable SqlNodeList columnList,
+ SqlNode query) {
+ if (columnList == null) {
+ return query;
+ }
+ final SqlParserPos p = query.getParserPosition();
+ final SqlNodeList selectList = SqlNodeList.SINGLETON_STAR;
+ final SqlCall from =
+ SqlStdOperatorTable.AS.createCall(p,
+ ImmutableList.<SqlNode>builder()
+ .add(query)
+ .add(new SqlIdentifier("_", p))
+ .addAll(columnList)
+ .build());
+ return new SqlSelect(p, null, selectList, from, null, null, null, null,
+ null, null, null, null, null);
+ }
+
+  /** Erase the table data that calcite-server created. */
+ static void erase(SqlIdentifier name, CalcitePrepare.Context context) {
+ // Directly clearing data is more efficient than executing SQL
+ final Pair<@Nullable CalciteSchema, String> pair =
+ schema(context, true, name);
+ final CalciteSchema calciteSchema = requireNonNull(pair.left);
+ final String tblName = pair.right;
+ final CalciteSchema.TableEntry tableEntry =
+ calciteSchema.getTable(tblName, context.config().caseSensitive());
+ final Table table = requireNonNull(tableEntry, "tableEntry").getTable();
+ if (table instanceof DataModifiableTable) {
+ DataModifiableTable mutableArrayTable = (DataModifiableTable) table;
+ mutableArrayTable.rows.clear();
+ } else {
+ // Not calcite-server created, so not support truncate.
+ throw new UnsupportedOperationException("Only MutableArrayTable support
truncate");
+ }
+ }
+
+ /** Populates the table called {@code name} by executing {@code query}. */
+ static void populate(SqlIdentifier name, SqlNode query,
+ CalcitePrepare.Context context) {
+ // Generate, prepare and execute an "INSERT INTO table query" statement.
+ // (It's a bit inefficient that we convert from SqlNode to SQL and back
+ // again.)
+ final FrameworkConfig config = Frameworks.newConfigBuilder()
+ .defaultSchema(context.getRootSchema().plus())
+ .build();
+ final Planner planner = Frameworks.getPlanner(config);
+ try {
+ final StringBuilder buf = new StringBuilder();
+ final SqlWriterConfig writerConfig =
+ SqlPrettyWriter.config().withAlwaysUseParentheses(false);
+ final SqlPrettyWriter w = new SqlPrettyWriter(writerConfig, buf);
+ buf.append("INSERT INTO ");
+ name.unparse(w, 0, 0);
+ buf.append(' ');
+ query.unparse(w, 0, 0);
+ final String sql = buf.toString();
+ final SqlNode query1 = planner.parse(sql);
+ final SqlNode query2 = planner.validate(query1);
+ final RelRoot r = planner.rel(query2);
+ final PreparedStatement prepare =
+ context.getRelRunner().prepareStatement(r.rel);
+ int rowCount = prepare.executeUpdate();
+ Util.discard(rowCount);
+ prepare.close();
+ } catch (SqlParseException | ValidationException
+ | RelConversionException | SQLException e) {
+ throw Util.throwAsRuntime(e);
+ }
+ }
+
+ /**
+ * Returns the value of a literal, converting {@link NlsString} into String.
+ */
+ @SuppressWarnings("rawtypes")
+ static @Nullable Comparable value(SqlNode node) {
+ final Comparable v = SqlLiteral.value(node);
+ return v instanceof NlsString ? ((NlsString) v).getValue() : v;
+ }
+
+ /** Executes a {@code CREATE FOREIGN SCHEMA} command. */
+ public void execute(SqlCreateForeignSchema create,
+ CalcitePrepare.Context context) {
+ final Pair<@Nullable CalciteSchema, String> pair =
+ schema(context, true, create.name);
+ requireNonNull(pair.left); // TODO: should not assume parent schema exists
+ if (pair.left.plus().getSubSchema(pair.right) != null) {
+ if (!create.getReplace() && !create.ifNotExists) {
+ throw SqlUtil.newContextException(create.name.getParserPosition(),
+ RESOURCE.schemaExists(pair.right));
+ }
+ }
+ final Schema subSchema;
+ final String libraryName;
+ if (create.type != null) {
+ checkArgument(create.library == null);
+ final String typeName = (String) requireNonNull(value(create.type));
+ final JsonSchema.Type type =
+ Util.enumVal(JsonSchema.Type.class,
+ typeName.toUpperCase(Locale.ROOT));
+ if (type != null) {
+ switch (type) {
+ case JDBC:
+ libraryName = JdbcSchema.Factory.class.getName();
+ break;
+ default:
+ libraryName = null;
+ }
+ } else {
+ libraryName = null;
+ }
+ if (libraryName == null) {
+ throw SqlUtil.newContextException(create.type.getParserPosition(),
+ RESOURCE.schemaInvalidType(typeName,
+ Arrays.toString(JsonSchema.Type.values())));
+ }
+ } else {
+ libraryName =
+ requireNonNull((String) value(requireNonNull(create.library)));
+ }
+ final SchemaFactory schemaFactory =
+ AvaticaUtils.instantiatePlugin(SchemaFactory.class, libraryName);
+ final Map<String, Object> operandMap = new LinkedHashMap<>();
+ for (Pair<SqlIdentifier, SqlNode> option : create.options()) {
+ operandMap.put(option.left.getSimple(),
+ requireNonNull(value(option.right)));
+ }
+ subSchema =
+ schemaFactory.create(pair.left.plus(), pair.right, operandMap);
+ pair.left.add(pair.right, subSchema);
+ }
+
+ /** Executes a {@code CREATE FUNCTION} command. */
+ public void execute(SqlCreateFunction create,
+ CalcitePrepare.Context context) {
+ throw new UnsupportedOperationException("CREATE FUNCTION is not
supported");
+ }
+
+ /**
+ * Executes {@code DROP FUNCTION}, {@code DROP TABLE}, {@code DROP
MATERIALIZED VIEW},
+ * {@code DROP TYPE}, {@code DROP VIEW} commands.
+ */
+ public void execute(SqlDropObject drop,
+ CalcitePrepare.Context context) {
+ final Pair<@Nullable CalciteSchema, String> pair =
+ schema(context, false, drop.name);
+ final @Nullable CalciteSchema schema =
+ pair.left; // null if schema does not exist
+ final String objectName = pair.right;
+
+ boolean existed;
+ switch (drop.getKind()) {
+ case DROP_TABLE:
+ case DROP_MATERIALIZED_VIEW:
+ Table materializedView =
+ schema != null
+ && drop.getKind() == SqlKind.DROP_MATERIALIZED_VIEW
+ ? schema.plus().getTable(objectName)
+ : null;
+
+ existed = schema != null && schema.removeTable(objectName);
+ if (existed) {
+ if (materializedView instanceof Wrapper) {
+ ((Wrapper) materializedView).maybeUnwrap(MaterializationKey.class)
+ .ifPresent(materializationKey ->
MaterializationService.instance()
+ .removeMaterialization(materializationKey));
+ }
+ } else if (!drop.ifExists) {
+ throw SqlUtil.newContextException(drop.name.getParserPosition(),
+ RESOURCE.tableNotFound(objectName));
+ }
+ break;
+ case DROP_VIEW:
+ // Not quite right: removes any other functions with the same name
+ existed = schema != null && schema.removeFunction(objectName);
+ if (!existed && !drop.ifExists) {
+ throw SqlUtil.newContextException(drop.name.getParserPosition(),
+ RESOURCE.viewNotFound(objectName));
+ }
+ break;
+ case DROP_TYPE:
+ existed = schema != null && schema.removeType(objectName);
+ if (!existed && !drop.ifExists) {
+ throw SqlUtil.newContextException(drop.name.getParserPosition(),
+ RESOURCE.typeNotFound(objectName));
+ }
+ break;
+ case DROP_FUNCTION:
+ existed = schema != null && schema.removeFunction(objectName);
+ if (!existed && !drop.ifExists) {
+ throw SqlUtil.newContextException(drop.name.getParserPosition(),
+ RESOURCE.functionNotFound(objectName));
+ }
+ break;
+ case OTHER_DDL:
+ default:
+ throw new AssertionError(drop.getKind());
+ }
+ }
+
+ /**
+ * Executes a {@code TRUNCATE TABLE} command.
+ */
+ public void execute(SqlTruncateTable truncate,
+ CalcitePrepare.Context context) {
+ final Pair<@Nullable CalciteSchema, String> pair =
+ schema(context, true, truncate.name);
+ if (pair.left == null
+ || pair.left.plus().getTable(pair.right) == null) {
+ throw SqlUtil.newContextException(truncate.name.getParserPosition(),
+ RESOURCE.tableNotFound(pair.right));
+ }
+
+ if (!truncate.continueIdentify) {
+      // Calcite does not support RESTART IDENTIFY
+ throw new UnsupportedOperationException("RESTART IDENTIFY is not
supported");
+ }
+
+ erase(truncate.name, context);
+ }
+
+ /** Executes a {@code CREATE MATERIALIZED VIEW} command. */
+ public void execute(SqlCreateMaterializedView create,
+ CalcitePrepare.Context context) {
+ final Pair<@Nullable CalciteSchema, String> pair =
+ schema(context, true, create.name);
+ if (pair.left != null
+ && pair.left.plus().getTable(pair.right) != null) {
+ // Materialized view exists.
+ if (!create.ifNotExists) {
+ // They did not specify IF NOT EXISTS, so give error.
+ throw SqlUtil.newContextException(create.name.getParserPosition(),
+ RESOURCE.tableExists(pair.right));
+ }
+ return;
+ }
+ final SqlNode q = renameColumns(create.columnList, create.query);
+ final String sql = q.toSqlString(CalciteSqlDialect.DEFAULT).getSql();
+ requireNonNull(pair.left); // TODO: should not assume parent schema exists
+ final List<String> schemaPath = pair.left.path(null);
+ final ViewTableMacro viewTableMacro =
+ ViewTable.viewMacro(pair.left.plus(), sql, schemaPath,
+ context.getObjectPath(), false);
+ final TranslatableTable x = viewTableMacro.apply(ImmutableList.of());
+ final RelDataType rowType = x.getRowType(context.getTypeFactory());
+
+ // Table does not exist. Create it.
+ final DataMaterializedView table =
+ new DataMaterializedView(pair.right, RelDataTypeImpl.proto(rowType),
+ context.getTypeFactory());
+ pair.left.add(pair.right, table);
+ populate(create.name, create.query, context);
+ table.key =
+ MaterializationService.instance().defineMaterialization(pair.left,
null,
+ sql, schemaPath, pair.right, true, true);
+ }
+
+ /** Executes a {@code CREATE SCHEMA} command. */
+ public void execute(SqlCreateSchema create,
+ CalcitePrepare.Context context) {
+ final Pair<@Nullable CalciteSchema, String> pair =
+ schema(context, true, create.name);
+ requireNonNull(pair.left); // TODO: should not assume parent schema exists
+ if (pair.left.plus().getSubSchema(pair.right) != null) {
+ if (create.ifNotExists) {
+ return;
+ }
+ if (!create.getReplace()) {
+ throw SqlUtil.newContextException(create.name.getParserPosition(),
+ RESOURCE.schemaExists(pair.right));
+ }
+ }
+ final Schema subSchema = new AbstractSchema();
+ pair.left.add(pair.right, subSchema);
+ }
+
+ /** Executes a {@code DROP SCHEMA} command. */
+ public void execute(SqlDropSchema drop,
+ CalcitePrepare.Context context) {
+ final Pair<@Nullable CalciteSchema, String> pair =
+ schema(context, false, drop.name);
+ final String name = pair.right;
+ final boolean existed = pair.left != null
+ && pair.left.removeSubSchema(name);
+ if (!existed && !drop.ifExists) {
+ throw SqlUtil.newContextException(drop.name.getParserPosition(),
+ RESOURCE.schemaNotFound(name));
+ }
+ }
+
+ /** Executes a {@code CREATE TABLE} command. */
+ public void execute(SqlCreateTable create,
+ CalcitePrepare.Context context) {
+ final Pair<@Nullable CalciteSchema, String> pair =
+ schema(context, true, create.name);
+ requireNonNull(pair.left); // TODO: should not assume parent schema exists
+ final JavaTypeFactory typeFactory = context.getTypeFactory();
+ final RelDataType queryRowType;
+ if (create.query != null) {
+ // A bit of a hack: pretend it's a view, to get its row type
+ final String sql =
+ create.query.toSqlString(CalciteSqlDialect.DEFAULT).getSql();
+ final ViewTableMacro viewTableMacro =
+ ViewTable.viewMacro(pair.left.plus(), sql, pair.left.path(null),
+ context.getObjectPath(), false);
+ final TranslatableTable x = viewTableMacro.apply(ImmutableList.of());
+ queryRowType = x.getRowType(typeFactory);
+
+ if (create.columnList != null
+ && queryRowType.getFieldCount() != create.columnList.size()) {
+ throw SqlUtil.newContextException(
+ create.columnList.getParserPosition(),
+ RESOURCE.columnCountMismatch());
+ }
+ } else {
+ queryRowType = null;
+ }
+ final List<SqlNode> columnList;
+ if (create.columnList != null) {
+ columnList = create.columnList;
+ } else {
+ if (queryRowType == null) {
+ // "CREATE TABLE t" is invalid; because there is no "AS query" we need
+ // a list of column names and types, "CREATE TABLE t (INT c)".
+ throw SqlUtil.newContextException(create.name.getParserPosition(),
+ RESOURCE.createTableRequiresColumnList());
+ }
+ columnList = new ArrayList<>();
+ for (String name : queryRowType.getFieldNames()) {
+ columnList.add(new SqlIdentifier(name, SqlParserPos.ZERO));
+ }
+ }
+ final ImmutableList.Builder<ColumnDef> b = ImmutableList.builder();
+ final RelDataTypeFactory.Builder builder = typeFactory.builder();
+ final RelDataTypeFactory.Builder storedBuilder = typeFactory.builder();
+ // REVIEW 2019-08-19 Danny Chan: Should we implement the
+ // #validate(SqlValidator) to get the SqlValidator instance?
+ final SqlValidator validator = validator(context, true);
+ for (Ord<SqlNode> c : Ord.zip(columnList)) {
+ if (c.e instanceof SqlColumnDeclaration) {
+ final SqlColumnDeclaration d = (SqlColumnDeclaration) c.e;
+ final RelDataType type = d.dataType.deriveType(validator, true);
+ builder.add(d.name.getSimple(), type);
+ if (d.strategy != ColumnStrategy.VIRTUAL) {
+ storedBuilder.add(d.name.getSimple(), type);
+ }
+ b.add(ColumnDef.of(d.expression, type, d.strategy));
+ } else if (c.e instanceof SqlIdentifier) {
+ final SqlIdentifier id = (SqlIdentifier) c.e;
+ if (queryRowType == null) {
+ throw SqlUtil.newContextException(id.getParserPosition(),
+ RESOURCE.createTableRequiresColumnTypes(id.getSimple()));
+ }
+ final RelDataTypeField f = queryRowType.getFieldList().get(c.i);
+ final ColumnStrategy strategy = f.getType().isNullable()
+ ? ColumnStrategy.NULLABLE
+ : ColumnStrategy.NOT_NULLABLE;
+ b.add(ColumnDef.of(c.e, f.getType(), strategy));
+ builder.add(id.getSimple(), f.getType());
+ storedBuilder.add(id.getSimple(), f.getType());
+ } else {
+ throw new AssertionError(c.e.getClass());
+ }
+ }
+ final RelDataType rowType = builder.build();
+ if (pair.left.plus().getTable(pair.right) != null) {
+ // Table exists.
+ if (create.ifNotExists) {
+ return;
+ }
+ if (!create.getReplace()) {
+ // They did not specify IF NOT EXISTS, so give error.
+ throw SqlUtil.newContextException(create.name.getParserPosition(),
+ RESOURCE.tableExists(pair.right));
+ }
+ }
+ // Table does not exist. Create it.
+ pair.left.add(pair.right,
+ new DataModifiableTable(pair.right,
+ RelDataTypeImpl.proto(rowType), context.getTypeFactory()));
+ if (create.query != null) {
+ populate(create.name, create.query, context);
+ }
+ }
+
+ /** Executes a {@code CREATE TABLE LIKE} command. */
+ public void execute(SqlCreateTableLike create,
+ CalcitePrepare.Context context) {
+ final Pair<@Nullable CalciteSchema, String> pair =
+ schema(context, true, create.name);
+ requireNonNull(pair.left); // TODO: should not assume parent schema exists
+ if (pair.left.plus().getTable(pair.right) != null) {
+ // Table exists.
+ if (create.ifNotExists) {
+ return;
+ }
+ if (!create.getReplace()) {
+ // They did not specify IF NOT EXISTS, so give error.
+ throw SqlUtil.newContextException(create.name.getParserPosition(),
+ RESOURCE.tableExists(pair.right));
+ }
+ }
+
+ final Pair<@Nullable CalciteSchema, String> sourceTablePair =
+ schema(context, true, create.sourceTable);
+ final CalciteSchema schema =
+ // TODO: should not assume parent schema exists
+ requireNonNull(sourceTablePair.left);
+ final String tableName = sourceTablePair.right;
+ final CalciteSchema.TableEntry tableEntry =
+ schema.getTable(tableName, context.config().caseSensitive());
+ final Table table = requireNonNull(tableEntry, "tableEntry").getTable();
+
+ final JavaTypeFactory typeFactory = context.getTypeFactory();
+ final RelDataType rowType = table.getRowType(typeFactory);
+ // Table does not exist. Create it.
+ pair.left.add(pair.right,
+ new DataModifiableTable(pair.right,
+ RelDataTypeImpl.proto(rowType), typeFactory));
+ }
+
+ /** Executes a {@code CREATE TYPE} command. */
+ public void execute(SqlCreateType create,
+ CalcitePrepare.Context context) {
+ final Pair<@Nullable CalciteSchema, String> pair =
+ schema(context, true, create.name);
+ requireNonNull(pair.left); // TODO: should not assume parent schema exists
+ final SqlValidator validator = validator(context, false);
+ pair.left.add(pair.right, typeFactory -> {
+ if (create.dataType != null) {
+ return create.dataType.deriveType(validator);
+ } else {
+ final RelDataTypeFactory.Builder builder = typeFactory.builder();
+ if (create.attributeDefs != null) {
+ for (SqlNode def : create.attributeDefs) {
+ final SqlAttributeDefinition attributeDef =
+ (SqlAttributeDefinition) def;
+ final SqlDataTypeSpec typeSpec = attributeDef.dataType;
+ final RelDataType type = typeSpec.deriveType(validator);
+ builder.add(attributeDef.name.getSimple(), type);
+ }
+ }
+ return builder.build();
+ }
+ });
+ }
+
+ /** Executes a {@code CREATE VIEW} command. */
+ public void execute(SqlCreateView create,
+ CalcitePrepare.Context context) {
+ final Pair<@Nullable CalciteSchema, String> pair =
+ schema(context, true, create.name);
+ requireNonNull(pair.left); // TODO: should not assume parent schema exists
+ final SchemaPlus schemaPlus = pair.left.plus();
+ for (Function function : schemaPlus.getFunctions(pair.right)) {
+ if (function.getParameters().isEmpty()) {
+ if (!create.getReplace()) {
+ throw SqlUtil.newContextException(create.name.getParserPosition(),
+ RESOURCE.viewExists(pair.right));
+ }
+ pair.left.removeFunction(pair.right);
+ }
+ }
+ final SqlNode q = renameColumns(create.columnList, create.query);
+ final String sql = q.toSqlString(CalciteSqlDialect.DEFAULT).getSql();
+ final ViewTableMacro viewTableMacro =
+ ViewTable.viewMacro(schemaPlus, sql, pair.left.path(null),
+ context.getObjectPath(), false);
+ final TranslatableTable x = viewTableMacro.apply(ImmutableList.of());
+ Util.discard(x);
+ schemaPlus.add(pair.right, viewTableMacro);
+ }
+
+ /**
+ * Initializes columns based on the source {@link
InitializerExpressionFactory} and like options.
+ */
+ private static class CopiedTableInitializerExpressionFactory
Review Comment:
## Unused classes and interfaces
Unused class: CopiedTableInitializerExpressionFactory is not referenced
within this codebase. If not used as an external API it should be removed.
[Show more
details](https://github.com/apache/incubator-baremaps/security/code-scanning/1736)
##########
baremaps-core/src/main/java/org/apache/baremaps/tasks/ImportShapefile.java:
##########
@@ -69,18 +73,117 @@
*/
@Override
public void execute(WorkflowContext context) throws Exception {
+ // Validate required parameters
+ if (file == null) {
+ throw new WorkflowException("Shapefile path cannot be null");
+ }
+ if (fileSrid == null) {
+ throw new WorkflowException("Source SRID cannot be null");
+ }
+ if (database == null) {
+ throw new WorkflowException("Database connection cannot be null");
+ }
+ if (databaseSrid == null) {
+ throw new WorkflowException("Target SRID cannot be null");
+ }
+
var path = file.toAbsolutePath();
+ logger.info("Importing shapefile from: {}", path);
+
+ var dataSource = context.getDataSource(database);
+ // Sanitize table name to prevent SQL injection
+ var tableName = sanitizeTableName(
+ file.getFileName().toString().replaceFirst("[.][^.]+$",
"").toLowerCase());
+ logger.info("Creating table: {}", tableName);
+
+ // Set ThreadLocal DataSource for PostgresDdlExecutor to use
+ PostgresDdlExecutor.setThreadLocalDataSource(dataSource);
+
try {
- var shapefileDataTable = new ShapefileDataTable(path);
- var dataSource = context.getDataSource(database);
- var postgresDataStore = new PostgresDataStore(dataSource);
- var rowTransformer = new DataTableGeometryMapper(shapefileDataTable,
- new ProjectionTransformer(fileSrid, databaseSrid));
- var transformedDataTable = new DataTableMapper(shapefileDataTable,
rowTransformer);
- postgresDataStore.add(transformedDataTable);
- } catch (Exception e) {
- throw new WorkflowException(e);
+ // Setup Calcite connection properties
+ Properties info = new Properties();
+ info.setProperty("lex", "MYSQL");
+ info.setProperty("caseSensitive", "false");
+ info.setProperty("unquotedCasing", "TO_LOWER");
+ info.setProperty("quotedCasing", "TO_LOWER");
+ info.setProperty("parserFactory", PostgresDdlExecutor.class.getName() +
"#PARSER_FACTORY");
+
+ // Create a ShapefileTable instance
+ ShapefileTable shapefileTable = new ShapefileTable(path.toFile());
+
+ // Create a temporary table name for the shapefile data
+ String shapefileTableName = "shapefile_data_" +
System.currentTimeMillis();
+
+ try (Connection connection =
DriverManager.getConnection("jdbc:calcite:", info)) {
+ CalciteConnection calciteConnection =
connection.unwrap(CalciteConnection.class);
+ SchemaPlus rootSchema = calciteConnection.getRootSchema();
+
+ // Register the shapefile table in the Calcite schema
+ rootSchema.add(shapefileTableName, shapefileTable);
+
+ // Create a table in PostgreSQL by selecting from the shapefile table
+ String createTableSql = "CREATE TABLE " + tableName + " AS " +
+ "SELECT * FROM " + shapefileTableName;
+
+ logger.info("Executing SQL: {}", createTableSql);
+
+ // Execute the DDL statement to create the table
+ try (Statement statement = connection.createStatement()) {
+ statement.execute(createTableSql);
+ }
+
+ // Set SRID on geometry column if specified
+ if (databaseSrid != null) {
+ try (Connection pgConnection = dataSource.getConnection();
+ Statement stmt = pgConnection.createStatement()) {
+ stmt.execute(String.format(
+ "SELECT UpdateGeometrySRID('%s', 'geometry', %d)",
+ tableName, databaseSrid));
+ }
+ }
+
+ // Verify that the table was created in PostgreSQL
+ try (Connection pgConnection = dataSource.getConnection();
+ Statement statement = pgConnection.createStatement();
+ ResultSet resultSet = statement.executeQuery(
+ "SELECT EXISTS (SELECT 1 FROM information_schema.tables WHERE
table_name = '" +
+ tableName + "')")) {
Review Comment:
## Query built by concatenation with a possibly-untrusted string
Query built by concatenation with [this expression](1), which may be
untrusted.
[Show more
details](https://github.com/apache/incubator-baremaps/security/code-scanning/1733)
##########
baremaps-calcite/src/main/java/org/apache/baremaps/calcite/BaremapsTableFactory.java:
##########
@@ -0,0 +1,340 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.baremaps.calcite;
+
+import java.io.ByteArrayInputStream;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.nio.ByteBuffer;
+import java.nio.MappedByteBuffer;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.util.Map;
+import org.apache.baremaps.calcite.csv.CsvTable;
+import org.apache.baremaps.calcite.data.DataModifiableTable;
+import org.apache.baremaps.calcite.data.DataRow;
+import org.apache.baremaps.calcite.data.DataRowType;
+import org.apache.baremaps.calcite.data.DataSchema;
+import org.apache.baremaps.calcite.flatgeobuf.FlatGeoBufTable;
+import org.apache.baremaps.calcite.geopackage.GeoPackageTable;
+import org.apache.baremaps.calcite.geoparquet.GeoParquetTable;
+import org.apache.baremaps.calcite.openstreetmap.OpenStreetMapTable;
+import org.apache.baremaps.calcite.rpsl.RpslTable;
+import org.apache.baremaps.calcite.shapefile.ShapefileTable;
+import org.apache.baremaps.data.collection.AppendOnlyLog;
+import org.apache.baremaps.data.collection.DataCollection;
+import org.apache.baremaps.data.memory.Memory;
+import org.apache.baremaps.data.memory.MemoryMappedDirectory;
+import org.apache.baremaps.openstreetmap.pbf.PbfEntityReader;
+import org.apache.baremaps.openstreetmap.xml.XmlEntityReader;
+import org.apache.calcite.jdbc.JavaTypeFactoryImpl;
+import org.apache.calcite.rel.type.RelDataType;
+import org.apache.calcite.rel.type.RelDataTypeFactory;
+import org.apache.calcite.rel.type.RelDataTypeImpl;
+import org.apache.calcite.rel.type.RelProtoDataType;
+import org.apache.calcite.schema.SchemaPlus;
+import org.apache.calcite.schema.Table;
+import org.apache.calcite.schema.TableFactory;
+
+/**
+ * A table factory for creating tables in the calcite package.
+ */
+public class BaremapsTableFactory implements TableFactory<Table> {
+
  /**
   * Constructor.
   *
   * <p>The factory is stateless; a single instance may be registered for the
   * whole Calcite model.
   */
  public BaremapsTableFactory() {}
+
+ @Override
+ public Table create(
+ SchemaPlus schema,
+ String name,
+ Map<String, Object> operand,
+ RelDataType rowType) {
+ final RelProtoDataType protoRowType =
+ rowType != null ? RelDataTypeImpl.proto(rowType) : null;
Review Comment:
## Unread local variable
Variable 'RelProtoDataType protoRowType' is never read.
[Show more
details](https://github.com/apache/incubator-baremaps/security/code-scanning/1737)
##########
baremaps-core/src/test/java/org/apache/baremaps/integration/GeoParquetToPostgresTest.java:
##########
@@ -17,36 +17,112 @@
package org.apache.baremaps.integration;
-import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
-import org.apache.baremaps.geoparquet.GeoParquetDataStore;
-import org.apache.baremaps.postgres.store.PostgresDataStore;
+import java.sql.Connection;
+import java.sql.DatabaseMetaData;
+import java.sql.DriverManager;
+import java.sql.ResultSet;
+import java.sql.Statement;
+import java.util.Properties;
+import org.apache.baremaps.calcite.geoparquet.GeoParquetSchema;
+import org.apache.baremaps.calcite.postgres.PostgresDdlExecutor;
import org.apache.baremaps.testing.PostgresContainerTest;
import org.apache.baremaps.testing.TestFiles;
+import org.apache.calcite.jdbc.CalciteConnection;
+import org.apache.calcite.schema.Schema;
+import org.apache.calcite.schema.SchemaPlus;
import org.junit.jupiter.api.Tag;
import org.junit.jupiter.api.Test;
class GeoParquetToPostgresTest extends PostgresContainerTest {
@Test
@Tag("integration")
- void copyGeoParquetToPostgres() {
+ void copyGeoParquetToPostgres() throws Exception {
// Open the GeoParquet
var uri =
TestFiles.resolve("baremaps-testing/data/samples/example.parquet").toUri();
- var geoParquetSchema = new GeoParquetDataStore(uri);
- var tables = geoParquetSchema.list();
- var geoParquetTable = geoParquetSchema.get(tables.get(0));
- // Copy the table to Postgres
- var postgresStore = new PostgresDataStore(dataSource());
- postgresStore.add("geoparquet", geoParquetTable);
+ // Set ThreadLocal DataSource for PostgresDdlExecutor to use
+ PostgresDdlExecutor.setThreadLocalDataSource(dataSource());
- // Check the table in Postgres
- var postgresTable = postgresStore.get("geoparquet");
+ try {
+ // Setup Calcite connection properties
+ Properties info = new Properties();
+ info.setProperty("lex", "MYSQL");
+ info.setProperty("caseSensitive", "false");
+ info.setProperty("unquotedCasing", "TO_LOWER");
+ info.setProperty("quotedCasing", "TO_LOWER");
+ info.setProperty("parserFactory", PostgresDdlExecutor.class.getName() +
"#PARSER_FACTORY");
- assertEquals("geoparquet", postgresTable.schema().name());
- assertEquals(7, postgresTable.schema().columns().size());
- assertEquals(5L, postgresTable.size());
- assertEquals(5L, postgresTable.stream().count());
+ // Create a connection to Calcite
+ try (Connection connection =
DriverManager.getConnection("jdbc:calcite:", info)) {
+ CalciteConnection calciteConnection =
connection.unwrap(CalciteConnection.class);
+ SchemaPlus rootSchema = calciteConnection.getRootSchema();
+
+ // Register the GeoParquet schema
+ Schema geoParquetSchema = new GeoParquetSchema(uri);
+ rootSchema.add("geoparquet", geoParquetSchema);
+
+ // Get the list of tables in the GeoParquet
+ String[] tables = getGeoParquetTables(connection);
+
+ assertTrue(tables.length > 0, "No tables found in GeoParquet");
+
+ // Import each table
+ for (String tableName : tables) {
+ // Register the GeoParquet table in the Calcite schema
+ String registerSql = "CREATE TABLE " + tableName + " AS " +
+ "SELECT * FROM geoparquet." + tableName;
+
+ // Execute the DDL statement to create the table
+ try (Statement statement = connection.createStatement()) {
+ statement.execute(registerSql);
+ }
+
+ // Verify that the table was created in PostgreSQL
+ try (Connection pgConnection = dataSource().getConnection();
+ Statement statement = pgConnection.createStatement();
+ ResultSet resultSet = statement.executeQuery(
+ "SELECT EXISTS (SELECT 1 FROM information_schema.tables
WHERE table_name = '" +
+ tableName + "')")) {
Review Comment:
## Query built by concatenation with a possibly-untrusted string
Query built by concatenation with [this expression](1), which may be
untrusted.
[Show more
details](https://github.com/apache/incubator-baremaps/security/code-scanning/1735)
##########
baremaps-calcite/src/main/java/org/apache/baremaps/calcite/geopackage/GeoPackageTable.java:
##########
@@ -0,0 +1,374 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.baremaps.calcite.geopackage;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Date;
+import java.util.List;
+import mil.nga.geopackage.GeoPackage;
+import mil.nga.geopackage.GeoPackageManager;
+import mil.nga.geopackage.features.user.FeatureColumn;
+import mil.nga.geopackage.features.user.FeatureDao;
+import mil.nga.geopackage.features.user.FeatureResultSet;
+import mil.nga.geopackage.geom.GeoPackageGeometryData;
+import mil.nga.sf.Geometry;
+import mil.nga.sf.GeometryCollection;
+import mil.nga.sf.LineString;
+import mil.nga.sf.MultiLineString;
+import mil.nga.sf.MultiPoint;
+import mil.nga.sf.MultiPolygon;
+import mil.nga.sf.Point;
+import mil.nga.sf.Polygon;
+import org.apache.calcite.DataContext;
+import org.apache.calcite.linq4j.AbstractEnumerable;
+import org.apache.calcite.linq4j.Enumerable;
+import org.apache.calcite.linq4j.Enumerator;
+import org.apache.calcite.rel.type.RelDataType;
+import org.apache.calcite.rel.type.RelDataTypeFactory;
+import org.apache.calcite.schema.ScannableTable;
+import org.apache.calcite.schema.impl.AbstractTable;
+import org.apache.calcite.sql.type.SqlTypeName;
+import org.locationtech.jts.geom.Coordinate;
+import org.locationtech.jts.geom.GeometryFactory;
+import org.locationtech.jts.geom.PrecisionModel;
+
+/**
+ * A Calcite table implementation for GeoPackage data. This table reads data
from a GeoPackage file
+ * and makes it available through the Apache Calcite framework for SQL
querying.
+ */
+public class GeoPackageTable extends AbstractTable implements ScannableTable {
+
+ private final File file;
+ private final String tableName;
+ private final RelDataType rowType;
+ private final FeatureDao featureDao;
+ private final GeometryFactory geometryFactory;
+
  /**
   * Constructs a GeoPackageTable with the specified file and table name.
   *
   * <p>Delegates to the full constructor with a default Calcite
   * {@code JavaTypeFactoryImpl}.
   *
   * @param file the GeoPackage file to read data from
   * @param tableName the name of the table in the GeoPackage
   * @throws IOException if an I/O error occurs
   */
  public GeoPackageTable(File file, String tableName) throws IOException {
    this(file, tableName, new org.apache.calcite.jdbc.JavaTypeFactoryImpl());
  }
+
  /**
   * Constructs a GeoPackageTable with the specified file, table name, and type factory.
   *
   * @param file the GeoPackage file to read data from
   * @param tableName the name of the table in the GeoPackage
   * @param typeFactory the type factory used to build the Calcite row type
   * @throws IOException if an I/O error occurs
   */
  public GeoPackageTable(File file, String tableName, RelDataTypeFactory typeFactory)
      throws IOException {
    this.file = file;
    this.tableName = tableName;

    // Open the GeoPackage file
    // NOTE(review): the GeoPackage handle is neither stored nor closed; the
    // FeatureDao keeps using it, but the underlying resource appears to leak.
    // Consider keeping a reference and making this table closeable — confirm
    // the lifecycle expected by callers.
    GeoPackage geoPackage = GeoPackageManager.open(file);
    this.featureDao = geoPackage.getFeatureDao(tableName);

    // Create a geometry factory with the SRS from the feature DAO
    // (the SRS id is used as the JTS SRID).
    this.geometryFactory = new GeometryFactory(
        new PrecisionModel(),
        (int) featureDao.getSrs().getId());

    // Create the row type based on the feature columns
    this.rowType = createRowType(typeFactory);
  }
+
+ /**
+ * Creates a row type based on the feature columns.
+ *
+ * @param typeFactory the type factory
+ * @return the row type
+ */
+ private RelDataType createRowType(RelDataTypeFactory typeFactory) {
+ List<RelDataType> types = new ArrayList<>();
+ List<String> names = new ArrayList<>();
+
+ for (FeatureColumn column : featureDao.getColumns()) {
+ String columnName = column.getName();
+ RelDataType sqlType;
+
+ if (column.isGeometry()) {
+ // For geometry columns, use a proper geometry type
+ sqlType =
typeFactory.createJavaType(org.locationtech.jts.geom.Geometry.class);
+ } else {
+ // Map Java types to SQL types
+ Class<?> javaType = column.getDataType().getClassType();
+ if (javaType == String.class) {
+ sqlType = typeFactory.createSqlType(SqlTypeName.VARCHAR);
+ } else if (javaType == Integer.class || javaType == int.class) {
+ sqlType = typeFactory.createSqlType(SqlTypeName.INTEGER);
+ } else if (javaType == Long.class || javaType == long.class) {
+ sqlType = typeFactory.createSqlType(SqlTypeName.BIGINT);
+ } else if (javaType == Double.class || javaType == double.class) {
+ sqlType = typeFactory.createSqlType(SqlTypeName.DOUBLE);
+ } else if (javaType == Float.class || javaType == float.class) {
+ sqlType = typeFactory.createSqlType(SqlTypeName.FLOAT);
+ } else if (javaType == Boolean.class || javaType == boolean.class) {
+ sqlType = typeFactory.createSqlType(SqlTypeName.BOOLEAN);
+ } else if (javaType == Date.class) {
+ sqlType = typeFactory.createSqlType(SqlTypeName.TIMESTAMP);
+ } else {
+ // Default to VARCHAR for unknown types
+ sqlType = typeFactory.createSqlType(SqlTypeName.VARCHAR);
+ }
+ }
+
+ // Handle nullability
+ if (!column.isNotNull()) {
+ sqlType = typeFactory.createTypeWithNullability(sqlType, true);
+ }
+
+ types.add(sqlType);
+ names.add(columnName);
+ }
+
+ return typeFactory.createStructType(types, names);
+ }
+
  /** {@inheritDoc} */
  @Override
  public RelDataType getRowType(RelDataTypeFactory typeFactory) {
    // The row type is computed once in the constructor; the factory argument
    // passed here is ignored.
    return rowType;
  }
+
  /** {@inheritDoc} */
  @Override
  public Enumerable<Object[]> scan(DataContext root) {
    return new AbstractEnumerable<>() {
      @Override
      public Enumerator<Object[]> enumerator() {
        try {
          // queryForAll opens a fresh cursor over every feature in the table;
          // the returned enumerator is responsible for closing it.
          return new GeoPackageEnumerator(featureDao.queryForAll());
        } catch (Exception e) {
          throw new RuntimeException("Failed to create GeoPackage enumerator", e);
        }
      }
    };
  }
+
+ /**
+ * Enumerator for GeoPackage data.
+ */
+ private class GeoPackageEnumerator implements Enumerator<Object[]> {
+
+ private final FeatureResultSet featureResultSet;
+ private boolean hasNext;
+
+ public GeoPackageEnumerator(FeatureResultSet featureResultSet) {
+ this.featureResultSet = featureResultSet;
+ this.hasNext = featureResultSet.moveToFirst();
+ }
+
+ @Override
+ public Object[] current() {
+ if (!hasNext) {
+ return new Object[0];
+ }
+
+ Object[] values = new
Object[featureResultSet.getColumns().getColumns().size()];
+ int i = 0;
+
+ for (FeatureColumn column : featureResultSet.getColumns().getColumns()) {
+ Object value = featureResultSet.getValue(column);
+ values[i++] = convertValue(value);
+ }
+
+ return values;
+ }
+
+ @Override
+ public boolean moveNext() {
+ if (!hasNext) {
+ return false;
+ }
+
+ hasNext = featureResultSet.moveToNext();
+ return hasNext;
+ }
+
+ @Override
+ public void reset() {
+ featureResultSet.moveToFirst();
+ hasNext = true;
+ }
+
+ @Override
+ public void close() {
+ featureResultSet.close();
+ }
+ }
+
+ /**
+ * Converts a GeoPackage value to a Java value.
+ *
+ * @param value the GeoPackage value
+ * @return the Java value
+ */
+ private Object convertValue(Object value) {
+ if (value == null) {
+ return null;
+ }
+
+ if (value instanceof GeoPackageGeometryData) {
+ GeoPackageGeometryData geometryData = (GeoPackageGeometryData) value;
+ return convertGeometry(geometryData.getGeometry());
+ }
+
+ return value;
+ }
+
+ /**
+ * Converts a GeoPackage geometry to a JTS geometry.
+ *
+ * @param geometry the GeoPackage geometry
+ * @return the JTS geometry
+ */
+ private org.locationtech.jts.geom.Geometry convertGeometry(Geometry
geometry) {
+ if (geometry == null) {
+ return null;
+ }
+
+ if (geometry instanceof Point point) {
Review Comment:
## Chain of 'instanceof' tests
This if block performs a chain of 7 type tests - consider alternatives, e.g.
polymorphism or the visitor pattern.
[Show more
details](https://github.com/apache/incubator-baremaps/security/code-scanning/1707)
##########
baremaps-core/src/test/java/org/apache/baremaps/integration/GeoPackageToPostgresTest.java:
##########
@@ -17,34 +17,109 @@
package org.apache.baremaps.integration;
-import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
-import org.apache.baremaps.geopackage.GeoPackageDataStore;
-import org.apache.baremaps.postgres.store.PostgresDataStore;
+import java.sql.Connection;
+import java.sql.DatabaseMetaData;
+import java.sql.DriverManager;
+import java.sql.ResultSet;
+import java.sql.Statement;
+import java.util.Properties;
+import org.apache.baremaps.calcite.geopackage.GeoPackageSchema;
+import org.apache.baremaps.calcite.postgres.PostgresDdlExecutor;
import org.apache.baremaps.testing.PostgresContainerTest;
import org.apache.baremaps.testing.TestFiles;
+import org.apache.calcite.jdbc.CalciteConnection;
+import org.apache.calcite.schema.Schema;
+import org.apache.calcite.schema.SchemaPlus;
import org.junit.jupiter.api.Tag;
import org.junit.jupiter.api.Test;
class GeoPackageToPostgresTest extends PostgresContainerTest {
@Test
@Tag("integration")
- void copyGeoPackageToPostgres() {
- // Open the GeoPackage
- var file =
TestFiles.resolve("baremaps-testing/data/samples/countries.gpkg");
- var geoPackageSchema = new GeoPackageDataStore(file);
- var geoPackageTable = geoPackageSchema.get("countries");
-
- // Copy the table to Postgres
- var postgresStore = new PostgresDataStore(dataSource());
- postgresStore.add(geoPackageTable);
-
- // Check the table in Postgres
- var postgresTable = postgresStore.get("countries");
- assertEquals("countries", postgresTable.schema().name());
- assertEquals(4, postgresTable.schema().columns().size());
- assertEquals(179l, postgresTable.size());
- assertEquals(179l, postgresTable.stream().count());
+ void copyGeoPackageToPostgres() throws Exception {
+ // Set ThreadLocal DataSource for PostgresDdlExecutor to use
+ PostgresDdlExecutor.setThreadLocalDataSource(dataSource());
+
+ try {
+ // Setup Calcite connection properties
+ Properties info = new Properties();
+ info.setProperty("lex", "MYSQL");
+ info.setProperty("caseSensitive", "false");
+ info.setProperty("unquotedCasing", "TO_LOWER");
+ info.setProperty("quotedCasing", "TO_LOWER");
+ info.setProperty("parserFactory", PostgresDdlExecutor.class.getName() +
"#PARSER_FACTORY");
+
+ // Create a connection to Calcite
+ try (Connection connection =
DriverManager.getConnection("jdbc:calcite:", info)) {
+ CalciteConnection calciteConnection =
connection.unwrap(CalciteConnection.class);
+ SchemaPlus rootSchema = calciteConnection.getRootSchema();
+
+ // Register the GeoPackage schema
+ Schema geoPackageSchema = new
GeoPackageSchema(TestFiles.GEOPACKAGE.toFile());
+ rootSchema.add("geopackage", geoPackageSchema);
+
+ // Get the list of tables in the GeoPackage
+ String[] tables = getGeoPackageTables(connection);
+
+ assertTrue(tables.length > 0, "No tables found in GeoPackage");
+
+ // Import each table
+ for (String tableName : tables) {
+ // Register the GeoPackage table in the Calcite schema
+ String registerSql = "CREATE TABLE " + tableName + " AS " +
+ "SELECT * FROM geopackage." + tableName;
+
+ // Execute the DDL statement to create the table
+ try (Statement statement = connection.createStatement()) {
+ statement.execute(registerSql);
+ }
+
+ // Verify that the table was created in PostgreSQL
+ try (Connection pgConnection = dataSource().getConnection();
+ Statement statement = pgConnection.createStatement();
+ ResultSet resultSet = statement.executeQuery(
+ "SELECT EXISTS (SELECT 1 FROM information_schema.tables
WHERE table_name = '" +
+ tableName + "')")) {
Review Comment:
## Query built by concatenation with a possibly-untrusted string
Query built by concatenation with [this expression](1), which may be
untrusted.
[Show more
details](https://github.com/apache/incubator-baremaps/security/code-scanning/1734)
##########
baremaps-core/src/main/java/org/apache/baremaps/tasks/ImportGeoParquet.java:
##########
@@ -68,28 +70,148 @@
*/
@Override
public void execute(WorkflowContext context) throws Exception {
- var geoParquetDataStore = new GeoParquetDataStore(uri);
+ // Validate required parameters
+ if (uri == null) {
+ throw new WorkflowException("GeoParquet URI cannot be null");
+ }
+ if (tableName == null || tableName.isEmpty()) {
+ throw new WorkflowException("Table name cannot be null or empty");
+ }
+ if (database == null) {
+ throw new WorkflowException("Database connection cannot be null");
+ }
+ if (databaseSrid == null) {
+ throw new WorkflowException("Target SRID cannot be null");
+ }
+
+ logger.info("Importing GeoParquet from: {}", uri);
+
var dataSource = context.getDataSource(database);
- var postgresDataStore = new PostgresDataStore(dataSource);
- for (var name : geoParquetDataStore.list()) {
- var geoParquetTable = (GeoParquetDataTable)
geoParquetDataStore.get(name);
- var projectionTransformer =
- new ProjectionTransformer(geoParquetTable.srid("geometry"),
databaseSrid);
- var rowTransformer =
- new DataTableGeometryMapper(geoParquetTable, projectionTransformer);
- var transformedDataTable =
- new DataTableMapper(geoParquetDataStore.get(name), rowTransformer);
- postgresDataStore.add(tableName, transformedDataTable);
+
+ // Sanitize table name to prevent SQL injection
+ String sanitizedTableName = sanitizeTableName(tableName);
+ logger.info("Creating table: {}", sanitizedTableName);
+
+ // Set ThreadLocal DataSource for PostgresDdlExecutor to use
+ PostgresDdlExecutor.setThreadLocalDataSource(dataSource);
+
+ try {
+ // Setup Calcite connection properties
+ Properties info = new Properties();
+ info.setProperty("lex", "MYSQL");
+ info.setProperty("caseSensitive", "false");
+ info.setProperty("unquotedCasing", "TO_LOWER");
+ info.setProperty("quotedCasing", "TO_LOWER");
+ info.setProperty("parserFactory", PostgresDdlExecutor.class.getName() +
"#PARSER_FACTORY");
+
+ // Create a connection to Calcite
+ try (Connection connection =
DriverManager.getConnection("jdbc:calcite:", info)) {
+
+ // Get the list of tables in the GeoParquet
+ String[] tables = getGeoParquetTables(connection);
+
+ if (tables.length == 0) {
+ logger.warn("No tables found in GeoParquet: {}", uri);
+ return;
+ }
+
+ // Import each table
+ for (String sourceTableName : tables) {
+ // Create a temporary table name for the GeoParquet data
+ String tempTableName =
+ "geoparquet_data_" + System.currentTimeMillis() + "_" +
sourceTableName;
+
+ // Register the GeoParquet table in the Calcite schema
+ String registerSql = "CREATE TABLE " + tempTableName + " AS " +
+ "SELECT * FROM " + sourceTableName;
+
+ logger.info("Executing SQL: {}", registerSql);
+
+ // Execute the DDL statement to create the table
+ try (Statement statement = connection.createStatement()) {
+ statement.execute(registerSql);
+ }
+
+ // Set SRID on geometry column if specified
+ try (Connection pgConnection = dataSource.getConnection();
+ Statement stmt = pgConnection.createStatement()) {
+ stmt.execute(String.format(
+ "SELECT UpdateGeometrySRID('%s', 'geometry', %d)",
+ sanitizedTableName, databaseSrid));
+ }
+
+ // Verify that the table was created in PostgreSQL
+ try (Connection pgConnection = dataSource.getConnection();
+ Statement statement = pgConnection.createStatement();
+ ResultSet resultSet = statement.executeQuery(
+ "SELECT EXISTS (SELECT 1 FROM information_schema.tables
WHERE table_name = '" +
+ sanitizedTableName + "')")) {
Review Comment:
## Query built by concatenation with a possibly-untrusted string
Query built by concatenation with [this expression](1), which may be
untrusted.
[Show more
details](https://github.com/apache/incubator-baremaps/security/code-scanning/1732)
##########
baremaps-core/src/main/java/org/apache/baremaps/tasks/ImportShapefile.java:
##########
@@ -69,18 +73,117 @@
*/
@Override
public void execute(WorkflowContext context) throws Exception {
+ // Validate required parameters
+ if (file == null) {
+ throw new WorkflowException("Shapefile path cannot be null");
+ }
+ if (fileSrid == null) {
+ throw new WorkflowException("Source SRID cannot be null");
+ }
+ if (database == null) {
+ throw new WorkflowException("Database connection cannot be null");
+ }
+ if (databaseSrid == null) {
+ throw new WorkflowException("Target SRID cannot be null");
+ }
+
var path = file.toAbsolutePath();
+ logger.info("Importing shapefile from: {}", path);
+
+ var dataSource = context.getDataSource(database);
+ // Sanitize table name to prevent SQL injection
+ var tableName = sanitizeTableName(
+ file.getFileName().toString().replaceFirst("[.][^.]+$",
"").toLowerCase());
+ logger.info("Creating table: {}", tableName);
+
+ // Set ThreadLocal DataSource for PostgresDdlExecutor to use
+ PostgresDdlExecutor.setThreadLocalDataSource(dataSource);
+
try {
- var shapefileDataTable = new ShapefileDataTable(path);
- var dataSource = context.getDataSource(database);
- var postgresDataStore = new PostgresDataStore(dataSource);
- var rowTransformer = new DataTableGeometryMapper(shapefileDataTable,
- new ProjectionTransformer(fileSrid, databaseSrid));
- var transformedDataTable = new DataTableMapper(shapefileDataTable,
rowTransformer);
- postgresDataStore.add(transformedDataTable);
- } catch (Exception e) {
- throw new WorkflowException(e);
+ // Setup Calcite connection properties
+ Properties info = new Properties();
+ info.setProperty("lex", "MYSQL");
+ info.setProperty("caseSensitive", "false");
+ info.setProperty("unquotedCasing", "TO_LOWER");
+ info.setProperty("quotedCasing", "TO_LOWER");
+ info.setProperty("parserFactory", PostgresDdlExecutor.class.getName() +
"#PARSER_FACTORY");
+
+ // Create a ShapefileTable instance
+ ShapefileTable shapefileTable = new ShapefileTable(path.toFile());
+
+ // Create a temporary table name for the shapefile data
+ String shapefileTableName = "shapefile_data_" +
System.currentTimeMillis();
+
+ try (Connection connection =
DriverManager.getConnection("jdbc:calcite:", info)) {
+ CalciteConnection calciteConnection =
connection.unwrap(CalciteConnection.class);
+ SchemaPlus rootSchema = calciteConnection.getRootSchema();
+
+ // Register the shapefile table in the Calcite schema
+ rootSchema.add(shapefileTableName, shapefileTable);
+
+ // Create a table in PostgreSQL by selecting from the shapefile table
+ String createTableSql = "CREATE TABLE " + tableName + " AS " +
+ "SELECT * FROM " + shapefileTableName;
+
+ logger.info("Executing SQL: {}", createTableSql);
+
+ // Execute the DDL statement to create the table
+ try (Statement statement = connection.createStatement()) {
+ statement.execute(createTableSql);
+ }
+
+ // Set SRID on geometry column if specified
+ if (databaseSrid != null) {
Review Comment:
## Useless null check
This check is useless. [databaseSrid](1) cannot be null at this check, since
it is guarded by [... == ...](2).
[Show more
details](https://github.com/apache/incubator-baremaps/security/code-scanning/1710)
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]