This is an automated email from the ASF dual-hosted git repository.
desruisseaux pushed a commit to branch geoapi-4.0
in repository https://gitbox.apache.org/repos/asf/sis.git
The following commit(s) were added to refs/heads/geoapi-4.0 by this push:
new 42de0f8 Remove CachedResultSet. Examination of the code shows that
either an iteration on cached values happened only once, or when many
iterations happened they were filtered by table and schema names. That
filtering can be done by DatabaseMetaData directly.
42de0f8 is described below
commit 42de0f8eb038f1bb0cfa150e019afc1cebbf214c
Author: Martin Desruisseaux <[email protected]>
AuthorDate: Thu Jul 5 11:56:23 2018 +0200
Remove CachedResultSet. Examination of the code shows that either an
iteration on cached values happened only once, or when many iterations
happened they were filtered by table and schema names. That filtering
can be done by DatabaseMetaData directly.
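
The replacement pattern, condensed from the DataBaseModel changes below
(simplified; see the diff for the real code):

    // Instead of copying rows into a CachedResultSet and filtering the
    // copies in memory, let DatabaseMetaData filter by schema and table
    // name patterns directly:
    try (ResultSet reflect = metadata.getTables(null, schemaPattern, tableNamePattern,
            new String[] {TYPE_TABLE, TYPE_VIEW})) {
        while (reflect.next()) {
            // Each row describes one table or view, already filtered.
            analyzeTable(reflect);
        }
    }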
---
.../sis/internal/sql/feature/CachedResultSet.java | 73 ---
.../sis/internal/sql/feature/ColumnMetaModel.java | 7 -
.../sis/internal/sql/feature/DataBaseModel.java | 570 ++++++++-------------
.../apache/sis/internal/sql/feature/Dialect.java | 26 +-
.../sis/internal/sql/feature/InsertRelation.java | 35 --
.../internal/sql/feature/RelationMetaModel.java | 2 +-
.../sis/internal/sql/postgres/PostgresDialect.java | 8 +-
7 files changed, 214 insertions(+), 507 deletions(-)
diff --git a/storage/sis-sql/src/main/java/org/apache/sis/internal/sql/feature/CachedResultSet.java b/storage/sis-sql/src/main/java/org/apache/sis/internal/sql/feature/CachedResultSet.java
deleted file mode 100644
index 4aba4fc..0000000
--- a/storage/sis-sql/src/main/java/org/apache/sis/internal/sql/feature/CachedResultSet.java
+++ /dev/null
@@ -1,73 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.sis.internal.sql.feature;
-
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-
-/**
- * A cache of {@link ResultSet} content.
- *
- * @author Johann Sorel (Geomatys)
- * @version 1.0
- * @since 1.0
- * @module
- *
- * @todo Current implementation consumes more memory than needed, with the construction of a hash map for each record.
- * The construction of {@code DenseFeature} instances would be more efficient. Furthermore this construct reads
- * all records at construction time. We should consider lazy population instead.
- */
-final class CachedResultSet {
- /**
- * All records read by the SQL query, as (column, value) pairs.
- */
- final List<Map<String,Object>> records;
-
- /**
- * Creates an initially empty set.
- */
- CachedResultSet() {
- records = new ArrayList<>();
- }
-
- /**
- * Creates a set initialized with the given content.
- */
- CachedResultSet(final ResultSet rs, final String... columns) throws SQLException {
- records = new ArrayList<>(columns.length);
- append(rs, columns);
- }
-
- /**
- * Appends the given content to this set.
- */
- void append(final ResultSet rs, final String... columns) throws SQLException {
- while (rs.next()) {
- final Map<String,Object> record = new HashMap<>();
- for (final String col : columns) {
- record.put(col, rs.getObject(col));
- }
- records.add(record);
- }
- rs.close();
- }
-}
diff --git a/storage/sis-sql/src/main/java/org/apache/sis/internal/sql/feature/ColumnMetaModel.java b/storage/sis-sql/src/main/java/org/apache/sis/internal/sql/feature/ColumnMetaModel.java
index 9b86ac3..0d11928 100644
--- a/storage/sis-sql/src/main/java/org/apache/sis/internal/sql/feature/ColumnMetaModel.java
+++ b/storage/sis-sql/src/main/java/org/apache/sis/internal/sql/feature/ColumnMetaModel.java
@@ -38,13 +38,6 @@ import org.apache.sis.internal.metadata.sql.SQLBuilder;
*/
public final class ColumnMetaModel {
/**
- * Description of the attribute holding native SRID associated to a certain descriptor.
- */
- static final AttributeType<Integer> JDBC_PROPERTY_SRID = new DefaultAttributeType<>(
- Collections.singletonMap(DefaultAttributeType.NAME_KEY, "nativeSRID"),
- Integer.class, 1, 1, null);
-
- /**
* Description of the attribute telling whether a field is unique in the database.
*/
static final AttributeType<Boolean> JDBC_PROPERTY_UNIQUE = new DefaultAttributeType<>(
diff --git a/storage/sis-sql/src/main/java/org/apache/sis/internal/sql/feature/DataBaseModel.java b/storage/sis-sql/src/main/java/org/apache/sis/internal/sql/feature/DataBaseModel.java
index 4e3d86d..d20f49d 100644
--- a/storage/sis-sql/src/main/java/org/apache/sis/internal/sql/feature/DataBaseModel.java
+++ b/storage/sis-sql/src/main/java/org/apache/sis/internal/sql/feature/DataBaseModel.java
@@ -26,13 +26,10 @@ import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
-import java.util.Iterator;
import java.util.List;
import java.util.Map.Entry;
import java.util.Map;
-import java.util.Objects;
import java.util.Set;
-import java.util.function.Predicate;
import org.opengis.util.GenericName;
import org.opengis.coverage.Coverage;
import org.opengis.feature.AttributeType;
@@ -50,6 +47,7 @@ import org.apache.sis.storage.sql.SQLStore;
import org.apache.sis.storage.DataStore;
import org.apache.sis.storage.DataStoreException;
import org.apache.sis.storage.FeatureNaming;
+import org.apache.sis.util.ArgumentChecks;
import org.apache.sis.util.logging.WarningListeners;
@@ -93,12 +91,6 @@ public final class DataBaseModel {
//various cache while analyzing model
private DatabaseMetaData metadata;
- private CachedResultSet cacheTables;
- private CachedResultSet cacheColumns;
- private CachedResultSet cachePrimaryKeys;
- private CachedResultSet cacheImportedKeys;
- private CachedResultSet cacheExportedKeys;
- private CachedResultSet cacheIndexInfos;
//this set contains schema names which are needed to rebuild relations
private Set<String> visitedSchemas;
private Set<String> requieredSchemas;
@@ -106,6 +98,9 @@ public final class DataBaseModel {
private final WarningListeners<DataStore> listeners;
public DataBaseModel(final SQLStore store, final Dialect dialect, final String schema, final String table, final WarningListeners<DataStore> listeners)
{
+ if (table != null) {
+ ArgumentChecks.ensureNonEmpty("table", table);
+ }
this.store = store;
this.dialect = dialect;
this.databaseSchema = schema;
@@ -113,14 +108,14 @@ public final class DataBaseModel {
this.listeners = listeners;
}
- private Collection<SchemaMetaModel> getSchemaMetaModels() throws DataStoreException {
+ private Collection<SchemaMetaModel> getSchemaMetaModels() throws SQLException, DataStoreException {
if (schemas == null) {
analyze();
}
return schemas.values();
}
- private SchemaMetaModel getSchemaMetaModel(String name) throws DataStoreException{
+ private SchemaMetaModel getSchemaMetaModel(String name) throws SQLException, DataStoreException {
if (schemas == null) {
analyze();
}
@@ -138,14 +133,14 @@ public final class DataBaseModel {
schemas = null;
}
- private PrimaryKey getPrimaryKey(final String featureTypeName) throws DataStoreException{
+ private PrimaryKey getPrimaryKey(final String featureTypeName) throws SQLException, DataStoreException {
if (schemas == null) {
analyze();
}
return pkIndex.get(store, featureTypeName);
}
- private synchronized Set<GenericName> getNames() throws DataStoreException {
+ private synchronized Set<GenericName> getNames() throws SQLException, DataStoreException {
Set<GenericName> ref = nameCache;
if (ref == null) {
analyze();
@@ -162,7 +157,7 @@ public final class DataBaseModel {
return ref;
}
- public FeatureType getFeatureType(final String typeName) throws DataStoreException {
+ public FeatureType getFeatureType(final String typeName) throws SQLException, DataStoreException {
if (schemas == null) {
analyze();
}
@@ -172,10 +167,9 @@ public final class DataBaseModel {
/**
* Explores all tables and views then recreate a complex feature model from relations.
*/
- private synchronized void analyze() throws DataStoreException{
+ private synchronized void analyze() throws SQLException, DataStoreException {
if (schemas != null) {
- // Already analyzed
- return;
+ return; // Already analyzed
}
clearCache();
schemas = new HashMap<>();
@@ -189,102 +183,40 @@ public final class DataBaseModel {
* 1. TABLE_SCHEM : String => schema name
* 2. TABLE_CATALOG : String => catalog name (may be null)
*/
- final CachedResultSet cacheSchemas = new CachedResultSet(metadata.getSchemas(),
- Reflection.TABLE_SCHEM);
- /*
- * Description of the tables available:
- * 1. TABLE_SCHEM : String => table schema (may be null)
- * 2. TABLE_NAME : String => table name
- * 3. TABLE_TYPE : String => table type (typically "TABLE" or "VIEW").
- */
- cacheTables = new CachedResultSet(
- metadata.getTables(null, null, null, new String[] {TYPE_TABLE, TYPE_VIEW}), // TODO: use metadata.getTableTypes()
- Reflection.TABLE_SCHEM,
- Reflection.TABLE_NAME,
- Reflection.TABLE_TYPE);
- cacheColumns = new CachedResultSet(metadata.getColumns(null, null, null, null),
- Reflection.TABLE_SCHEM,
- Reflection.TABLE_NAME,
- Reflection.COLUMN_NAME,
- Reflection.COLUMN_SIZE,
- Reflection.DATA_TYPE,
- Reflection.TYPE_NAME,
- Reflection.IS_NULLABLE,
- Reflection.IS_AUTOINCREMENT,
- Reflection.REMARKS);
- cachePrimaryKeys = new CachedResultSet(metadata.getPrimaryKeys(null, null, null),
- Reflection.TABLE_SCHEM,
- Reflection.TABLE_NAME,
- Reflection.COLUMN_NAME);
- cacheImportedKeys = new CachedResultSet(metadata.getImportedKeys(null, null, null),
- Reflection.PK_NAME,
- Reflection.FK_NAME,
- Reflection.FKTABLE_SCHEM,
- Reflection.FKTABLE_NAME,
- Reflection.FKCOLUMN_NAME,
- Reflection.PKTABLE_SCHEM,
- Reflection.PKTABLE_NAME,
- Reflection.PKCOLUMN_NAME,
- Reflection.DELETE_RULE);
- cacheExportedKeys = new CachedResultSet(metadata.getExportedKeys(null, null, null),
- Reflection.PK_NAME,
- Reflection.FK_NAME,
- Reflection.PKTABLE_SCHEM,
- Reflection.PKTABLE_NAME,
- Reflection.PKCOLUMN_NAME,
- Reflection.FKTABLE_SCHEM,
- Reflection.FKTABLE_NAME,
- Reflection.FKCOLUMN_NAME,
- Reflection.DELETE_RULE);
-
- ////////////////////////////////////////////////////////////////////////////////
-
if (databaseSchema != null) {
requieredSchemas.add(databaseSchema);
- } else {
- final Iterator<Map<String,Object>> ite = cacheSchemas.records.iterator();
- while (ite.hasNext()) {
- requieredSchemas.add((String) ite.next().get(Reflection.TABLE_SCHEM));
+ } else try (ResultSet reflect = metadata.getSchemas()) {
+ while (reflect.next()) {
+ requieredSchemas.add(reflect.getString(Reflection.TABLE_SCHEM)); // TODO: use schemas in getTables instead.
}
}
-
- // We need to analyze requiered schema references
+ /*
+ * We need to analyze requiered schema references.
+ */
while (!requieredSchemas.isEmpty()) {
final String sn = requieredSchemas.iterator().next();
visitedSchemas.add(sn);
requieredSchemas.remove(sn);
- final SchemaMetaModel schema = analyzeSchema(sn,cx);
+ // TODO: escape with metadata.getSearchStringEscape().
+ final SchemaMetaModel schema = analyzeSchema(sn, databaseTable);
schemas.put(schema.name, schema);
}
-
- reverseSimpleFeatureTypes(cx);
-
- } catch (SQLException e) {
- throw new DataStoreException("Error occurred analyzing database
model.\n" + e.getMessage(), e);
+ reverseSimpleFeatureTypes();
} finally {
- cacheTables = null;
- cacheColumns = null;
- cachePrimaryKeys = null;
- cacheImportedKeys = null;
- cacheExportedKeys = null;
- cacheIndexInfos = null;
metadata = null;
visitedSchemas = null;
requieredSchemas = null;
}
-
-
- // Build indexes---------------------------------------------------------
+ /*
+ * Build indexes.
+ */
final String baseSchemaName = databaseSchema;
-
final Collection<SchemaMetaModel> candidates;
if (baseSchemaName == null) {
- //take all schemas
- candidates = getSchemaMetaModels();
+ candidates = getSchemaMetaModels(); // Take all schemas.
} else {
candidates = Collections.singleton(getSchemaMetaModel(baseSchemaName));
}
-
for (SchemaMetaModel schema : candidates) {
if (schema != null) {
for (TableMetaModel table : schema.getTables()) {
@@ -301,78 +233,80 @@ public final class DataBaseModel {
}
} else {
throw new DataStoreException("Specifed schema " +
baseSchemaName + " does not exist.");
- }
- }
-
+ }
+ }
}
- private SchemaMetaModel analyzeSchema(final String schemaName, final Connection cx) throws SQLException {
- final SchemaMetaModel schema = new SchemaMetaModel(schemaName);
- for (Map<String,Object> info : cacheTables.records) {
- if (!Objects.equals(info.get(Reflection.TABLE_SCHEM), schemaName)) continue;
- if (databaseTable != null && !databaseTable.isEmpty() && !Objects.equals(info.get(Reflection.TABLE_NAME), databaseTable)) continue;
- final TableMetaModel table = analyzeTable(info, cx);
- schema.tables.put(table.name, table);
+ /**
+ * @param schemaPattern schema name with "%" and "_" interpreted as wildcards, or {@code null} for all schemas.
+ */
+ private SchemaMetaModel analyzeSchema(final String schemaPattern, final String tableNamePattern) throws SQLException, DataStoreException {
+ final SchemaMetaModel schema = new SchemaMetaModel(schemaPattern);
+ /*
+ * Description of the tables available:
+ * 1. TABLE_SCHEM : String => table schema (may be null)
+ * 2. TABLE_NAME : String => table name
+ * 3. TABLE_TYPE : String => table type (typically "TABLE" or "VIEW").
+ */
+ try (ResultSet reflect = metadata.getTables(null, schemaPattern, tableNamePattern, new String[] {TYPE_TABLE, TYPE_VIEW})) { // TODO: use metadata.getTableTypes()
+ while (reflect.next()) {
+ final TableMetaModel table = analyzeTable(reflect);
+ schema.tables.put(table.name, table);
+ }
}
return schema;
}
- private TableMetaModel analyzeTable(final Map<String,Object> tableSet, final Connection cx) throws SQLException {
- final String schemaName = (String) tableSet.get(Reflection.TABLE_SCHEM);
- final String tableName = (String) tableSet.get(Reflection.TABLE_NAME);
- final String tableType = (String) tableSet.get(Reflection.TABLE_TYPE);
- final TableMetaModel table = new TableMetaModel(tableName,tableType);
+ private TableMetaModel analyzeTable(final ResultSet tableSet) throws SQLException, DataStoreException {
+ final String schemaName = tableSet.getString(Reflection.TABLE_SCHEM);
+ final String tableName = tableSet.getString(Reflection.TABLE_NAME);
+ final String tableType = tableSet.getString(Reflection.TABLE_TYPE);
+ final TableMetaModel table = new TableMetaModel(tableName, tableType);
final FeatureTypeBuilder ftb = new FeatureTypeBuilder();
- // Explore all columns ----------------------------------------------
- final Predicate<Map<String,Object>> tableFilter = (Map<String,Object> info) -> {
- return Objects.equals(info.get(Reflection.TABLE_SCHEM), schemaName)
- && Objects.equals(info.get(Reflection.TABLE_NAME), tableName);
- };
-
- final Iterator<Map<String,Object>> ite1 = cacheColumns.records.stream().filter(tableFilter).iterator();
- while (ite1.hasNext()) {
- analyzeColumn(ite1.next(), cx, ftb.addAttribute(Object.class));
+ /*
+ * Explore all columns.
+ */
+ try (ResultSet reflect = metadata.getColumns(null, schemaName, tableName, null)) {
+ while (reflect.next()) {
+ analyzeColumn(reflect, ftb.addAttribute(Object.class));
+ }
}
-
- // Find primary key -------------------------------------------------
+ /*
+ * Find primary keys.
+ */
final List<ColumnMetaModel> cols = new ArrayList<>();
- final Iterator<Map<String,Object>> pkIte = cachePrimaryKeys.records.stream().filter(tableFilter).iterator();
- while (pkIte.hasNext()) {
- final Map<String,Object> result = pkIte.next();
- final String columnName = (String) result.get(Reflection.COLUMN_NAME);
-
- final Predicate<Map<String,Object>> colFilter = (Map<String,Object> info) -> {
- return Objects.equals(info.get(Reflection.COLUMN_NAME), columnName);
- };
- final Iterator<Map<String,Object>> cite = cacheColumns.records.stream().filter(tableFilter.and(colFilter)).iterator();
- final Map<String,Object> column = cite.next();
-
- final int sqlType = ((Number) column.get(Reflection.DATA_TYPE)).intValue();
- final String sqlTypeName = (String) column.get(Reflection.TYPE_NAME);
- Class<?> columnType = dialect.getJavaType(sqlType, sqlTypeName);
-
- if (columnType == null) {
- listeners.warning("No class for SQL type " + sqlType, null);
- columnType = Object.class;
- }
-
- ColumnMetaModel col = null;
-
- final String str = (String) column.get(Reflection.IS_AUTOINCREMENT);
- if (VALUE_YES.equalsIgnoreCase(str)) {
- col = new ColumnMetaModel(schemaName, tableName, columnName, sqlType, sqlTypeName, columnType, ColumnMetaModel.Type.AUTO, null);
- } else {
- // TODO: need to distinguish "NO" and empty string.
- final String sequenceName = dialect.getColumnSequence(cx,schemaName, tableName, columnName);
- if (sequenceName != null) {
- col = new ColumnMetaModel(schemaName, tableName, columnName, sqlType,
- sqlTypeName, columnType, ColumnMetaModel.Type.SEQUENCED,sequenceName);
- } else {
- col = new ColumnMetaModel(schemaName, tableName, columnName, sqlType,
- sqlTypeName, columnType, ColumnMetaModel.Type.PROVIDED, null);
+ try (ResultSet rp = metadata.getPrimaryKeys(null, schemaName, tableName)) {
+ while (rp.next()) {
+ final String columnNamePattern = rp.getString(Reflection.COLUMN_NAME);
+ // TODO: escape columnNamePattern with metadata.getSearchStringEscape().
+ try (ResultSet reflect = metadata.getColumns(null, schemaName, tableName, columnNamePattern)) {
+ while (reflect.next()) { // Should loop exactly once.
+ final int sqlType = reflect.getInt(Reflection.DATA_TYPE);
+ final String sqlTypeName = reflect.getString(Reflection.TYPE_NAME);
+ Class<?> columnType = dialect.getJavaType(sqlType, sqlTypeName);
+ if (columnType == null) {
+ listeners.warning("No class for SQL type " + sqlType, null);
+ columnType = Object.class;
+ }
+ ColumnMetaModel col;
+ final String str = reflect.getString(Reflection.IS_AUTOINCREMENT);
+ if (VALUE_YES.equalsIgnoreCase(str)) {
+ col = new ColumnMetaModel(schemaName, tableName, columnNamePattern, sqlType, sqlTypeName, columnType, ColumnMetaModel.Type.AUTO, null);
+ } else {
+ // TODO: need to distinguish "NO" and empty string.
+ final String sequenceName = dialect.getColumnSequence(metadata.getConnection(), schemaName, tableName, columnNamePattern);
+ if (sequenceName != null) {
+ col = new ColumnMetaModel(schemaName, tableName, columnNamePattern, sqlType,
+ sqlTypeName, columnType, ColumnMetaModel.Type.SEQUENCED,sequenceName);
+ } else {
+ col = new ColumnMetaModel(schemaName, tableName, columnNamePattern, sqlType,
+ sqlTypeName, columnType, ColumnMetaModel.Type.PROVIDED, null);
+ }
+ }
+ cols.add(col);
+ }
+ }
}
- cols.add(col);
}
/*
* Search indexes, they provide informations such as:
@@ -384,37 +318,33 @@ public final class DataBaseModel {
final Map<String,List<String>> uniqueIndexes = new HashMap<>();
String indexname = null;
// We can't cache this one, seems to be a bug in the driver, it won't find anything for table name like '%'
- cacheIndexInfos = new CachedResultSet(metadata.getIndexInfo(null, schemaName, tableName, true, false),
- Reflection.TABLE_SCHEM,
- Reflection.TABLE_NAME,
- Reflection.COLUMN_NAME,
- Reflection.INDEX_NAME);
- final Iterator<Map<String,Object>> indexIte = cacheIndexInfos.records.stream().filter(tableFilter).iterator();
- while (indexIte.hasNext()) {
- final Map<String,Object> result = indexIte.next();
- final String columnName = (String) result.get(Reflection.COLUMN_NAME);
- final String idxName = (String) result.get(Reflection.INDEX_NAME);
-
- List<String> lst = uniqueIndexes.get(idxName);
- if (lst == null) {
- lst = new ArrayList<>();
- uniqueIndexes.put(idxName, lst);
- }
- lst.add(columnName);
-
- if (pkEmpty) {
- // We use a single index columns set as primary key
- // We must not mix with other potential indexes.
- if (indexname == null) {
- indexname = idxName;
- } else if (!indexname.equals(idxName)) {
- continue;
+ try (ResultSet reflect = metadata.getIndexInfo(null, schemaName, tableName, true, false)) {
+ while (reflect.next()) {
+ final String columnName = reflect.getString(Reflection.COLUMN_NAME);
+ final String idxName = reflect.getString(Reflection.INDEX_NAME);
+ List<String> lst = uniqueIndexes.get(idxName);
+ if (lst == null) {
+ lst = new ArrayList<>();
+ uniqueIndexes.put(idxName, lst);
+ }
+ lst.add(columnName);
+ if (pkEmpty) {
+ /*
+ * We use a single index columns set as primary key
+ * We must not mix with other potential indexes.
+ */
+ if (indexname == null) {
+ indexname = idxName;
+ } else if (!indexname.equals(idxName)) {
+ continue;
+ }
+ names.add(columnName);
}
- names.add(columnName);
}
}
-
- // For each unique index composed of one column add a flag on the property descriptor
+ /*
+ * For each unique index composed of one column add a flag on the property descriptor.
+ */
for (Entry<String,List<String>> entry : uniqueIndexes.entrySet()) {
final List<String> columns = entry.getValue();
if (columns.size() == 1) {
@@ -427,44 +357,43 @@ public final class DataBaseModel {
}
}
}
-
if (pkEmpty && !names.isEmpty()) {
- // Build a primary key from unique index
- final Iterator<Map<String,Object>> ite = cacheColumns.records.stream().filter(tableFilter).iterator();
- while (ite.hasNext()) {
- final Map<String,Object> result = ite.next();
- final String columnName = (String) result.get(Reflection.COLUMN_NAME);
- if (!names.contains(columnName)) {
- continue;
- }
-
- final int sqlType = ((Number) result.get(Reflection.DATA_TYPE)).intValue();
- final String sqlTypeName = (String) result.get(Reflection.TYPE_NAME);
- final Class<?> columnType = dialect.getJavaType(sqlType, sqlTypeName);
- final ColumnMetaModel col = new ColumnMetaModel(schemaName, tableName, columnName,
- sqlType, sqlTypeName, columnType, ColumnMetaModel.Type.PROVIDED, null);
- cols.add(col);
-
- // Set as identifier
- for (PropertyTypeBuilder desc : ftb.properties()) {
- if (desc.getName().tip().toString().equals(columnName)) {
- final AttributeTypeBuilder<?> atb = (AttributeTypeBuilder) desc;
- atb.addRole(AttributeRole.IDENTIFIER_COMPONENT);
- break;
+ /*
+ * Build a primary key from unique index.
+ */
+ try (ResultSet reflect = metadata.getColumns(null, schemaName, tableName, null)) {
+ while (reflect.next()) {
+ final String columnName = reflect.getString(Reflection.COLUMN_NAME);
+ if (names.contains(columnName)) {
+ final int sqlType = reflect.getInt(Reflection.DATA_TYPE);
+ final String sqlTypeName = reflect.getString(Reflection.TYPE_NAME);
+ final Class<?> columnType = dialect.getJavaType(sqlType, sqlTypeName);
+ final ColumnMetaModel col = new ColumnMetaModel(schemaName, tableName, columnName,
+ sqlType, sqlTypeName, columnType, ColumnMetaModel.Type.PROVIDED, null);
+ cols.add(col);
+ /*
+ * Set as identifier
+ */
+ for (PropertyTypeBuilder desc : ftb.properties()) {
+ if (desc.getName().tip().toString().equals(columnName)) {
+ final AttributeTypeBuilder<?> atb = (AttributeTypeBuilder) desc;
+ atb.addRole(AttributeRole.IDENTIFIER_COMPONENT);
+ break;
+ }
+ }
}
}
}
}
-
-
if (cols.isEmpty()) {
if (TYPE_TABLE.equals(tableType)) {
listeners.warning("No primary key found for " + tableName,
null);
}
}
table.key = new PrimaryKey(tableName, cols);
-
- // Mark primary key columns
+ /*
+ * Mark primary key columns.
+ */
for (PropertyTypeBuilder desc : ftb.properties()) {
for (ColumnMetaModel col : cols) {
if (desc.getName().tip().toString().equals(col.name)) {
@@ -474,97 +403,69 @@ public final class DataBaseModel {
}
}
}
-
-
- // Find imported keys -----------------------------------------------
- final Predicate<Map<String,Object>> fkFilter = (Map<String,Object> info) -> {
- return Objects.equals(info.get(Reflection.FKTABLE_SCHEM), schemaName)
- && Objects.equals(info.get(Reflection.FKTABLE_NAME), tableName);
- };
- Iterator<Map<String,Object>> ite = cacheImportedKeys.records.stream().filter(fkFilter).iterator();
- while (ite.hasNext()) {
- final Map<String,Object> result = ite.next();
- String relationName = (String) result.get(Reflection.PK_NAME);
- if (relationName == null) relationName = (String) result.get(Reflection.FK_NAME);
- final String localColumn = (String) result.get(Reflection.FKCOLUMN_NAME);
- final String refSchemaName = (String) result.get(Reflection.PKTABLE_SCHEM);
- final String refTableName = (String) result.get(Reflection.PKTABLE_NAME);
- final String refColumnName = (String) result.get(Reflection.PKCOLUMN_NAME);
- final int deleteRule = ((Number) result.get(Reflection.DELETE_RULE)).intValue();
- final boolean deleteCascade = DatabaseMetaData.importedKeyCascade == deleteRule;
- final RelationMetaModel relation = new RelationMetaModel(relationName,localColumn,
- refSchemaName, refTableName, refColumnName, true, deleteCascade);
- table.importedKeys.add(relation);
-
- if (refSchemaName!=null && !visitedSchemas.contains(refSchemaName)) requieredSchemas.add(refSchemaName);
-
- // Set the information
- for (PropertyTypeBuilder desc : ftb.properties()) {
- if (desc.getName().tip().toString().equals(localColumn)) {
- final AttributeTypeBuilder<?> atb = (AttributeTypeBuilder) desc;
- atb.addCharacteristic(ColumnMetaModel.JDBC_PROPERTY_RELATION).setDefaultValue(relation);
- break;
+ /*
+ * Find imported keys.
+ */
+ try (ResultSet reflect = metadata.getImportedKeys(null, schemaName, tableName)) {
+ while (reflect.next()) {
+ String relationName = reflect.getString(Reflection.PK_NAME);
+ if (relationName == null) relationName = reflect.getString(Reflection.FK_NAME);
+ final String localColumn = reflect.getString(Reflection.FKCOLUMN_NAME);
+ final String refSchemaName = reflect.getString(Reflection.PKTABLE_SCHEM);
+ final String refTableName = reflect.getString(Reflection.PKTABLE_NAME);
+ final String refColumnName = reflect.getString(Reflection.PKCOLUMN_NAME);
+ final int deleteRule = reflect.getInt(Reflection.DELETE_RULE);
+ final boolean deleteCascade = DatabaseMetaData.importedKeyCascade == deleteRule;
+ final RelationMetaModel relation = new RelationMetaModel(relationName,localColumn,
+ refSchemaName, refTableName, refColumnName, true, deleteCascade);
+ table.importedKeys.add(relation);
+ if (refSchemaName!=null && !visitedSchemas.contains(refSchemaName)) requieredSchemas.add(refSchemaName);
+ for (PropertyTypeBuilder desc : ftb.properties()) {
+ if (desc.getName().tip().toString().equals(localColumn)) {
+ final AttributeTypeBuilder<?> atb = (AttributeTypeBuilder) desc;
+ atb.addCharacteristic(ColumnMetaModel.JDBC_PROPERTY_RELATION).setDefaultValue(relation);
+ break;
+ }
}
}
}
-
- // Find exported keys -----------------------------------------------
- final Predicate<Map<String,Object>> ekFilter = (Map<String,Object> info) -> {
- return Objects.equals(info.get(Reflection.PKTABLE_SCHEM), schemaName)
- && Objects.equals(info.get(Reflection.PKTABLE_NAME), tableName);
- };
- ite = cacheExportedKeys.records.stream().filter(ekFilter).iterator();
- while (ite.hasNext()) {
- final Map<String,Object> result = ite.next();
- String relationName = (String) result.get(Reflection.FKCOLUMN_NAME);
- if (relationName == null) relationName = (String) result.get(Reflection.FK_NAME);
- final String localColumn = (String) result.get(Reflection.PKCOLUMN_NAME);
- final String refSchemaName = (String) result.get(Reflection.FKTABLE_SCHEM);
- final String refTableName = (String) result.get(Reflection.FKTABLE_NAME);
- final String refColumnName = (String) result.get(Reflection.FKCOLUMN_NAME);
- final int deleteRule = ((Number) result.get(Reflection.DELETE_RULE)).intValue();
- final boolean deleteCascade = DatabaseMetaData.importedKeyCascade == deleteRule;
- table.exportedKeys.add(new RelationMetaModel(relationName, localColumn,
- refSchemaName, refTableName, refColumnName, false, deleteCascade));
-
- if (refSchemaName != null && !visitedSchemas.contains(refSchemaName)) requieredSchemas.add(refSchemaName);
+ /*
+ * Find exported keys.
+ */
+ try (ResultSet reflect = metadata.getExportedKeys(null, schemaName, tableName)) {
+ while (reflect.next()) {
+ String relationName = reflect.getString(Reflection.FKCOLUMN_NAME);
+ if (relationName == null) relationName = reflect.getString(Reflection.FK_NAME);
+ final String localColumn = reflect.getString(Reflection.PKCOLUMN_NAME);
+ final String refSchemaName = reflect.getString(Reflection.FKTABLE_SCHEM);
+ final String refTableName = reflect.getString(Reflection.FKTABLE_NAME);
+ final String refColumnName = reflect.getString(Reflection.FKCOLUMN_NAME);
+ final int deleteRule = reflect.getInt(Reflection.DELETE_RULE);
+ final boolean deleteCascade = DatabaseMetaData.importedKeyCascade == deleteRule;
+ table.exportedKeys.add(new RelationMetaModel(relationName, localColumn,
+ refSchemaName, refTableName, refColumnName, false, deleteCascade));
+
+ if (refSchemaName != null && !visitedSchemas.contains(refSchemaName)) {
+ requieredSchemas.add(refSchemaName);
+ }
+ }
}
-
- // Find parent table if any -----------------------------------------
-// if (handleSuperTableMetadata == null || handleSuperTableMetadata) {
-// try {
-// result = metadata.getSuperTables(null, schemaName, tableName);
-// while (result.next()) {
-// final String parentTable = result.getString(SuperTable.SUPERTABLE_NAME);
-// table.parents.add(parentTable);
-// }
-// } catch (SQLException ex) {
-// //not implemented by database
-// handleSuperTableMetadata = Boolean.FALSE;
-// store.getLogger().log(Level.INFO, "Database does not handle getSuperTable, feature type hierarchy will be ignored.");
-// } finally {
-// closeSafe(store.getLogger(),result);
-// }
-// }
-
ftb.setName(tableName);
table.tableType = ftb;
return table;
}
- private AttributeType<?> analyzeColumn(final Map<String,Object> columnSet, final Connection cx, final AttributeTypeBuilder<?> atb)
- throws SQLException
- {
- final String schemaName = (String) columnSet.get(Reflection.TABLE_SCHEM);
- final String tableName = (String) columnSet.get(Reflection.TABLE_NAME);
- final String columnName = (String) columnSet.get(Reflection.COLUMN_NAME);
- final int columnSize = ((Number) columnSet.get(Reflection.COLUMN_SIZE)).intValue();
- final int columnDataType = ((Number) columnSet.get(Reflection.DATA_TYPE)).intValue();
- final String columnTypeName = (String) columnSet.get(Reflection.TYPE_NAME);
- final String columnNullable = (String) columnSet.get(Reflection.IS_NULLABLE);
+ private AttributeType<?> analyzeColumn(final ResultSet columnSet, final AttributeTypeBuilder<?> atb) throws SQLException {
+ final String schemaName = columnSet.getString(Reflection.TABLE_SCHEM);
+ final String tableName = columnSet.getString(Reflection.TABLE_NAME);
+ final String columnName = columnSet.getString(Reflection.COLUMN_NAME);
+ final int columnSize = columnSet.getInt(Reflection.COLUMN_SIZE);
+ final int columnDataType = columnSet.getInt(Reflection.DATA_TYPE);
+ final String columnTypeName = columnSet.getString(Reflection.TYPE_NAME);
+ final String columnNullable = columnSet.getString(Reflection.IS_NULLABLE);
atb.setName(columnName);
atb.setMaximalLength(columnSize);
- dialect.decodeColumnType(atb, cx, columnTypeName, columnDataType, schemaName, tableName, columnName);
+ dialect.decodeColumnType(atb, metadata.getConnection(), columnTypeName, columnDataType, schemaName, tableName, columnName);
// TODO: need to distinguish "YES" and empty string?
atb.setMinimumOccurs(VALUE_NO.equalsIgnoreCase(columnNullable) ? 1 : 0);
atb.setMaximumOccurs(1);
@@ -577,14 +478,11 @@ public final class DataBaseModel {
final FeatureType analyzeResult(final ResultSet result, final String name) throws SQLException, DataStoreException {
final FeatureTypeBuilder ftb = new FeatureTypeBuilder();
ftb.setName(name);
-
final ResultSetMetaData metadata = result.getMetaData();
final int nbcol = metadata.getColumnCount();
-
for (int i=1; i <= nbcol; i++) {
final String columnName = metadata.getColumnName(i);
final String columnLabel = metadata.getColumnLabel(i);
- final String typeName = metadata.getColumnTypeName(i);
final String schemaName = metadata.getSchemaName(i);
final String tableName = metadata.getTableName(i);
final int sqlType = metadata.getColumnType(i);
@@ -599,7 +497,7 @@ public final class DataBaseModel {
try {
desc = table.getType(TableMetaModel.View.SIMPLE_FEATURE_TYPE).build().getProperty(columnName);
} catch (PropertyNotFoundException ex) {
- //ok
+ // ok
}
}
}
@@ -609,25 +507,12 @@ public final class DataBaseModel {
// could not find the original type
// this column must be calculated
final AttributeTypeBuilder<?> atb = ftb.addAttribute(Object.class);
-
final int nullable = metadata.isNullable(i);
atb.setName(columnLabel);
atb.setMinimumOccurs(nullable == ResultSetMetaData.columnNullable ? 0 : 1);
atb.setMaximumOccurs(1);
atb.setName(columnLabel);
-
- try (Connection cx = store.getDataSource().getConnection()) {
- final Class<?> type = dialect.getJavaType(sqlType, sqlTypeName);
- //TODO : avoid jts, global geometry interface common to all ?
-// if (type.equals(Geometry.class)) {
-// // try to determine the real geometric type
-// dialect.decodeGeometryColumnType(atb, cx, result, i, true);
-// } else {
- atb.setValueClass(type);
-// }
- } catch (SQLException e) {
- throw new DataStoreException("Error occurred analyzing column : " + columnName, e);
- }
+ atb.setValueClass(dialect.getJavaType(sqlType, sqlTypeName));
}
}
return ftb.build();
@@ -636,78 +521,35 @@ public final class DataBaseModel {
/**
* Rebuild simple feature types for each table.
*/
- private void reverseSimpleFeatureTypes(final Connection cx) throws SQLException {
+ private void reverseSimpleFeatureTypes() throws SQLException {
for (final SchemaMetaModel schema : schemas.values()) {
for (final TableMetaModel table : schema.getTables()) {
- final String tableName = table.name;
-
- final FeatureTypeBuilder ftb = new FeatureTypeBuilder(table.tableType.build());
final String featureName = ftb.getName().tip().toString();
ftb.setName(featureName);
-
final List<PropertyTypeBuilder> descs = ftb.properties();
-
boolean defaultGeomSet = false;
for (int i=0,n=descs.size(); i<n; i++) {
final AttributeTypeBuilder<?> atb = (AttributeTypeBuilder) descs.get(i);
final String name = atb.getName().tip().toString();
-
atb.setName(name);
-
- //Configure CRS if it is a geometry
+ /*
+ * Configure CRS if the column contains a geometry or a raster.
+ */
final Class<?> binding = atb.getValueClass();
- if (Geometries.isKnownType(binding)) {
-
- final Predicate<Map<?,?>> colFilter = (Map<?,?> info) -> {
- return Objects.equals(info.get(Reflection.TABLE_SCHEM), schema.name)
- && Objects.equals(info.get(Reflection.TABLE_NAME), tableName)
- && Objects.equals(info.get(Reflection.COLUMN_NAME), name);
- };
- final Map<String,Object> metas = cacheColumns.records.stream().filter(colFilter).findFirst().get();
-
- CoordinateReferenceSystem crs = null;
- Integer srid = dialect.getGeometrySRID(databaseSchema, tableName, name, metas, cx);
- if (srid != null) {
- crs = dialect.createCRS(srid, cx);
- }
- atb.setCRS(crs);
- if (srid != null) {
- atb.addCharacteristic(ColumnMetaModel.JDBC_PROPERTY_SRID).setDefaultValue(srid);
- if (!defaultGeomSet) {
- atb.addRole(AttributeRole.DEFAULT_GEOMETRY);
- defaultGeomSet = true;
+ final boolean isGeometry = Geometries.isKnownType(binding);
+ if (isGeometry || Coverage.class.isAssignableFrom(binding)) {
+ // TODO: escape columnNamePattern with metadata.getSearchStringEscape().
+ try (ResultSet reflect = metadata.getColumns(null, schema.name, table.name, name)) {
+ while (reflect.next()) { // Should loop exactly once.
+ CoordinateReferenceSystem crs = dialect.createGeometryCRS(reflect);
+ atb.setCRS(crs);
+ if (isGeometry & !defaultGeomSet) {
+ atb.addRole(AttributeRole.DEFAULT_GEOMETRY);
+ defaultGeomSet = true;
+ }
}
}
- } else if (Coverage.class.isAssignableFrom(binding)) {
- final Predicate<Map<String,Object>> colFilter = (Map<String,Object> info) -> {
- return Objects.equals(info.get(Reflection.TABLE_SCHEM), schema.name)
- && Objects.equals(info.get(Reflection.TABLE_NAME), tableName)
- && Objects.equals(info.get(Reflection.COLUMN_NAME), name);
- };
- final Map<String,Object> metas = cacheColumns.records.stream().filter(colFilter).findFirst().get();
-
- // Add the attribute as a geometry, try to figure out its srid first
- CoordinateReferenceSystem crs = null;
- Integer srid = dialect.getGeometrySRID(databaseSchema, tableName, name, metas, cx);
- if (srid != null) {
- crs = dialect.createCRS(srid, cx);
- }
- atb.setCRS(crs);
- if (srid != null) {
- atb.addCharacteristic(ColumnMetaModel.JDBC_PROPERTY_SRID).setDefaultValue(srid);
- //not working yet, SIS FeatureTypeBuilder do not reconize Coverage as a geometry type.
-// if (!defaultGeomSet) {
-// //create a computed geometry from coverage envelope
-// PropertyTypeBuilder geomProp = ftb.addProperty(new CoverageGeometryOperation(AttributeConvention.GEOMETRY_PROPERTY, atb.getName().toString()));
-// try {
-// ftb.addProperty(FeatureOperations.envelope(
-// Collections.singletonMap(AbstractOperation.NAME_KEY,AttributeConvention.ENVELOPE_PROPERTY), null, geomProp.build()));
-// } catch (FactoryException e) {
-// throw new IllegalStateException(e);
-// }
-// defaultGeomSet = true;
-// }
- }
}
}
table.simpleFeatureType = ftb;
diff --git a/storage/sis-sql/src/main/java/org/apache/sis/internal/sql/feature/Dialect.java b/storage/sis-sql/src/main/java/org/apache/sis/internal/sql/feature/Dialect.java
index ad9930f..102886a 100644
--- a/storage/sis-sql/src/main/java/org/apache/sis/internal/sql/feature/Dialect.java
+++ b/storage/sis-sql/src/main/java/org/apache/sis/internal/sql/feature/Dialect.java
@@ -16,10 +16,10 @@
*/
package org.apache.sis.internal.sql.feature;
-import java.util.Map;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
+import java.sql.DatabaseMetaData;
import org.opengis.referencing.crs.CoordinateReferenceSystem;
import org.apache.sis.feature.builder.AttributeTypeBuilder;
import org.apache.sis.storage.DataStoreException;
@@ -114,27 +114,13 @@ public abstract class Dialect {
final ResultSet rs, final int columnIndex, boolean customquery) throws SQLException;
/**
- * Gets the geometric field SRID.
+ * Creates the CRS associated to the geometry SRID of a given column. The {@code reflect} argument
+ * is the result of a call to {@link DatabaseMetaData#getColumns(String, String, String, String)
+ * DatabaseMetaData.getColumns(…)} with the cursor positioned on the row to process.
*
- * @param schema name of the database schema.
- * @param table name of the database table.
- * @param column name of the database column.
- * @param cx connection to the database.
+ * @param reflect the result of {@link DatabaseMetaData#getColumns DatabaseMetaData.getColumns(…)}.
* @return the coordinate reference system inferred from the SRID in the database.
* @throws SQLException if a JDBC error occurred while executing a statement.
*/
- public abstract Integer getGeometrySRID(final String schema, final String table,
- final String column, Map<String,Object> metas, final Connection cx) throws SQLException;
-
- /**
- * Gets a coordinate reference system from database SRID.
- *
- * @param srid Coordinate Reference System identifier in the database.
- * @param cx connection to the database.
- * @return The coordinate reference system for the given identifier.
- * @throws SQLException if a JDBC error occurred while executing a statement.
- *
- * @todo what happen if no CRS is found for the given identifier?
- */
- public abstract CoordinateReferenceSystem createCRS(final int srid, final Connection cx) throws SQLException;
+ public abstract CoordinateReferenceSystem createGeometryCRS(ResultSet reflect) throws SQLException;
}
diff --git a/storage/sis-sql/src/main/java/org/apache/sis/internal/sql/feature/InsertRelation.java b/storage/sis-sql/src/main/java/org/apache/sis/internal/sql/feature/InsertRelation.java
deleted file mode 100644
index 2d494d0..0000000
--- a/storage/sis-sql/src/main/java/org/apache/sis/internal/sql/feature/InsertRelation.java
+++ /dev/null
@@ -1,35 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.sis.internal.sql.feature;
-
-import org.opengis.feature.Feature;
-
-
-/**
- * When inserting a complex feature in base, it must be divided in smaller elements.
- * Those flat insertions and relations are represented by this class.
- *
- * @author Johann Sorel (Geomatys)
- * @version 1.0
- * @since 1.0
- * @module
- */
-final class InsertRelation {
- Feature parent;
- Feature child;
- RelationMetaModel relation;
-}
diff --git a/storage/sis-sql/src/main/java/org/apache/sis/internal/sql/feature/RelationMetaModel.java b/storage/sis-sql/src/main/java/org/apache/sis/internal/sql/feature/RelationMetaModel.java
index ab09cf3..bc83e29 100644
--- a/storage/sis-sql/src/main/java/org/apache/sis/internal/sql/feature/RelationMetaModel.java
+++ b/storage/sis-sql/src/main/java/org/apache/sis/internal/sql/feature/RelationMetaModel.java
@@ -22,7 +22,7 @@ import org.apache.sis.util.ArgumentChecks;
/**
* Description of a relation between two tables.
*
- * @author Johann Sorel (Geomatys)
+ * @author Johann Sorel (Geomatys)
* @version 1.0
* @since 1.0
* @module
diff --git a/storage/sis-sql/src/main/java/org/apache/sis/internal/sql/postgres/PostgresDialect.java b/storage/sis-sql/src/main/java/org/apache/sis/internal/sql/postgres/PostgresDialect.java
index 2ec1041..f18fee2 100644
--- a/storage/sis-sql/src/main/java/org/apache/sis/internal/sql/postgres/PostgresDialect.java
+++ b/storage/sis-sql/src/main/java/org/apache/sis/internal/sql/postgres/PostgresDialect.java
@@ -20,7 +20,6 @@ import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.HashSet;
-import java.util.Map;
import java.util.Set;
import org.opengis.referencing.crs.CoordinateReferenceSystem;
import org.apache.sis.internal.sql.feature.ColumnMetaModel;
@@ -81,12 +80,7 @@ final class PostgresDialect extends Dialect {
}
@Override
- public Integer getGeometrySRID(String schemaName, String tableName, String columnName, Map<String, Object> metas, Connection cx) throws SQLException {
- throw new UnsupportedOperationException("Not supported yet.");
- }
-
- @Override
- public CoordinateReferenceSystem createCRS(int srid, Connection cx) throws SQLException {
+ public CoordinateReferenceSystem createGeometryCRS(ResultSet reflect) throws SQLException {
throw new UnsupportedOperationException("Not supported yet.");
}
}
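
The "escape with metadata.getSearchStringEscape()" TODOs in the patch refer
to the JDBC rule that the schema, table and column arguments of
DatabaseMetaData.getColumns(…) and related methods are patterns in which
"%" and "_" act as wildcards. A minimal sketch of the escaping helper those
TODOs call for (an illustration only, not code from this commit; the helper
name is hypothetical):

    // Hypothetical helper sketching the escaping that the TODOs refer to.
    // The driver-specific escape string returned by getSearchStringEscape()
    // neutralizes the '%' and '_' wildcards in object-name patterns.
    static String escape(final String name, final DatabaseMetaData metadata) throws SQLException {
        final String esc = metadata.getSearchStringEscape();
        return name.replace(esc, esc + esc)      // Escape the escape string itself first.
                   .replace("_", esc + "_")
                   .replace("%", esc + "%");
    }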