Revision: 4832
          http://sourceforge.net/p/jump-pilot/code/4832
Author:   michaudm
Date:     2016-02-27 21:27:02 +0000 (Sat, 27 Feb 2016)
Log Message:
-----------
Typo in I18N
Modified Paths:
--------------
    core/trunk/ChangeLog
    core/trunk/src/com/vividsolutions/jump/datastore/h2/H2DSConnection.java
    core/trunk/src/com/vividsolutions/jump/datastore/h2/H2DSMetadata.java
    core/trunk/src/com/vividsolutions/jump/datastore/mariadb/MariadbDSConnection.java
    core/trunk/src/com/vividsolutions/jump/datastore/mariadb/MariadbDSMetadata.java
    core/trunk/src/com/vividsolutions/jump/datastore/oracle/OracleDSConnection.java
    core/trunk/src/com/vividsolutions/jump/datastore/oracle/OracleDSMetadata.java
    core/trunk/src/com/vividsolutions/jump/datastore/postgis/PostgisDSConnection.java
    core/trunk/src/com/vividsolutions/jump/datastore/postgis/PostgisDSMetadata.java
    core/trunk/src/com/vividsolutions/jump/datastore/spatialdatabases/AbstractSpatialDatabasesDSDriver.java
    core/trunk/src/com/vividsolutions/jump/datastore/spatialdatabases/SpatialDatabasesDSConnection.java
    core/trunk/src/com/vividsolutions/jump/datastore/spatialdatabases/SpatialDatabasesDSMetadata.java
    core/trunk/src/com/vividsolutions/jump/datastore/spatialdatabases/SpatialDatabasesSQLBuilder.java
    core/trunk/src/com/vividsolutions/jump/datastore/spatialite/SpatialiteDSConnection.java
    core/trunk/src/org/openjump/core/ui/plugin/datastore/WritableDataStoreDataSource.java
    core/trunk/src/org/openjump/core/ui/plugin/datastore/postgis/PostGISSaveDataSourceQueryChooser.java
    core/trunk/src/org/openjump/core/ui/plugin/datastore/postgis/SaveToPostGISDataSource.java
    core/trunk/src/org/openjump/core/ui/plugin/datastore/postgis2/PostGISDataStoreDataSource.java
    core/trunk/src/org/openjump/core/ui/plugin/datastore/postgis2/PostGISSaveDataSourceQueryChooser.java
    core/trunk/src/org/openjump/core/ui/plugin/datastore/postgis2/PostGISSaveDriverPanel.java

Added Paths:
-----------
    core/trunk/src/com/vividsolutions/jump/datastore/SQLUtil.java

Removed Paths:
-------------
    core/trunk/src/org/openjump/core/ui/plugin/datastore/postgis/PostGISConnectionUtil.java
    core/trunk/src/org/openjump/core/ui/plugin/datastore/postgis/PostGISQueryUtil.java

Modified: core/trunk/ChangeLog
===================================================================
--- core/trunk/ChangeLog	2016-02-27 17:28:40 UTC (rev 4831)
+++ core/trunk/ChangeLog	2016-02-27 21:27:02 UTC (rev 4832)
@@ -3,6 +3,11 @@
 # 2. make sure that lines break at 80 chars for constricted display situations
 #<-------------------------------- 80 chars ----------------------------------->#
+2016-02-27 mmichaud <m.michael.mich...@orange.fr>
+  * Null boolean was not handled correctly in datastore datasources.
+  * Refactoring of DataStore framework. Prepare writing capabilities for
+    multiple kinds of drivers.
+ 2016-02-19 mmichaud <m.michael.mich...@orange.fr> * Better handling of readOnly attributes in PostGIS writer * Add Boolean and Long AttributeType in AttributeCalculator (beanshell) Added: core/trunk/src/com/vividsolutions/jump/datastore/SQLUtil.java =================================================================== --- core/trunk/src/com/vividsolutions/jump/datastore/SQLUtil.java (rev 0) +++ core/trunk/src/com/vividsolutions/jump/datastore/SQLUtil.java 2016-02-27 21:27:02 UTC (rev 4832) @@ -0,0 +1,212 @@ +package com.vividsolutions.jump.datastore; + +import com.vividsolutions.jts.geom.Geometry; +import com.vividsolutions.jts.io.WKBWriter; +import com.vividsolutions.jump.feature.AttributeType; + +import java.sql.Types; + +/** + * Utililty class containing methods to manipulate SQL Strings + */ +public class SQLUtil { + + private static final WKBWriter WRITER2D = new WKBWriter(2, false); + private static final WKBWriter WRITER3D = new WKBWriter(3, false); + private static final WKBWriter WRITER2D_SRID = new WKBWriter(2, true); + private static final WKBWriter WRITER3D_SRID = new WKBWriter(3, true); + + /** + * Returns a pair of strings containing unquoted schema and table names + * from a full table name. If the fullName contains only one part (table + * name), the returned array contains a null element at index 0<br> + * Examples :<br> + * <ul> + * <li>myschema.mytable -> [myschema, mytable]</li> + * <li>"MySchema"."MyTable" -> [MySchema, MyTable]</li> + * <li>MyTable -> [null, MyTable]</li> + * <li>2_table -> [null, 2_table]</li> + * </ul> + */ + public static String[] splitTableName(String fullName) { + + if (isQuoted(fullName)) { + return splitQuotedTableName(fullName); + } + int index = fullName.indexOf("."); + // no schema + if (index == -1) { + if (fullName.matches("(?i)^[A-Z_].*")) return new String[]{null, fullName}; + else return new String[]{null, "\"" + fullName + "\""}; + } + // schema + table name + else { + String dbSchema = fullName.substring(0, index); + String dbTable = fullName.substring(index+1, fullName.length()); + return new String[]{dbSchema, dbTable}; + } + } + + + private static String[] splitQuotedTableName(String fullName) { + int index = fullName.indexOf("\".\""); + if (index > -1) { + return new String[]{ + unquote(fullName.substring(0, index)), + unquote(fullName.substring(index+1, fullName.length())) + }; + } + else return new String[]{null, unquote(fullName)}; + } + + + /** + * Returns true if this identifier is quoted. + */ + private static boolean isQuoted(String s) { + return s.startsWith("\"") && s.endsWith("\""); + } + + + /** + * Returns s if s is already quoted (with double-quotes), and a quoted + * version of s otherwise. Returns null if s is null. + */ + public static String quote(String s) { + if (s == null) return null; + if (isQuoted(s)) return s; + else return "\"" + s + "\""; + } + + + /** + * Returns s without initial and final double quotes if any. + * Returns null if s is null. + */ + public static String unquote(String s) { + if (s == null) return null; + if (!isQuoted(s)) return s; + else return s.substring(1, s.length()-1); + } + + + /** + * Escape single quotes in the given identifier. + * Replace all single quotes ("'") by double single quotes ("''") + * @param identifier string identifier to escape + * @return the identifier with single quotes escaped, or identifier if no string found + */ + public static String escapeSingleQuote(String identifier) { + return identifier == null ? 
null : identifier.replaceAll("'", "''"); + } + + + /** + * Compose a concatenated quoted schema name and table name. + * @param schemaName unquoted schema name + * @param tableName unquoted table name + */ + public static String compose(String schemaName, String tableName) { + return schemaName == null ? + "\"" + tableName + "\"" : + "\"" + schemaName + "\".\"" + tableName + "\""; + } + + + /** + * Normalize an identifier name (use only lower case) + * @param name the identifier to normalize + * @return the name writen in lowercase + */ + public static String normalize(String name) { + if (name == null) return null; + StringBuilder sb = new StringBuilder(name.length()); + for (int i = 0 ; i < name.length() ; i++) { + char c = name.charAt(i); + if(i==0) { + if (Character.isLetter(c) || c == '_') sb.append(Character.toLowerCase(c)); + else sb.append('_'); + } else { + if (Character.isLetterOrDigit(c) || c == '_') sb.append(Character.toLowerCase(c)); + else sb.append('_'); + } + } + return sb.toString(); + } + + /** + * Converts the geometry into a byte array in EWKB format + * @param geom the geometry to convert to a byte array + * @param srid the srid of the geometry + * @param dimension geometry dimension (2 or 3) + * @return a byte array containing a EWKB representation of the geometry + */ + public static byte[] getByteArrayFromGeometry(Geometry geom, int srid, int dimension) { + WKBWriter writer; + if (srid > 0) { + geom.setSRID(srid); + writer = dimension == 3 ? WRITER3D_SRID : WRITER2D_SRID; + } else { + writer = dimension == 3 ? WRITER3D : WRITER2D; + } + return writer.write(geom); + } + + + /** + * Returns OpenJUMP attributeType from sql type and datatype name. + * dataTypeName is nullable. + * If dataTypeName = "geometry", binary field will be interpreted into + * AttributeType.GEOMETRY + * @param sqlType jdbc sql datatype + * @param dataTypeName native datatype name + */ + static public AttributeType getAttributeType(int sqlType, String dataTypeName) { + if (sqlType == Types.BIGINT) return AttributeType.LONG; + // PostGIS geometries are stored as OTHER (type=1111) not BINARY (type=-2) + if (sqlType == Types.BINARY && dataTypeName != null && + dataTypeName.toLowerCase().equals("geometry")) + return AttributeType.GEOMETRY; + else if (sqlType == Types.BINARY) return AttributeType.OBJECT; + if (sqlType == Types.BIT) return AttributeType.BOOLEAN; + if (sqlType == Types.BLOB) return AttributeType.OBJECT; + if (sqlType == Types.BOOLEAN) return AttributeType.BOOLEAN; + if (sqlType == Types.CHAR) return AttributeType.STRING; + if (sqlType == Types.CLOB) return AttributeType.STRING; + if (sqlType == Types.DATALINK) return AttributeType.OBJECT; + if (sqlType == Types.DATE) return AttributeType.DATE; + if (sqlType == Types.DECIMAL) return AttributeType.DOUBLE; + if (sqlType == Types.DISTINCT) return AttributeType.OBJECT; + if (sqlType == Types.DOUBLE) return AttributeType.DOUBLE; + if (sqlType == Types.FLOAT) return AttributeType.DOUBLE; + if (sqlType == Types.INTEGER) return AttributeType.INTEGER; + if (sqlType == Types.JAVA_OBJECT) return AttributeType.OBJECT; + if (sqlType == Types.LONGNVARCHAR) return AttributeType.STRING; + if (sqlType == Types.LONGVARBINARY) return AttributeType.OBJECT; + if (sqlType == Types.LONGVARCHAR) return AttributeType.STRING; + if (sqlType == Types.NCHAR) return AttributeType.STRING; + if (sqlType == Types.NCLOB) return AttributeType.STRING; + if (sqlType == Types.NULL) return AttributeType.OBJECT; + if (sqlType == Types.NUMERIC) return AttributeType.DOUBLE; + 
if (sqlType == Types.NVARCHAR) return AttributeType.STRING; + if (sqlType == Types.OTHER && dataTypeName != null && + dataTypeName.toLowerCase().equals("geometry")) + return AttributeType.GEOMETRY; + else if (sqlType == Types.OTHER) return AttributeType.OBJECT; + if (sqlType == Types.REAL) return AttributeType.DOUBLE; + if (sqlType == Types.REF) return AttributeType.OBJECT; + if (sqlType == Types.ROWID) return AttributeType.INTEGER; + if (sqlType == Types.SMALLINT) return AttributeType.INTEGER; + if (sqlType == Types.SQLXML) return AttributeType.STRING; + if (sqlType == Types.STRUCT) return AttributeType.OBJECT; + if (sqlType == Types.TIME) return AttributeType.DATE; + if (sqlType == Types.TIMESTAMP) return AttributeType.DATE; + if (sqlType == Types.TINYINT) return AttributeType.INTEGER; + if (sqlType == Types.VARBINARY) return AttributeType.OBJECT; + if (sqlType == Types.VARCHAR) return AttributeType.STRING; + throw new IllegalArgumentException("" + sqlType + " is an unknown SQLType"); + } + + + +} Modified: core/trunk/src/com/vividsolutions/jump/datastore/h2/H2DSConnection.java =================================================================== --- core/trunk/src/com/vividsolutions/jump/datastore/h2/H2DSConnection.java 2016-02-27 17:28:40 UTC (rev 4831) +++ core/trunk/src/com/vividsolutions/jump/datastore/h2/H2DSConnection.java 2016-02-27 21:27:02 UTC (rev 4832) @@ -18,8 +18,7 @@ public class H2DSConnection extends SpatialDatabasesDSConnection { public H2DSConnection(Connection con) { - super(con); // ? - connection = con; + super(con); this.dbMetadata = new H2DSMetadata(this); } @@ -68,7 +67,7 @@ H2FeatureInputStream ifs = new H2FeatureInputStream(connection, queryString, query.getPrimaryKey()); // Nicolas Ribot: getting FeatureSchema here actually runs the query: if an error occurs, must trap it here - FeatureSchema fs = null; + FeatureSchema fs; try { fs = ifs.getFeatureSchema(); } catch (Exception e) { @@ -84,4 +83,8 @@ return ifs; } + + public H2ValueConverterFactory getValueConverterFactory() { + return new H2ValueConverterFactory(connection); + } } Modified: core/trunk/src/com/vividsolutions/jump/datastore/h2/H2DSMetadata.java =================================================================== --- core/trunk/src/com/vividsolutions/jump/datastore/h2/H2DSMetadata.java 2016-02-27 17:28:40 UTC (rev 4831) +++ core/trunk/src/com/vividsolutions/jump/datastore/h2/H2DSMetadata.java 2016-02-27 21:27:02 UTC (rev 4832) @@ -2,8 +2,8 @@ import com.vividsolutions.jump.datastore.DataStoreConnection; import com.vividsolutions.jump.datastore.GeometryColumn; +import com.vividsolutions.jump.datastore.SQLUtil; import com.vividsolutions.jump.datastore.spatialdatabases.SpatialDatabasesDSMetadata; -import com.vividsolutions.jump.datastore.spatialdatabases.SpatialDatabasesSQLBuilder; import java.util.List; @@ -37,17 +37,17 @@ @Override public String getGeoColumnsQuery(String datasetName) { // escape single quotes - return String.format(this.geoColumnsQuery, - SpatialDatabasesSQLBuilder.escapeSingleQuote(getSchemaName(datasetName)), - SpatialDatabasesSQLBuilder.escapeSingleQuote(getTableName(datasetName))); + return String.format(this.geoColumnsQuery, + SQLUtil.escapeSingleQuote(getSchemaName(datasetName)), + SQLUtil.escapeSingleQuote(getTableName(datasetName))); } @Override public String getSridQuery(String schemaName, String tableName, String colName) { // escape single quotes - return String.format(this.sridQuery, - SpatialDatabasesSQLBuilder.escapeSingleQuote(schemaName), - 
SpatialDatabasesSQLBuilder.escapeSingleQuote(tableName), colName); + return String.format(this.sridQuery, + SQLUtil.escapeSingleQuote(schemaName), + SQLUtil.escapeSingleQuote(tableName), colName); } @Override Modified: core/trunk/src/com/vividsolutions/jump/datastore/mariadb/MariadbDSConnection.java =================================================================== --- core/trunk/src/com/vividsolutions/jump/datastore/mariadb/MariadbDSConnection.java 2016-02-27 17:28:40 UTC (rev 4831) +++ core/trunk/src/com/vividsolutions/jump/datastore/mariadb/MariadbDSConnection.java 2016-02-27 21:27:02 UTC (rev 4832) @@ -18,8 +18,7 @@ public class MariadbDSConnection extends SpatialDatabasesDSConnection { public MariadbDSConnection(Connection con) { - super(con); // ? - connection = con; + super(con); this.dbMetadata = new MariadbDSMetadata(this); } @@ -66,7 +65,7 @@ MariadbFeatureInputStream ifs = new MariadbFeatureInputStream(connection, queryString, query.getPrimaryKey()); // Nicolas Ribot: getting FeatureSchema here actually runs the query: if an error occurs, must trap it here - FeatureSchema fs = null; + FeatureSchema fs; try { fs = ifs.getFeatureSchema(); } catch (Exception e) { @@ -83,5 +82,9 @@ return ifs; } + + public MariadbValueConverterFactory getValueConverterFactory() { + return new MariadbValueConverterFactory(connection); + } } Modified: core/trunk/src/com/vividsolutions/jump/datastore/mariadb/MariadbDSMetadata.java =================================================================== --- core/trunk/src/com/vividsolutions/jump/datastore/mariadb/MariadbDSMetadata.java 2016-02-27 17:28:40 UTC (rev 4831) +++ core/trunk/src/com/vividsolutions/jump/datastore/mariadb/MariadbDSMetadata.java 2016-02-27 21:27:02 UTC (rev 4832) @@ -1,6 +1,7 @@ package com.vividsolutions.jump.datastore.mariadb; import com.vividsolutions.jump.datastore.DataStoreConnection; +import com.vividsolutions.jump.datastore.SQLUtil; import com.vividsolutions.jump.datastore.spatialdatabases.*; import com.vividsolutions.jump.datastore.GeometryColumn; import java.sql.DatabaseMetaData; @@ -10,10 +11,12 @@ public class MariadbDSMetadata extends SpatialDatabasesDSMetadata { public static String GC_COLUMN_NAME = "geometry_columns"; + /** * The second query to get geometric columns */ private String geoColumnsQuery2; + /** * The second query to get SRID */ @@ -103,8 +106,8 @@ @Override public String getGeoColumnsQuery(String datasetName) { // escape single quotes in identifier - return String.format(this.geoColumnsQuery, - SpatialDatabasesSQLBuilder.escapeSingleQuote(getTableName(datasetName))); + return String.format(this.geoColumnsQuery, + SQLUtil.escapeSingleQuote(getTableName(datasetName))); } public String getGeoColumnsQuery2(String datasetName) { @@ -115,14 +118,14 @@ public String getSridQuery(String schemaName, String tableName, String colName) { // escape single quotes in identifier // TODO: geom ? 
- return String.format(this.sridQuery, - SpatialDatabasesSQLBuilder.escapeSingleQuote(tableName), colName); + return String.format(this.sridQuery, + SQLUtil.escapeSingleQuote(tableName), colName); } public String getSridQuery2(String schemaName, String tableName, String colName) { - return String.format(this.sridQuery2, colName, colName, colName, - SpatialDatabasesSQLBuilder.escapeSingleQuote(schemaName), - SpatialDatabasesSQLBuilder.escapeSingleQuote(tableName)); + return String.format(this.sridQuery2, colName, colName, colName, + SQLUtil.escapeSingleQuote(schemaName), + SQLUtil.escapeSingleQuote(tableName)); } @Override Modified: core/trunk/src/com/vividsolutions/jump/datastore/oracle/OracleDSConnection.java =================================================================== --- core/trunk/src/com/vividsolutions/jump/datastore/oracle/OracleDSConnection.java 2016-02-27 17:28:40 UTC (rev 4831) +++ core/trunk/src/com/vividsolutions/jump/datastore/oracle/OracleDSConnection.java 2016-02-27 21:27:02 UTC (rev 4832) @@ -18,8 +18,7 @@ public class OracleDSConnection extends SpatialDatabasesDSConnection { public OracleDSConnection(Connection con) { - super(con); // ? - connection = con; + super(con); this.dbMetadata = new OracleDSMetadata(this); } @@ -66,7 +65,7 @@ OracleFeatureInputStream ifs = new OracleFeatureInputStream(connection, queryString, query.getPrimaryKey()); // Nicolas Ribot: getting FeatureSchema here actually runs the query: if an error occurs, must trap it here - FeatureSchema fs = null; + FeatureSchema fs; try { fs = ifs.getFeatureSchema(); } catch (Exception e) { @@ -83,5 +82,9 @@ return ifs; } + + public OracleValueConverterFactory getValueConverterFactory() { + return new OracleValueConverterFactory(connection); + } } Modified: core/trunk/src/com/vividsolutions/jump/datastore/oracle/OracleDSMetadata.java =================================================================== --- core/trunk/src/com/vividsolutions/jump/datastore/oracle/OracleDSMetadata.java 2016-02-27 17:28:40 UTC (rev 4831) +++ core/trunk/src/com/vividsolutions/jump/datastore/oracle/OracleDSMetadata.java 2016-02-27 21:27:02 UTC (rev 4832) @@ -6,6 +6,7 @@ package com.vividsolutions.jump.datastore.oracle; import com.vividsolutions.jump.datastore.DataStoreConnection; +import com.vividsolutions.jump.datastore.SQLUtil; import com.vividsolutions.jump.datastore.spatialdatabases.*; import com.vividsolutions.jump.datastore.GeometryColumn; import java.sql.SQLException; @@ -58,8 +59,8 @@ public String getSpatialExtentQuery1(String schema, String table, String attributeName) { // escape single quote for table name: // TODO: do it for schema/user name ? - return String.format(this.spatialExtentQuery1, schema, - SpatialDatabasesSQLBuilder.escapeSingleQuote(table), attributeName); + return String.format(this.spatialExtentQuery1, schema, + SQLUtil.escapeSingleQuote(table), attributeName); } @Override @@ -71,16 +72,16 @@ public String getGeoColumnsQuery(String datasetName) { // escape single quote for table name: // TODO: do it for schema/user name ? - return String.format(this.geoColumnsQuery, getSchemaName(datasetName), - SpatialDatabasesSQLBuilder.escapeSingleQuote(getTableName(datasetName))); + return String.format(this.geoColumnsQuery, getSchemaName(datasetName), + SQLUtil.escapeSingleQuote(getTableName(datasetName))); } @Override public String getSridQuery(String schemaName, String tableName, String colName) { // escape single quote for table name: // TODO: do it for schema/user name ? 
- return String.format(this.sridQuery, schemaName, - SpatialDatabasesSQLBuilder.escapeSingleQuote(tableName), colName); + return String.format(this.sridQuery, schemaName, + SQLUtil.escapeSingleQuote(tableName), colName); } @Override Modified: core/trunk/src/com/vividsolutions/jump/datastore/postgis/PostgisDSConnection.java =================================================================== --- core/trunk/src/com/vividsolutions/jump/datastore/postgis/PostgisDSConnection.java 2016-02-27 17:28:40 UTC (rev 4831) +++ core/trunk/src/com/vividsolutions/jump/datastore/postgis/PostgisDSConnection.java 2016-02-27 21:27:02 UTC (rev 4832) @@ -8,9 +8,6 @@ import com.vividsolutions.jump.datastore.spatialdatabases.SpatialDatabasesSQLBuilder; import com.vividsolutions.jump.feature.FeatureSchema; import com.vividsolutions.jump.io.FeatureInputStream; -import com.vividsolutions.jump.workbench.WorkbenchContext; -import com.vividsolutions.jump.workbench.ui.ErrorDialog; -import com.vividsolutions.jump.workbench.ui.WorkbenchFrame; import java.sql.Connection; import java.sql.SQLException; @@ -21,8 +18,7 @@ public class PostgisDSConnection extends SpatialDatabasesDSConnection { public PostgisDSConnection(Connection con) { - super(con); // ? - connection = con; + super(con); this.dbMetadata = new PostgisDSMetadata(this); } @@ -71,7 +67,7 @@ PostgisFeatureInputStream ifs = new PostgisFeatureInputStream(connection, queryString, query.getPrimaryKey()); // Nicolas Ribot: getting FeatureSchema here actually runs the query: if an error occurs, must trap it here - FeatureSchema fs = null; + FeatureSchema fs; try { fs = ifs.getFeatureSchema(); } catch (Exception e) { @@ -87,5 +83,9 @@ return ifs; } + + public PostgisValueConverterFactory getValueConverterFactory() { + return new PostgisValueConverterFactory(connection); + } } Modified: core/trunk/src/com/vividsolutions/jump/datastore/postgis/PostgisDSMetadata.java =================================================================== --- core/trunk/src/com/vividsolutions/jump/datastore/postgis/PostgisDSMetadata.java 2016-02-27 17:28:40 UTC (rev 4831) +++ core/trunk/src/com/vividsolutions/jump/datastore/postgis/PostgisDSMetadata.java 2016-02-27 21:27:02 UTC (rev 4832) @@ -1,8 +1,11 @@ package com.vividsolutions.jump.datastore.postgis; +import com.vividsolutions.jump.datastore.SQLUtil; import com.vividsolutions.jump.datastore.spatialdatabases.*; import com.vividsolutions.jump.datastore.DataStoreConnection; import com.vividsolutions.jump.datastore.GeometryColumn; +import com.vividsolutions.jump.feature.AttributeType; + import java.util.List; public class PostgisDSMetadata extends SpatialDatabasesDSMetadata { @@ -18,15 +21,16 @@ spatialExtentQuery2 = "SELECT ST_AsBinary(ST_Envelope(ST_Extent(\"%s\"))) FROM \"%s\".\"%s\""; geoColumnsQuery = "SELECT f_geometry_column, srid, type FROM geometry_columns where f_table_schema='%s' and f_table_name = '%s'"; sridQuery = "SELECT srid FROM geometry_columns where f_table_schema = '%s' and f_table_name = '%s' and f_geometry_column = '%s'"; + coordDimQuery = "SELECT coord_dimension FROM geometry_columns where f_table_schema = '%s' and f_table_name = '%s' and f_geometry_column = '%s'"; } @Override public String getSpatialExtentQuery1(String schema, String table, String attributeName) { //must escape single quote in idenfifiers before formatting query return String.format(this.spatialExtentQuery1, - SpatialDatabasesSQLBuilder.escapeSingleQuote(schema), - SpatialDatabasesSQLBuilder.escapeSingleQuote(table), - 
SpatialDatabasesSQLBuilder.escapeSingleQuote(attributeName)); + SQLUtil.escapeSingleQuote(schema), + SQLUtil.escapeSingleQuote(table), + SQLUtil.escapeSingleQuote(attributeName)); } @Override @@ -37,18 +41,18 @@ @Override public String getGeoColumnsQuery(String datasetName) { //must escape single quote in idenfifiers before formatting query - return String.format(this.geoColumnsQuery, - SpatialDatabasesSQLBuilder.escapeSingleQuote(getSchemaName(datasetName)), - SpatialDatabasesSQLBuilder.escapeSingleQuote(getTableName(datasetName))); + return String.format(this.geoColumnsQuery, + SQLUtil.escapeSingleQuote(getSchemaName(datasetName)), + SQLUtil.escapeSingleQuote(getTableName(datasetName))); } @Override public String getSridQuery(String schemaName, String tableName, String colName) { //must escape single quote in idenfifiers before formatting query - return String.format(this.sridQuery, - SpatialDatabasesSQLBuilder.escapeSingleQuote(schemaName), - SpatialDatabasesSQLBuilder.escapeSingleQuote(tableName), - SpatialDatabasesSQLBuilder.escapeSingleQuote(colName)); + return String.format(this.sridQuery, + SQLUtil.escapeSingleQuote(schemaName), + SQLUtil.escapeSingleQuote(tableName), + SQLUtil.escapeSingleQuote(colName)); } @Override @@ -57,4 +61,43 @@ return getGeometryAttributes(sql, datasetName); } + @Override + public String getCoordinateDimensionQuery(String schemaName, String tableName, String colName) { + //must escape single quote in idenfifiers before formatting query + return String.format(this.coordDimQuery, + SQLUtil.escapeSingleQuote(schemaName), + SQLUtil.escapeSingleQuote(tableName), + SQLUtil.escapeSingleQuote(colName)); + } + + @Override + // Return postgresql/postgis data types + protected String getDbTypeName(AttributeType type) { + if (type == AttributeType.GEOMETRY) return "geometry"; + else if (type == AttributeType.STRING) return "varchar"; + else if (type == AttributeType.INTEGER) return "int4"; + else if (type == AttributeType.LONG) return "int8"; + else if (type == AttributeType.DOUBLE) return "float8"; + else if (type == AttributeType.DATE) return "timestamp"; + else if (type == AttributeType.BOOLEAN) return "bool"; + else if (type == AttributeType.OBJECT) return "bytea"; + else return "bytea"; + } + + /** + * Create statement to add a spatial index on the specified geometry column. + * The geometry column name must have its final form. Attribute name normalization + * is the responsability of the calling method. 
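+ * <p>Example (hypothetical names): for schema "public", table "roads" and geometry
+ * column "geom", the returned statement should be equivalent to:
+ * {@code CREATE INDEX "public.roads_geom_idx" ON "public"."roads" USING GIST ( "geom" );}</p>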
+ * @param schemaName unquoted schema name or null if default schema is used + * @param tableName unquoted table name + * @param geometryColumn unquoted geometry column name + * @return a sql string to add a spatial index + */ + @Override + public String getAddSpatialIndexStatement(String schemaName, String tableName, String geometryColumn) { + return "CREATE INDEX \"" + + SQLUtil.compose(schemaName, tableName).replaceAll("\"","") + "_" + geometryColumn + "_idx\"\n" + + "ON " + SQLUtil.compose(schemaName, tableName) + " USING GIST ( \"" + geometryColumn + "\" );"; + } + } Modified: core/trunk/src/com/vividsolutions/jump/datastore/spatialdatabases/AbstractSpatialDatabasesDSDriver.java =================================================================== --- core/trunk/src/com/vividsolutions/jump/datastore/spatialdatabases/AbstractSpatialDatabasesDSDriver.java 2016-02-27 17:28:40 UTC (rev 4831) +++ core/trunk/src/com/vividsolutions/jump/datastore/spatialdatabases/AbstractSpatialDatabasesDSDriver.java 2016-02-27 21:27:02 UTC (rev 4832) @@ -13,6 +13,7 @@ import com.vividsolutions.jump.parameter.ParameterList; import com.vividsolutions.jump.parameter.ParameterListSchema; import com.vividsolutions.jump.workbench.JUMPWorkbench; +import com.vividsolutions.jump.workbench.Logger; /** * A driver for supplying {@link SpatialDatabasesDSConnection}s @@ -225,6 +226,8 @@ if (password != null) { info.put("password", password); } + Logger.info("java.net.preferIPv4Stack=" + System.getProperty("java.net.preferIPv4Stack")); + Logger.info("java.net.preferIPv6Addresses="+System.getProperty("java.net.preferIPv6Addresses")); Connection conn = driver.connect(url, info); if (savePreferIPv4Stack == null) { Modified: core/trunk/src/com/vividsolutions/jump/datastore/spatialdatabases/SpatialDatabasesDSConnection.java =================================================================== --- core/trunk/src/com/vividsolutions/jump/datastore/spatialdatabases/SpatialDatabasesDSConnection.java 2016-02-27 17:28:40 UTC (rev 4831) +++ core/trunk/src/com/vividsolutions/jump/datastore/spatialdatabases/SpatialDatabasesDSConnection.java 2016-02-27 21:27:02 UTC (rev 4832) @@ -1,17 +1,16 @@ package com.vividsolutions.jump.datastore.spatialdatabases; import com.vividsolutions.jump.I18N; -import com.vividsolutions.jump.datastore.AdhocQuery; -import com.vividsolutions.jump.datastore.DataStoreConnection; -import com.vividsolutions.jump.datastore.DataStoreException; -import com.vividsolutions.jump.datastore.DataStoreMetadata; -import com.vividsolutions.jump.datastore.FilterQuery; -import com.vividsolutions.jump.datastore.Query; -import com.vividsolutions.jump.datastore.SpatialReferenceSystemID; +import com.vividsolutions.jump.datastore.*; +import com.vividsolutions.jump.feature.AttributeType; +import com.vividsolutions.jump.feature.FeatureSchema; import com.vividsolutions.jump.io.FeatureInputStream; -import com.vividsolutions.jump.workbench.JUMPWorkbench; import java.sql.Connection; +import java.sql.ResultSet; +import java.sql.ResultSetMetaData; import java.sql.SQLException; +import java.util.ArrayList; +import java.util.List; /** * Base class for all spatial databases DataStore connections. 
No need to @@ -35,7 +34,7 @@ } @Override - public DataStoreMetadata getMetadata() { + public SpatialDatabasesDSMetadata getMetadata() { return dbMetadata; } @@ -105,4 +104,28 @@ throw new DataStoreException(e); } } + + public SpatialDatabasesValueConverterFactory getValueConverterFactory() { + return new SpatialDatabasesValueConverterFactory(connection); + } + + public String[] getCompatibleSchemaSubset(String schemaName, String tableName, + FeatureSchema featureSchema, boolean normalizedColumnNames) throws Exception { + SpatialDatabasesValueConverterFactory factory = getValueConverterFactory(); + ResultSet rs = connection.createStatement().executeQuery("SELECT * FROM " + + SQLUtil.compose(schemaName, tableName) + " LIMIT 0"); + ResultSetMetaData rsMetaData = rs.getMetaData(); + List<String> commonAttributes = new ArrayList<String>(); + for (int i = 0 ; i < featureSchema.getAttributeCount() ; i++) { + String attribut = featureSchema.getAttributeName(i); + if (normalizedColumnNames) attribut = SQLUtil.normalize(attribut); + try { + AttributeType type = factory.getConverter(rsMetaData, rs.findColumn(attribut)).getType(); + if (type == featureSchema.getAttributeType(i)); + commonAttributes.add(featureSchema.getAttributeName(i)); + } catch(SQLException e) {} + } + return commonAttributes.toArray(new String[0]); + } + } Modified: core/trunk/src/com/vividsolutions/jump/datastore/spatialdatabases/SpatialDatabasesDSMetadata.java =================================================================== --- core/trunk/src/com/vividsolutions/jump/datastore/spatialdatabases/SpatialDatabasesDSMetadata.java 2016-02-27 17:28:40 UTC (rev 4831) +++ core/trunk/src/com/vividsolutions/jump/datastore/spatialdatabases/SpatialDatabasesDSMetadata.java 2016-02-27 21:27:02 UTC (rev 4832) @@ -4,13 +4,11 @@ import com.vividsolutions.jts.geom.Geometry; import com.vividsolutions.jts.io.WKBReader; import com.vividsolutions.jts.io.WKTReader; -import com.vividsolutions.jump.datastore.DataStoreConnection; -import com.vividsolutions.jump.datastore.DataStoreMetadata; -import com.vividsolutions.jump.datastore.GeometryColumn; -import com.vividsolutions.jump.datastore.PrimaryKeyColumn; -import com.vividsolutions.jump.datastore.SpatialReferenceSystemID; +import com.vividsolutions.jump.datastore.*; import com.vividsolutions.jump.datastore.jdbc.JDBCUtil; import com.vividsolutions.jump.datastore.jdbc.ResultSetBlock; +import com.vividsolutions.jump.feature.AttributeType; +import com.vividsolutions.jump.feature.FeatureSchema; import com.vividsolutions.jump.workbench.JUMPWorkbench; import java.sql.*; @@ -37,18 +35,22 @@ * OGC WKB reader if needed: TODO: keep only in needed classes ? */ protected final WKBReader reader = new WKBReader(); + /** * OGC WKB reader if needed: TODO: keep only in needed classes ? */ protected final WKTReader txtReader = new WKTReader(); + /** * The dataStoreConnection to get MD from */ protected DataStoreConnection conn; + /** * The map of SRIDs found for these MD */ protected Map sridMap = new HashMap(); + /** * query to get list of spatial tables from the connection. Must return * following columns: distinct table_schema, table_name (if several geo @@ -57,23 +59,28 @@ * @param conn */ protected String datasetNameQuery = null; + /** * the name of the default schema */ protected String defaultSchemaName = null; + /** * The name of this SpatialDatabase */ protected String spatialDbName = null; + /** * The SQL query to get spatial extent. 
Must return following columns: a * geometric column representing the extent */ protected String spatialExtentQuery1 = null; + /** * The alternate SQL query to get spatial extent (for instance for postgis) */ protected String spatialExtentQuery2 = null; + /** * The SQL query to get list of geo columns. Must return column name (String), * srid(int) and type (string) (if spatial database does not store type in @@ -81,13 +88,19 @@ * 'SDO_GEOMETRY' as type from all_sdo_geom_metadata */ protected String geoColumnsQuery = null; + /** - * The SQL query to get a SRID for a given schema name, table name and geo - * column - * + * The SQL query to get a SRID for a given schema name, table name and + * geometry column */ protected String sridQuery = null; + /** + * The SQL query to get the coordinate dimension for a given schema name, + * table name and geometry column + */ + protected String coordDimQuery = null; + public SpatialDatabasesDSMetadata() { } @@ -137,6 +150,11 @@ return String.format(this.sridQuery, schemaName, tableName, colName); } + public String getCoordinateDimensionQuery(String schemaName, String tableName, String colName) { + // TODO + return String.format(this.coordDimQuery, schemaName, tableName, colName); + } + /** * Returns the schema name based on the given tableName: string before . if * exists, else returns schemaName @@ -451,9 +469,178 @@ return srid.toString(); } + + public int getCoordinateDimension(String datasetName, String colName) { + final StringBuffer coordDim = new StringBuffer(); + String sql = this.getCoordinateDimensionQuery(this.getSchemaName(datasetName), + this.getTableName(datasetName), colName); + JDBCUtil.execute(conn.getJdbcConnection(), sql, new ResultSetBlock() { + public void yield(ResultSet resultSet) throws SQLException { + if (resultSet.next()) { + // Nicolas Ribot: test if a null is returned + String s = resultSet.getString(1); + coordDim.append(s == null ? "0" : s); + } + } + }); + + return Integer.parseInt(coordDim.toString()); + } @Override public DataStoreConnection getDataStoreConnection() { return this.conn; } + + /** + * Returns the CREATE TABLE statement corresponding to this feature schema. + * The statement includes column names and data types, but neither geometry + * column nor primary key. + * @fSchema client feature schema + * @schemaName unquoted schema name or null to use default schema + * @tableName unquoted table name + * @param normalizeColumnNames whether column names must be normalized (lowercased + * and without special characters) or not + */ + public String getCreateTableStatement(FeatureSchema fSchema, + String schemaName, String tableName, boolean normalizeColumnNames) { + return "CREATE TABLE " + SQLUtil.compose(schemaName, tableName) + + " (" + createColumnList(fSchema, true, false, false, true, normalizeColumnNames) + ");"; + } + + /** + * Returns a comma-separated list of attributes included in schema. + * @param schema the FeatureSchema + * @param includeSQLDataType if true, each attribute name is immediately + * followed by its corresponding sql DataType + * @param includeGeometry if true, the geometry attribute is included + * @param includeExternalPK if true, the external primary key is included + * @param includeReadOnly if true, readOnly attributes are included + * @param normalizeColumnNames whether feature attribute names must be normalized + * (lower case without spacial characters) to specify + * table column names. 
+ */ + public String createColumnList(FeatureSchema schema, + boolean includeSQLDataType, + boolean includeGeometry, + boolean includeExternalPK, + boolean includeReadOnly, + boolean normalizeColumnNames) { + StringBuilder sb = new StringBuilder(); + int count = 0; + for (int i = 0 ; i < schema.getAttributeCount() ; i++) { + AttributeType type = schema.getAttributeType(i); + if (type == AttributeType.GEOMETRY && !includeGeometry) continue; + if (!includeExternalPK && schema.getExternalPrimaryKeyIndex() == i) continue; + if (!includeReadOnly && schema.getExternalPrimaryKeyIndex()!=i && schema.isAttributeReadOnly(i)) continue; + String name = normalizeColumnNames ? + SQLUtil.normalize(schema.getAttributeName(i)) + :schema.getAttributeName(i); + if (0 < count++) sb.append(", "); + sb.append("\"").append(name).append("\""); + if (includeSQLDataType) sb.append(" ").append(getDbTypeName(type)); + } + return sb.toString(); + } + + /** + * Create statement to add a spatial index on the specified geometry column. + * The geometry column name must have its final form. Attribute name normalization + * is the responsability of the calling method. + * @param schemaName unquoted schema name or null if default schema is used + * @param tableName unquoted table name + * @param geometryColumn unquoted geometry column name + * @return a sql string to add a spatial index + */ + public String getAddSpatialIndexStatement(String schemaName, String tableName, String geometryColumn) { + // Geometry index creation is different on different spatial databases + // Do not add if it is not defined + return ";"; + } + + /** + * Creates the query String to add a GeometryColumn. + * <p>Note 1 : In PostGIS 2.x, srid=-1 is automatically converted to srid=0 by + * AddGeometryColumn function.</p> + * <p>Note 2 : To stay compatible with PostGIS 1.x, last argument of + * AddGeometryColumn is omitted. As a consequence, geometry type is inserted + * a the column type rather than a constraint (new default behaviour in 2.x)</p> + * <p>The geometry column name must have its final form. Attribute name normalization + * is the responsability of the calling method.</p> + */ + public String getAddGeometryColumnStatement(String schemaName, String tableName, + String geometryColumn, int srid, String geometryType, int dim) { + if (schemaName == null) { + return "SELECT AddGeometryColumn('" + tableName + "','" + + geometryColumn + "'," + + srid + ",'" + + geometryType.toUpperCase() + "'," + + dim + ");"; + } else { + return "SELECT AddGeometryColumn('" + schemaName + "','" + + tableName + "','" + + geometryColumn + "'," + + srid + ",'" + + geometryType.toUpperCase() + "'," + + dim + ");"; + } + } + + /** + * Return standard SQL data type for OpenJUMP AttributeType. 
+ * This method must be overloaded by specific database oj2dbType + * @param type OpenJUMP attribute type + * @return + */ + protected String getDbTypeName(AttributeType type) { + if (type == AttributeType.GEOMETRY) return "varbinary"; + else if (type == AttributeType.STRING) return "varchar"; + else if (type == AttributeType.INTEGER) return "integer"; + else if (type == AttributeType.LONG) return "bigint"; + else if (type == AttributeType.DOUBLE) return "double precision"; + else if (type == AttributeType.NUMERIC) return "numeric"; + else if (type == AttributeType.DATE) return "timestamp"; + else if (type == AttributeType.BOOLEAN) return "boolean"; + else if (type == AttributeType.OBJECT) return "varbinary"; + else return "varchar"; + } + + /** + * Return the JDBC datatype from the native datatype. + * This method is implemented for PostgreSQL datatypes. It must be overloaded + * by specific database mapping. + * @param sqlType + * @return + */ + //protected int getJdbcTypeFromSQL(String sqlType) { + // if (sqlType.equals("character")) return Types.VARCHAR; + // else if (sqlType.equals("character varying")) return Types.VARCHAR; + // else if (sqlType.equals("text")) return Types.VARCHAR; + // else if (sqlType.equals("integer")) return Types.INTEGER; + // else if (sqlType.equals("bigint")) return Types.BIGINT; + // else if (sqlType.equals("bigserial")) return Types.BIGINT; + // else if (sqlType.equals("bit")) return Types.BIT; + // else if (sqlType.equals("boolean")) return Types.BOOLEAN; + // else if (sqlType.equals("date")) return Types.DATE; + // else if (sqlType.equals("decimal")) return Types.NUMERIC; + // else if (sqlType.equals("double")) return Types.DOUBLE; + // else if (sqlType.equals("double precision")) return Types.DOUBLE; + // else if (sqlType.equals("int4")) return Types.INTEGER; + // else if (sqlType.equals("int8")) return Types.BIGINT; + // else if (sqlType.equals("json")) return Types.VARCHAR; + // else if (sqlType.equals("numeric")) return Types.NUMERIC; + // else if (sqlType.equals("real")) return Types.REAL; + // else if (sqlType.equals("smallint")) return Types.SMALLINT; + // else if (sqlType.equals("serial")) return Types.BIGINT; + // else if (sqlType.equals("serial4")) return Types.INTEGER; + // else if (sqlType.equals("serial8")) return Types.BIGINT; + // else if (sqlType.equals("timestamp")) return Types.TIMESTAMP; + // else if (sqlType.equals("timestamp with time zone")) return Types.TIMESTAMP; + // else if (sqlType.equals("timestamp without time zone")) return Types.TIMESTAMP; + // else if (sqlType.equals("time")) return Types.TIME; + // else if (sqlType.equals("varchar")) return Types.VARCHAR; + // else return Types.JAVA_OBJECT; + //} +// + } Modified: core/trunk/src/com/vividsolutions/jump/datastore/spatialdatabases/SpatialDatabasesSQLBuilder.java =================================================================== --- core/trunk/src/com/vividsolutions/jump/datastore/spatialdatabases/SpatialDatabasesSQLBuilder.java 2016-02-27 17:28:40 UTC (rev 4831) +++ core/trunk/src/com/vividsolutions/jump/datastore/spatialdatabases/SpatialDatabasesSQLBuilder.java 2016-02-27 21:27:02 UTC (rev 4832) @@ -1,5 +1,7 @@ package com.vividsolutions.jump.datastore.spatialdatabases; +import com.vividsolutions.jts.geom.Geometry; +import com.vividsolutions.jts.io.WKBWriter; import com.vividsolutions.jump.datastore.DataStoreLayer; import com.vividsolutions.jump.datastore.FilterQuery; import com.vividsolutions.jump.datastore.SpatialReferenceSystemID; @@ -24,7 +26,7 @@ /** * Builds a valid 
SQL spatial query with the given spatial filter. - * @param query + * @param query the filter query * @return a SQL query to get column names */ public String getSQL(FilterQuery query) { @@ -43,8 +45,8 @@ /** * Returns the string representing a SQL column definition. * Implementors should take care of column names (case, quotes) - * @param colNames - * @param geomColName + * @param colNames list of column names + * @param geomColName name of the geometry column * @return column list */ protected String getColumnListSpecifier(String[] colNames, String geomColName) { @@ -69,14 +71,6 @@ else return srid.getString(); } - - /** - * Utility method to escape single quotes in given identifier. - * Replace all single quotes ("'") by double single quotes ("''") - * @param identifier - * @return the identifier with single quotes escaped, or identifier if no string found - */ - public static String escapeSingleQuote(String identifier) { - return identifier == null ? null : identifier.replaceAll("'", "''"); - } + + } Modified: core/trunk/src/com/vividsolutions/jump/datastore/spatialite/SpatialiteDSConnection.java =================================================================== --- core/trunk/src/com/vividsolutions/jump/datastore/spatialite/SpatialiteDSConnection.java 2016-02-27 17:28:40 UTC (rev 4831) +++ core/trunk/src/com/vividsolutions/jump/datastore/spatialite/SpatialiteDSConnection.java 2016-02-27 21:27:02 UTC (rev 4832) @@ -19,8 +19,7 @@ public class SpatialiteDSConnection extends SpatialDatabasesDSConnection { public SpatialiteDSConnection(Connection con) { - super(con); // ? - connection = con; + super(con); this.dbMetadata = new SpatialiteDSMetadata(this); } @@ -28,14 +27,12 @@ * Keeps a reference on SpatialiteDSMetadata into the SQL builder, to access * Spatialite preferences necessary in order to build proper queries according to * geometric type - * @param srid - * @param colNames - * @return + * @param srid srid of the dataset + * @param colNames list of column names */ @Override public SpatialDatabasesSQLBuilder getSqlBuilder(SpatialReferenceSystemID srid, String[] colNames) { - SpatialiteSQLBuilder ret = new SpatialiteSQLBuilder((SpatialiteDSMetadata)this.dbMetadata, srid, colNames); - return ret; + return new SpatialiteSQLBuilder((SpatialiteDSMetadata)this.dbMetadata, srid, colNames); } /** @@ -81,7 +78,7 @@ ifs.setMetadata((SpatialiteDSMetadata)dbMetadata); // Nicolas Ribot: getting FeatureSchema here actually runs the query: if an error occurs, must trap it here - FeatureSchema fs = null; + FeatureSchema fs; try { fs = ifs.getFeatureSchema(); } catch (Exception e) { @@ -98,5 +95,9 @@ return ifs; } + + public SpatialiteValueConverterFactory getValueConverterFactory() { + return new SpatialiteValueConverterFactory(connection); + } } Modified: core/trunk/src/org/openjump/core/ui/plugin/datastore/WritableDataStoreDataSource.java =================================================================== --- core/trunk/src/org/openjump/core/ui/plugin/datastore/WritableDataStoreDataSource.java 2016-02-27 17:28:40 UTC (rev 4831) +++ core/trunk/src/org/openjump/core/ui/plugin/datastore/WritableDataStoreDataSource.java 2016-02-27 21:27:02 UTC (rev 4832) @@ -1,18 +1,15 @@ package org.openjump.core.ui.plugin.datastore; -import static org.openjump.core.ui.plugin.datastore.postgis.PostGISQueryUtil.compose; -import static org.openjump.core.ui.plugin.datastore.postgis.PostGISQueryUtil.getGeometryDimension; - -import java.sql.DatabaseMetaData; -import java.sql.PreparedStatement; -import 
java.sql.SQLException; -import java.sql.Timestamp; +import java.sql.*; import java.util.*; +import java.util.Date; import javax.swing.JOptionPane; -import org.openjump.core.ui.plugin.datastore.postgis.PostGISConnectionUtil; -import org.openjump.core.ui.plugin.datastore.postgis.PostGISQueryUtil; +import com.vividsolutions.jts.geom.Coordinate; +import com.vividsolutions.jump.datastore.SQLUtil; +import com.vividsolutions.jump.datastore.spatialdatabases.SpatialDatabasesDSConnection; +import com.vividsolutions.jump.datastore.spatialdatabases.SpatialDatabasesSQLBuilder; import org.openjump.core.ui.plugin.datastore.transaction.DataStoreTransactionManager; import org.openjump.core.ui.plugin.datastore.transaction.Evolution; import org.openjump.core.ui.plugin.datastore.transaction.EvolutionOperationException; @@ -147,7 +144,7 @@ public FeatureCollection executeQuery(String query, Collection exceptions, TaskMonitor monitor) { try { - String[] datasetName = PostGISQueryUtil.splitTableName((String)getProperties().get(DATASET_NAME_KEY)); + String[] datasetName = SQLUtil.splitTableName((String) getProperties().get(DATASET_NAME_KEY)); schemaName = datasetName[0]; tableName = datasetName[1]; primaryKeyName = (String)getProperties().get(EXTERNAL_PK_KEY); @@ -179,7 +176,7 @@ boolean normalizedColumnNames = getProperties().containsKey(NORMALIZED_COLUMN_NAMES) ? (Boolean)getProperties().get(NORMALIZED_COLUMN_NAMES) : false; - String[] datasetName = PostGISQueryUtil.splitTableName((String)getProperties().get(DATASET_NAME_KEY)); + String[] datasetName = SQLUtil.splitTableName((String) getProperties().get(DATASET_NAME_KEY)); schemaName = datasetName[0]; tableName = datasetName[1]; String geometryColumn = (String)getProperties().get(WritableDataStoreDataSource.GEOMETRY_ATTRIBUTE_NAME_KEY); @@ -189,12 +186,12 @@ getGeometryDimension(featureCollection, 3) : (Integer)getProperties().get(GEOM_DIM_KEY); - PostgisDSConnection pgConnection = + SpatialDatabasesDSConnection conn = (PostgisDSConnection)new PostgisDataStoreDriver() .createConnection(connectionDescriptor.getParameterList()); - java.sql.Connection conn = pgConnection.getJdbcConnection(); + java.sql.Connection jdbcConn = conn.getJdbcConnection(); try { - conn.setAutoCommit(false); + jdbcConn.setAutoCommit(false); if (!tableAlreadyCreated) { Logger.debug("Update mode: create table"); boolean exists = tableExists(conn); @@ -216,7 +213,7 @@ // This instruction is not compatible with the transaction mode // ==> If PostGISFeatureInputStream#init() is made transactionnal // we must check that all calling methods do commit it. 
- conn.commit(); + jdbcConn.commit(); reloadDataFromDataStore(this, monitor); } tableAlreadyCreated = true; @@ -225,18 +222,17 @@ Logger.debug("Update mode: update table"); primaryKeyName = (String)getProperties().get(EXTERNAL_PK_KEY); FeatureSchema featureSchema = featureCollection.getFeatureSchema(); - PostGISConnectionUtil connUtil = new PostGISConnectionUtil(conn); - if (connUtil.compatibleSchemaSubset(schemaName, tableName, featureSchema, normalizedColumnNames) + if (conn.getCompatibleSchemaSubset(schemaName, tableName, featureSchema, normalizedColumnNames) .length < featureSchema.getAttributeCount()) { if (!confirmWriteDespiteDifferentSchemas()) return; } commit(conn, srid, dim, normalizedColumnNames); evolutions.clear(); } - conn.commit(); + jdbcConn.commit(); } finally { - if (conn != null) conn.setAutoCommit(true); + if (jdbcConn != null) jdbcConn.setAutoCommit(true); } // Adding vacuum analyze seems to be necessary to be able to use // ST_Estimated_Extent on the newly created table @@ -256,10 +252,10 @@ * Example : perform a vacuum analyze in PostgreSQL to compact database and to * update statistics (needed by ST_Estimated_Extent function) */ - public abstract void finalizeUpdate(java.sql.Connection conn) throws Exception; + public abstract void finalizeUpdate(SpatialDatabasesDSConnection conn) throws Exception; - private void commit(java.sql.Connection conn, + private void commit(SpatialDatabasesDSConnection conn, int srid, int dim, boolean normalizedColumnNames) throws Exception { Logger.info("Evolutions to commit to " + schemaName + "." + tableName + " (PK=" + primaryKeyName +")"); @@ -304,12 +300,12 @@ * @return a PreparedStatement * @throws SQLException */ - protected PreparedStatement insertStatement(java.sql.Connection conn, + protected PreparedStatement insertStatement(SpatialDatabasesDSConnection conn, FeatureSchema fSchema, boolean normalizedColumnNames) throws SQLException { - StringBuilder sb = new StringBuilder("INSERT INTO " + compose(schemaName, tableName) + "("); + StringBuilder sb = new StringBuilder("INSERT INTO " + SQLUtil.compose(schemaName, tableName) + "("); // create a column name list without datatypes, including geometry and excluding primary key - sb.append(PostGISQueryUtil.createColumnList(fSchema, false, true, false, false, normalizedColumnNames)) + sb.append(conn.getMetadata().createColumnList(fSchema, false, true, false, false, normalizedColumnNames)) .append(") VALUES("); //int nbValues = fSchema.getAttributeCount(); //if (primaryKeyName != null && fSchema.hasAttribute(primaryKeyName)) nbValues --; @@ -322,28 +318,28 @@ } sb.append(");"); Logger.trace(sb.toString()); - PreparedStatement pstmt = conn.prepareStatement(sb.toString()); + PreparedStatement pstmt = conn.getJdbcConnection().prepareStatement(sb.toString()); return pstmt; } - private PreparedStatement updateOneAttributeStatement(java.sql.Connection conn, + private PreparedStatement updateOneAttributeStatement(SpatialDatabasesDSConnection conn, Feature feature, int attribute, int srid, int dim) throws SQLException { FeatureSchema schema = feature.getSchema(); boolean quoted = schema.getAttributeType(schema.getExternalPrimaryKeyIndex()) == AttributeType.STRING; String quoteKey = quoted ? 
"'" : ""; - StringBuilder sb = new StringBuilder("UPDATE ").append(compose(schemaName, tableName)) + StringBuilder sb = new StringBuilder("UPDATE ").append(SQLUtil.compose(schemaName, tableName)) .append(" SET \"").append(schema.getAttributeName(attribute)).append("\" = ?") .append(" WHERE \"").append(primaryKeyName).append("\" = ") .append(quoteKey).append(feature.getAttribute(primaryKeyName)).append(quoteKey).append(";"); - PreparedStatement pstmt = conn.prepareStatement(sb.toString()); + PreparedStatement pstmt = conn.getJdbcConnection().prepareStatement(sb.toString()); AttributeType type = schema.getAttributeType(attribute); if (feature.getAttribute(attribute) == null) pstmt.setObject(1, null); else if (type == AttributeType.STRING) pstmt.setString(1, feature.getString(attribute)); else if (type == AttributeType.GEOMETRY) { - pstmt.setBytes(1, PostGISQueryUtil.getByteArrayFromGeometry((Geometry)feature.getAttribute(attribute), srid, dim)); + pstmt.setBytes(1, SQLUtil.getByteArrayFromGeometry((Geometry) feature.getAttribute(attribute), srid, dim)); } else if (type == AttributeType.INTEGER) pstmt.setInt(1, feature.getInteger(attribute)); else if (type == AttributeType.LONG) pstmt.setLong(1, (Long) feature.getAttribute(attribute)); @@ -356,8 +352,9 @@ return pstmt; } - private PreparedStatement deleteStatement(java.sql.Connection conn, Feature feature) throws SQLException { - PreparedStatement pstmt = conn.prepareStatement("DELETE FROM " + compose(schemaName, tableName) + " WHERE \"" + primaryKeyName + "\" = ?"); + private PreparedStatement deleteStatement(SpatialDatabasesDSConnection conn, Feature feature) throws SQLException { + PreparedStatement pstmt = conn.getJdbcConnection() + .prepareStatement("DELETE FROM " + SQLUtil.compose(schemaName, tableName) + " WHERE \"" + primaryKeyName + "\" = ?"); pstmt.setObject(1,feature.getAttribute(primaryKeyName)); Logger.debug(pstmt.toString()); return pstmt; @@ -380,7 +377,7 @@ if (feature.getAttribute(i) == null) pstmt.setObject(index++, null); else if (type == AttributeType.STRING) pstmt.setString(index++, feature.getString(i)); else if (type == AttributeType.GEOMETRY) { - pstmt.setBytes(index++, PostGISQueryUtil.getByteArrayFromGeometry((Geometry)feature.getAttribute(i), srid, dim)); + pstmt.setBytes(index++, SQLUtil.getByteArrayFromGeometry((Geometry)feature.getAttribute(i), srid, dim)); } else if (type == AttributeType.INTEGER) pstmt.setInt(index++, feature.getInteger(i)); else if (type == AttributeType.LONG) pstmt.setLong(index++, (Long) feature.getAttribute(i)); @@ -391,7 +388,8 @@ if (feature.getAttribute(i) instanceof Geometry) { // In our use case, other geometry attributes use the same srid as the main geometry // but always have dimension = 2. This use case does not fit all ! - pstmt.setBytes(index++, PostGISQueryUtil.getByteArrayFromGeometry((Geometry)feature.getAttribute(i), false, 2)); + int object_srid = ((Geometry)feature.getAttribute(i)).getSRID(); + pstmt.setBytes(index++, SQLUtil.getByteArrayFromGeometry((Geometry)feature.getAttribute(i), object_srid, 2)); } else pstmt.setObject(index++, feature.getAttribute(i)); } @@ -441,7 +439,7 @@ * To be used cautiously : this method is used by DataStoreTransactionManager to remove * an evolution when the newFeature of this evolution happens to be the same as the last * version updated from the server (false conflict). 
- * @param fid + * @param fid id of the feature to be removed in the evolution stack */ public void removeEvolution(int fid) { evolutions.remove(fid); @@ -486,8 +484,8 @@ /** * Check if this [schema.]table exists in this database. */ - private boolean tableExists(java.sql.Connection connection) throws SQLException { - DatabaseMetaData metadata = connection.getMetaData(); + private boolean tableExists(SpatialDatabasesDSConnection conn) throws SQLException { + DatabaseMetaData metadata = conn.getJdbcConnection().getMetaData(); return metadata.getTables(null, schemaName, tableName, new String[]{"TABLE"}).next(); } @@ -497,7 +495,7 @@ * @schemaName unquoted schema name * @tableName unquoted table name */ - abstract protected void deleteTableQuery(java.sql.Connection connection) throws SQLException; + abstract protected void deleteTableQuery(SpatialDatabasesDSConnection conn) throws SQLException; /** * Create and populate a table with features from a dataset. @@ -510,7 +508,7 @@ * @throws SQLException */ abstract protected void createAndPopulateTable( - java.sql.Connection conn, + SpatialDatabasesDSConnection conn, FeatureCollection fc, int srid, String geometryType, int dim, boolean normalizedColumnNames) throws SQLException; @@ -519,7 +517,7 @@ /** * Add an automatically named primary key constraint to the table. */ - protected abstract void addDBPrimaryKey(java.sql.Connection conn, String primaryKey) throws SQLException; + protected abstract void addDBPrimaryKey(SpatialDatabasesDSConnection conn, String primaryKey) throws SQLException; // @TODO Bad design : it should be possible to do this kind of post-processing @@ -543,4 +541,34 @@ } } + /** + * Return 3 if coll contains at least one 3d geometry, 2 if coll contains + * only 2d geometries and defaultDim if coll is empty. 
+ */ + public static int getGeometryDimension(FeatureCollection coll, int defaultDim) { + if (coll.size() > 0) { + // will explore up to 1000 features regularly distributed in the dataset + // if none of these feature has dim = 3, return 2, else return 3 + int step = 1 + coll.size()/1000; + int count = 0; + for (Iterator it = coll.iterator() ; it.hasNext() ; ) { + if (count%step == 0 && + getGeometryDimension(((Feature)it.next()).getGeometry()) == 3) { + return 3; + } + count++; + } + return 2; + } else return defaultDim; + } + + + private static int getGeometryDimension(Geometry g) { + Coordinate[] cc = g.getCoordinates(); + for (Coordinate c : cc) { + if (!Double.isNaN(c.z)) return 3; + } + return 2; + } + } Deleted: core/trunk/src/org/openjump/core/ui/plugin/datastore/postgis/PostGISConnectionUtil.java =================================================================== --- core/trunk/src/org/openjump/core/ui/plugin/datastore/postgis/PostGISConnectionUtil.java 2016-02-27 17:28:40 UTC (rev 4831) +++ core/trunk/src/org/openjump/core/ui/plugin/datastore/postgis/PostGISConnectionUtil.java 2016-02-27 21:27:02 UTC (rev 4832) @@ -1,86 +0,0 @@ -package org.openjump.core.ui.plugin.datastore.postgis; - -import com.vividsolutions.jump.feature.AttributeType; -import com.vividsolutions.jump.feature.FeatureSchema; - -import java.sql.Connection; -import java.sql.DatabaseMetaData; -import java.sql.ResultSet; -import java.sql.SQLException; -import java.util.*; - -/** - * static methods to help formatting sql statements for PostGIS - */ -public class PostGISConnectionUtil { - - Connection connection; - - public PostGISConnectionUtil(Connection connection) { - this.connection = connection; - } - - /** - * Returns the geometry dimension defined in geometry_columns for this table. - */ - public int getGeometryDimension(String dbSchema, String dbTable, int defaultDim) { - try { - StringBuilder query = new StringBuilder("SELECT coord_dimension FROM geometry_columns WHERE "); - if (dbSchema != null) query.append("f_table_schema = '").append(dbSchema).append("' AND "); - query.append("f_table_name = '").append(dbTable).append("';"); - ResultSet rs = connection.createStatement().executeQuery(query.toString()); - if (rs.next()) return rs.getInt(1); - else return defaultDim; - } catch(SQLException sqle) { - return defaultDim; - } - } - - /** - * Returns the srid defined in geometry_columns for this table. - */ - public int getGeometrySrid(String dbSchema, String dbTable, int defaultSrid) { - try { - StringBuilder query = new StringBuilder("SELECT srid FROM geometry_columns WHERE "); - if (dbSchema != null) query.append("f_table_schema = '").append(dbSchema).append("' AND "); - query.append("f_table_name = '").append(dbTable).append("';"); - ResultSet rs = connection.createStatement().executeQuery(query.toString()); - if (rs.next()) return rs.getInt(1); - else return defaultSrid; - } catch(SQLException sqle) { - return defaultSrid; - } - } - - /** - * Returns a list of attributes compatible between postgis table and featureSchema. 
- */ - public String[] compatibleSchemaSubset(String schemaName, String tableName, - FeatureSchema featureSchema, boolean normalizedColumnNames) throws SQLException { - DatabaseMetaData metadata = connection.getMetaData(); - ResultSet rs = metadata.getColumns(null, schemaName, tableName, null); - // map database column names to cooresponding feature attribute types - Map<String,AttributeType> map = new HashMap<String,AttributeType>(); - while (rs.next()) { - String name = rs.getString("COLUMN_NAME"); - AttributeType type = PostGISQueryUtil.getAttributeType(rs.getInt("DATA_TYPE"), rs.getString("TYPE_NAME")); - // Only one attribute must use the AttributeType.GEOMETRY - if (type == AttributeType.GEOMETRY && featureSchema.getAttributeType(name) != AttributeType.GEOMETRY) { - map.put(name, AttributeType.OBJECT); - } - else map.put(name, type); - } - List<String> subset = new ArrayList<String>(); - for (int i = 0 ; i < featureSchema.getAttributeCount() ; i++) { - String attribute = normalizedColumnNames ? - PostGISQueryUtil.normalize(featureSchema.getAttributeName(i)) - :featureSchema.getAttributeName(i); - AttributeType type = featureSchema.getAttributeType(i); - if (map.containsKey(attribute) && (map.get(attribute)==type)) { - subset.add(attribute); - } - } - return subset.toArray(new String[subset.size()]); - } - -} \ No newline at end of file Deleted: core/trunk/src/org/openjump/core/ui/plugin/datastore/postgis/PostGISQueryUtil.java =================================================================== --- core/trunk/src/org/openjump/core/ui/plugin/datastore/postgis/PostGISQueryUtil.java 2016-02-27 17:28:40 UTC (rev 4831) +++ core/trunk/src/org/openjump/core/ui/plugin/datastore/postgis/PostGISQueryUtil.java 2016-02-27 21:27:02 UTC (rev 4832) @@ -1,378 +0,0 @@ -package org.openjump.core.ui.plugin.datastore.postgis; - -import com.vividsolutions.jts.geom.Coordinate; -import com.vividsolutions.jts.geom.Geometry; -import com.vividsolutions.jts.io.WKBWriter; -import com.vividsolutions.jump.coordsys.CoordinateSystem; -import com.vividsolutions.jump.feature.AttributeType; -import com.vividsolutions.jump.feature.Feature; -import com.vividsolutions.jump.feature.FeatureCollection; -import com.vividsolutions.jump.feature.FeatureSchema; - -import java.sql.Types; -import java.util.Arrays; -import java.util.Iterator; -import java.util.List; - -/** - * static methods to help formatting sql statements for PostGIS - * @see org.openjump.core.ui.plugin.datastore.postgis.PostGISConnectionUtil - * for tools relying on the current connection - */ -public class PostGISQueryUtil { - - private static final WKBWriter WRITER2D = new WKBWriter(2, false); - private static final WKBWriter WRITER3D = new WKBWriter(3, false); - private static final WKBWriter WRITER2D_SRID = new WKBWriter(2, true); - private static final WKBWriter WRITER3D_SRID = new WKBWriter(3, true); - - /** - * Returns a pair of strings containing unquoted schema and table names - * from a full table name. 
If the fullName contains only one part (table - * name), the returned array contains a null element at index 0<br> - * Examples :<br> - * <ul> - * <li>myschema.mytable -> [myschema, mytable]</li> - * <li>"MySchema"."MyTable" -> [MySchema, MyTable]</li> - * <li>MyTable -> [null, MyTable]</li> - * <li>2_table -> [null, 2_table]</li> - * </ul> - */ - public static String[] splitTableName(String fullName) { - - if (isQuoted(fullName)) { - return splitQuotedTableName(fullName); - } - int index = fullName.indexOf("."); - // no schema - if (index == -1) { - if (fullName.matches("(?i)^[A-Z_].*")) return new String[]{null, fullName}; - else return new String[]{null, "\"" + fullName + "\""}; - } - // schema + table name - else { - String dbSchema = fullName.substring(0, index); - String dbTable = fullName.substring(index+1, fullName.length()); - return new String[]{dbSchema, dbTable}; - } - } - - private static String[] splitQuotedTableName(String fullName) { - int index = fullName.indexOf("\".\""); - if (index > -1) { - return new String[]{ - unquote(fullName.substring(0, index)), - unquote(fullName.substring(index+1, fullName.length())) - }; - } - else return new String[]{null, unquote(fullName)}; - } - - private static boolean isQuoted(String s) { - return s.startsWith("\"") && s.endsWith("\""); - } - - /** - * Returns s if s is already quoted (with double-quotes), and a quoted - * version of s otherwise. Returns null if s is null. - */ - public static String quote(String s) { - if (s == null) return null; - if (isQuoted(s)) return s; - else return "\"" + s + "\""; - } - - /** - * Returns s without initial and final double quotes if any. - * Returns null if s is null. - */ - public static String unquote(String s) { - if (s == null) return null; - if (!isQuoted(s)) return s; - else return s.substring(1, s.length()-1); - } - - /** - * Compose concatenated quoted schema name and table name. - * @schemaName unquoted schema name - * @tableName unquoted table name - */ - public static String compose(String schemaName, String tableName) { - return schemaName == null ? - "\"" + tableName + "\"" : - "\"" + schemaName + "\".\"" + tableName + "\""; - } - - /** - * Returns the CREATE TABLE statement corresponding to this feature schema. - * The statement includes column names and data types, but neither geometry - * column nor primary key. - * @fSchema client feature schema - * @schemaName unquoted schema name or null to use default schema - * @tableName unquoted table name - * @param normalizeColumnNames whether column names must be normalized (lowercased - * and without special characters) or not - */ - public static String getCreateTableStatement(FeatureSchema fSchema, - String schemaName, String tableName, boolean normalizeColumnNames) { - return "CREATE TABLE " + compose(schemaName, tableName) + - " (" + createColumnList(fSchema, true, false, false, true, normalizeColumnNames) + ");"; - } - - - /** - * Create statement to add a spatial index on the specified geometry column. - * The geometry column name must have its final form. Attribute name normalization - * is the responsability of the calling method. 
- * @param schemaName unquoted schema name or null if default schema is used - * @param tableName unquoted table name - * @param geometryColumn unquoted geometry column name - * @return a sql string to add a spatial index - */ - public static String getAddSpatialIndexStatement(String schemaName, String tableName, - String geometryColumn) { - return "CREATE INDEX \"" + - compose(schemaName, tableName).replaceAll("\"","") + "_" + geometryColumn + "_idx\"\n" + - "ON " + compose(schemaName, tableName) + " USING GIST ( \"" + geometryColumn + "\" );"; - } - - /** - * Returns the comma-separated list of attributes included in schema. - * @param schema the FeatureSchema - * @param includeSQLDataType if true, each attribute name is immediately - * followed by its corresponding sql DataType - * @param includeGeometry if true, the geometry attribute is included - * @param includeExternalPK if true, the external primary key is included - * @param normalizeColumnNames whether feature attribute names must be normalized - * (lower case without spacial characters) to specify - * table column names. - */ - public static String createColumnList(FeatureSchema schema, - boolean includeSQLDataType, - boolean includeGeometry, - boolean includeExternalPK, - boolean includeReadOnly, - boolean normalizeColumnNames) { - StringBuilder sb = new StringBuilder(); - int count = 0; - for (int i = 0 ; i < schema.getAttributeCount() ; i++) { - AttributeType type = schema.getAttributeType(i); - if (type == AttributeType.GEOMETRY && !includeGeometry) continue; - if (!includeExternalPK && schema.getExternalPrimaryKeyIndex() == i) continue; - if (!includeReadOnly && schema.getExternalPrimaryKeyIndex()!=i && schema.isAttributeReadOnly(i)) continue; - String name = normalizeColumnNames ? 
- normalize(schema.getAttributeName(i)) - :schema.getAttributeName(i); - if (0 < count++) sb.append(", "); - sb.append("\"").append(name).append("\""); - if (includeSQLDataType) sb.append(" ").append(getSQLType(type)); - } - return sb.toString(); - } - - public static String escapeApostrophes(String value) { - return value.replaceAll("'", "''"); - } - - /** - * Returns the sql data type matching this OpenJUMP AttributeType - */ - public static String getSQLType(AttributeType type) { - if (type == AttributeType.STRING) return "varchar"; - if (type == AttributeType.INTEGER) return "integer"; - if (type == AttributeType.LONG) return "bigint"; - if (type == AttributeType.DOUBLE) return "double precision"; - if (type == AttributeType.BOOLEAN) return "boolean"; - if (type == AttributeType.DATE) return "timestamp"; - if (type == AttributeType.OBJECT) return "bytea"; - if (type == AttributeType.GEOMETRY) return "geometry"; - throw new IllegalArgumentException("" + type + " is an unknown AttributeType"); - } - - - /** - * Returns the OpenJUMP AttributeType matching this sql data type - */ - public static AttributeType getAttributeType(int sqlType, String sqlName) { - if (sqlType == Types.BIGINT) return AttributeType.LONG; - // PostGIS geometries are stored as OTHER (type=1111) not BINARY (type=-2) - if (sqlType == Types.BINARY && - sqlName.toLowerCase().equals("geometry")) return AttributeType.GEOMETRY; - else if (sqlType == Types.BINARY) return AttributeType.OBJECT; - if (sqlType == Types.BIT) return AttributeType.BOOLEAN; - if (sqlType == Types.BLOB) return AttributeType.OBJECT; - if (sqlType == Types.BOOLEAN) return AttributeType.BOOLEAN; - if (sqlType == Types.CHAR) return AttributeType.STRING; - if (sqlType == Types.CLOB) return AttributeType.STRING; - if (sqlType == Types.DATALINK) return AttributeType.OBJECT; - if (sqlType == Types.DATE) return AttributeType.DATE; - if (sqlType == Types.DECIMAL) return AttributeType.DOUBLE; - if (sqlType == Types.DISTINCT) return AttributeType.OBJECT; - if (sqlType == Types.DOUBLE) return AttributeType.DOUBLE; - if (sqlType == Types.FLOAT) return AttributeType.DOUBLE; - if (sqlType == Types.INTEGER) return AttributeType.INTEGER; - if (sqlType == Types.JAVA_OBJECT) return AttributeType.OBJECT; - if (sqlType == Types.LONGNVARCHAR) return AttributeType.STRING; - if (sqlType == Types.LONGVARBINARY) return AttributeType.OBJECT; - if (sqlType == Types.LONGVARCHAR) return AttributeType.STRING; - if (sqlType == Types.NCHAR) return AttributeType.STRING; - if (sqlType == Types.NCLOB) return AttributeType.STRING; - if (sqlType == Types.NULL) return AttributeType.OBJECT; - if (sqlType == Types.NUMERIC) return AttributeType.DOUBLE; - if (sqlType == Types.NVARCHAR) return AttributeType.STRING; - if (sqlType == Types.OTHER && - sqlName.toLowerCase().equals("geometry")) return AttributeType.GEOMETRY; - else if (sqlType == Types.OTHER) return AttributeType.OBJECT; - if (sqlType == Types.REAL) return AttributeType.DOUBLE; - if (sqlType == Types.REF) return AttributeType.OBJECT; - if (sqlType == Types.ROWID) return AttributeType.INTEGER; - if (sqlType == Types.SMALLINT) return AttributeType.INTEGER; - if (sqlType == Types.SQLXML) return AttributeType.STRING; - if (sqlType == Types.STRUCT) return AttributeType.OBJECT; - if (sqlType == Types.TIME) return AttributeType.DATE; - if (sqlType == Types.TIMESTAMP) return AttributeType.DATE; - if (sqlType == Types.TINYINT) return AttributeType.INTEGER; - if (sqlType == Types.VARBINARY) return AttributeType.OBJECT; - if (sqlType == 
Types.VARCHAR) return AttributeType.STRING; - throw new IllegalArgumentException("" + sqlType + " is an unknown SQLType"); - } - - /** - * Creates the query String to add a GeometryColumn. - * <p>Note 1 : In PostGIS 2.x, srid=-1 is automatically converted to srid=0 by - * AddGeometryColumn function.</p> - * <p>Note 2 : To stay compatible with PostGIS 1.x, last argument of - * AddGeometryColumn is omitted. As a consequence, geometry type is inserted - * a the column type rather than a constraint (new default behaviour in 2.x)</p> - * <p>The geometry column name must have its final form. Attribute name normalization - * is the responsability of the calling method.</p> - */ - public static String getAddGeometryColumnStatement(String schemaName, String tableName, - String geometryColumn, int srid, String geometryType, int dim) { - if (schemaName == null) { - return "SELECT AddGeometryColumn('" + tableName + "','" + - geometryColumn + "'," + - srid + ",'" + - geometryType.toUpperCase() + "'," + - dim + ");"; - } else { - return "SELECT AddGeometryColumn('" + schemaName + "','" + - tableName + "','" + - geometryColumn + "'," + - srid + ",'" + - geometryType.toUpperCase() + "'," + - dim + ");"; - } - } - - /** - * Converts the geometry into a byte array in EWKB format - * @param geom the geometry to convert to a byte array - * @param hasSrid whether the geometry srid has to be included in the byte array or not - * @param dimension geometry dimension (2 or 3) - * @return a byte array containing a EWKB representation of the geometry - */ - public static byte[] getByteArrayFromGeometry(Geometry geom, boolean hasSrid, int dimension) { - WKBWriter writer; - if (hasSrid) { - writer = dimension==3? WRITER3D_SRID : WRITER2D_SRID; - } - else writer = dimension==3? WRITER3D : WRITER2D; - return writer.write(geom); - } - - /** - * Converts the geometry into a byte array in EWKB format - * @param geom the geometry to convert to a byte array - * @param srid the srid of the geometry - * @param dimension geometry dimension (2 or 3) - * @return a byte array containing a EWKB representation of the geometry - */ - public static byte[] getByteArrayFromGeometry(Geometry geom, int srid, int dimension) { - WKBWriter writer; - geom.setSRID(srid); - writer = dimension==3? WRITER3D_SRID : WRITER2D_SRID; - return writer.write(geom); - } - - - /** - * Return 3 if coll contains at least one 3d geometry, 2 if coll contains - * only 2d geometries and defaultDim if coll is empty. - */ - public static int getGeometryDimension(FeatureCollection coll, int defaultDim) { - if (coll.size() > 0) { - // will explore up to 1000 features regularly distributed in the dataset - // if none of these feature has dim = 3, return 2, else return 3 - int step = 1 + coll.size()/1000; - int count = 0; - for (Iterator it = coll.iterator() ; it.hasNext() ; ) { - if (count%step == 0 && - getGeometryDimension(((Feature)it.next()).getGeometry()) == 3) { - return 3; - } - count++; - } - return 2; - } else return defaultDim; - } - - - private static int getGeometryDimension(Geometry g) { - Coordinate[] cc = g.getCoordinates(); - for (Coordinate c : cc) { - if (!Double.isNaN(c.z)) return 3; - } - return 2; - } - - - /** - * Get this FeatureCollection geometry type. - * Returns defaultType if coll is empty or if coll contains two geometries - * with different types. 
- */ - public static String getGeometryType(FeatureCollection coll, String defaultType) { - if (coll.size() > 0) { - Feature f = (Feature)coll.iterator().next(); - String firstGeometryType = f.getGeometry().getGeometryType(); - for (Iterator it = coll.iterator() ; it.hasNext() ; ) { - f = (Feature)it.next(); - if (!f.getGeometry().getGeometryType().equals(firstGeometryType)) { - return defaultType; - } - } - return firstGeometryType; - } - else return defaultType; - } - - public static int getSrid(FeatureCollection coll, int defaultSrid) { - CoordinateSystem cs = coll.getFeatureSchema().getCoordinateSystem(); - if (cs != null) { - try { - return cs.getEPSGCode(); - } - catch(UnsupportedOperationException e) { - return defaultSrid; - } - } else return defaultSrid; - } - - public static String normalize(String name) { - if (name == null) return null; - StringBuilder sb = new StringBuilder(name.length()); - for (int i = 0 ; i < name.length() ; i++) { - char c = name.charAt(i); - if(i==0) { - if (Character.isLetter(c) || c == '_') sb.append(Character.toLowerCase(c)); - else sb.append('_'); - } else { - if (Character.isLetterOrDigit(c) || c == '_') sb.append(Character.toLowerCase(c)); - else sb.append('_'); - } - } - return sb.toString(); - } - -} \ No newline at end of file Modified: core/trunk/src/org/openjump/core/ui/plugin/datastore/postgis/PostGISSaveDataSourceQueryChooser.java =================================================================== --- core/trunk/src/org/openjump/core/ui/plugin/datastore/postgis/PostGISSaveDataSourceQueryChooser.java 2016-02-27 17:28:40 UTC (rev 4831) +++ core/trunk/src/org/openjump/core/ui/plugin/datastore/postgis/PostGISSaveDataSourceQueryChooser.java 2016-02-27 21:27:02 UTC (rev 4832) @@ -17,6 +17,7 @@ package org.openjump.core.ui.plugin.datastore.postgis; import com.vividsolutions.jump.I18N; +import com.vividsolutions.jump.datastore.SQLUtil; import com.vividsolutions.jump.io.datasource.DataSourceQuery; import com.vividsolutions.jump.workbench.datasource.DataSourceQueryChooser; import com.vividsolutions.jump.workbench.model.Layer; @@ -138,11 +139,10 @@ Layer[] layers = context.getWorkbenchContext().getLayerNamePanel().getSelectedLayers(); if (layers.length == 1) { properties.put(SaveToPostGISDataSource.DATASET_NAME_KEY, layers[0].getName()); - //FeatureSchema schema = layers[0].getFeatureCollectionWrapper().getFeatureSchema(); - String[] schema_table = PostGISQueryUtil.splitTableName(panel.getTableName()); + String[] schema_table = SQLUtil.splitTableName(panel.getTableName()); properties.put(SaveToPostGISDataSource.SQL_QUERY_KEY, "SELECT * FROM " + - PostGISQueryUtil.compose(schema_table[0], schema_table[1]) + " LIMIT 100000"); + SQLUtil.compose(schema_table[0], schema_table[1]) + " LIMIT 100000"); // OpenJUMP has now a better support of Coordinate System at // FeatureCollection and FeatureSchema level, but this one is simple // and makes it easy to set the SRID the user want before an update Modified: core/trunk/src/org/openjump/core/ui/plugin/datastore/postgis/SaveToPostGISDataSource.java =================================================================== --- core/trunk/src/org/openjump/core/ui/plugin/datastore/postgis/SaveToPostGISDataSource.java 2016-02-27 17:28:40 UTC (rev 4831) +++ core/trunk/src/org/openjump/core/ui/plugin/datastore/postgis/SaveToPostGISDataSource.java 2016-02-27 21:27:02 UTC (rev 4832) @@ -1,9 +1,14 @@ package org.openjump.core.ui.plugin.datastore.postgis; +import com.vividsolutions.jts.geom.Coordinate; import 
com.vividsolutions.jts.geom.Geometry; import com.vividsolutions.jump.I18N; +import com.vividsolutions.jump.datastore.SQLUtil; import com.vividsolutions.jump.datastore.postgis.PostgisDSConnection; import com.vividsolutions.jump.datastore.postgis.PostgisDataStoreDriver; +import com.vividsolutions.jump.datastore.spatialdatabases.SpatialDatabasesDSConnection; +import com.vividsolutions.jump.datastore.spatialdatabases.SpatialDatabasesDSMetadata; +import com.vividsolutions.jump.datastore.spatialdatabases.SpatialDatabasesSQLBuilder; import com.vividsolutions.jump.feature.AttributeType; import com.vividsolutions.jump.feature.Feature; import com.vividsolutions.jump.feature.FeatureCollection; @@ -21,13 +26,9 @@ import java.io.UnsupportedEncodingException; import java.math.BigInteger; import java.sql.*; -import java.util.ArrayList; -import java.util.Collection; +import java.util.*; import java.util.Date; -import java.util.Iterator; -import static org.openjump.core.ui.plugin.datastore.postgis.PostGISQueryUtil.*; - /** * Add Write capabilities to DataStoreQueryDataSource for PostGIS table. * <p>There is now a more poweful way to connect to a postgis table with @@ -134,9 +135,9 @@ // Get schema and table names String table = (String)getProperties().get(TABLE_KEY); - String[] dbSchemaTable = PostGISQueryUtil.splitTableName(table); - String schemaName = unquote(dbSchemaTable[0]); - String tableName = unquote(dbSchemaTable[1]); + String[] dbSchemaTable = SQLUtil.splitTableName(table); + String schemaName = SQLUtil.unquote(dbSchemaTable[0]); + String tableName = SQLUtil.unquote(dbSchemaTable[1]); boolean normalizedColumnNames = false; String primary_key = (String)getProperties().get(PRIMARY_KEY); @@ -148,22 +149,24 @@ // default dim value (may be overloaded by a value read from the database) int dim = getGeometryDimension(featureCollection, 3); - PostgisDSConnection pgConnection = + SpatialDatabasesDSConnection conn = (PostgisDSConnection)new PostgisDataStoreDriver() .createConnection(connectionDescriptor.getParameterList()); - java.sql.Connection conn = pgConnection.getJdbcConnection(); - PostGISConnectionUtil connUtil = new PostGISConnectionUtil(conn); + java.sql.Connection jdbcConn = conn.getJdbcConnection(); // For update operations, use the dimension defined in geometry_column if any + String geomName = featureCollection.getFeatureSchema() + .getAttributeName(featureCollection.getFeatureSchema().getGeometryIndex()); + if (normalizedColumnNames) geomName = geomName.toLowerCase(); if (!method.equals(SAVE_METHOD_CREATE)) { - dim = connUtil.getGeometryDimension(schemaName, tableName, dim); + dim = conn.getMetadata().getCoordinateDimension(table, geomName); } if (method.equals(SAVE_METHOD_CREATE)) { boolean exists = tableExists(conn, schemaName, tableName); if (exists && !confirmOverwrite()) return; try { - conn.setAutoCommit(false); + jdbcConn.setAutoCommit(false); if (exists) { deleteTableQuery(conn, schemaName, tableName); } @@ -172,31 +175,33 @@ if (createPrimaryKey) { addDBPrimaryKey(conn, schemaName, tableName, DEFAULT_PK_NAME); } - conn.commit(); - conn.setAutoCommit(true); + jdbcConn.commit(); + jdbcConn.setAutoCommit(true); if (createPrimaryKey) { reloadDataFromDataStore(this, connectionDescriptor, schemaName, tableName, DEFAULT_PK_NAME, monitor); } // Adding vacuum analyze seems to be necessary to be able to use // ST_Estimated_Extent on the newly created table - conn.createStatement().execute("VACUUM ANALYZE " + - PostGISQueryUtil.compose(schemaName, tableName)); + 
jdbcConn.createStatement().execute("VACUUM ANALYZE " + + SQLUtil.compose(schemaName, tableName)); } catch(SQLException e) { throw e; } } if (method.equals(SAVE_METHOD_REPLACE)) { try { - conn.setAutoCommit(false); + jdbcConn.setAutoCommit(false); FeatureSchema featureSchema = featureCollection.getFeatureSchema(); - if (connUtil.compatibleSchemaSubset(schemaName, tableName, featureSchema, normalizedColumnNames).length < featureSchema.getAttributeCount()) { + if (conn.getCompatibleSchemaSubset( + schemaName, tableName, featureSchema, normalizedColumnNames).length < + featureSchema.getAttributeCount()) { if (!confirmWriteDespiteDifferentSchemas()) return; } truncateTable(conn, schemaName, tableName); insertInTable(conn, featureCollection, schemaName, tableName, primary_key, - srid>0, dim, normalizedColumnNames); - conn.commit(); - conn.setAutoCommit(true); + srid, dim, normalizedColumnNames); + jdbcConn.commit(); + jdbcConn.setAutoCommit(true); if (featureSchema.getExternalPrimaryKeyIndex() > -1) { reloadDataFromDataStore(this, connectionDescriptor, schemaName, tableName, DEFAULT_PK_NAME, monitor); } @@ -206,18 +211,20 @@ } if (method.equals(SAVE_METHOD_INSERT)) { try { - conn.setAutoCommit(false); + jdbcConn.setAutoCommit(false); FeatureSchema featureSchema = featureCollection.getFeatureSchema(); if (primary_key != null) { featureSchema.setExternalPrimaryKeyIndex(featureSchema.getAttributeIndex(primary_key)); } - if (connUtil.compatibleSchemaSubset(schemaName, tableName, featureSchema, normalizedColumnNames).length < featureSchema.getAttributeCount()) { + if (conn.getCompatibleSchemaSubset( + schemaName, tableName, featureSchema, normalizedColumnNames).length < + featureSchema.getAttributeCount()) { if (!confirmWriteDespiteDifferentSchemas()) return; } insertInTable(conn, featureCollection, schemaName, tableName, primary_key, - srid>0, dim, normalizedColumnNames); - conn.commit(); - conn.setAutoCommit(true); + srid, dim, normalizedColumnNames); + jdbcConn.commit(); + jdbcConn.setAutoCommit(true); if (featureSchema.getExternalPrimaryKeyIndex() > -1) { reloadDataFromDataStore(this, connectionDescriptor, schemaName, tableName, DEFAULT_PK_NAME, monitor); } @@ -228,18 +235,20 @@ if (method.equals(SAVE_METHOD_UPDATE)) { try { // Makes delete previous table and create new table atomic - conn.setAutoCommit(false); + jdbcConn.setAutoCommit(false); FeatureSchema featureSchema = featureCollection.getFeatureSchema(); if (primary_key != null) { featureSchema.setExternalPrimaryKeyIndex(featureSchema.getAttributeIndex(primary_key)); } - if (connUtil.compatibleSchemaSubset(schemaName, tableName, featureSchema, normalizedColumnNames).length < featureSchema.getAttributeCount()) { + if (conn.getCompatibleSchemaSubset( + schemaName, tableName, featureSchema, normalizedColumnNames).length < + featureSchema.getAttributeCount()) { if (!confirmWriteDespiteDifferentSchemas()) return; } insertUpdateTable(conn, featureCollection, schemaName, tableName, primary_key, - srid>0, dim, normalizedColumnNames); - conn.commit(); - conn.setAutoCommit(true); + srid, dim, normalizedColumnNames); + jdbcConn.commit(); + jdbcConn.setAutoCommit(true); if (featureSchema.getExternalPrimaryKeyIndex() > -1) { reloadDataFromDataStore(this, connectionDescriptor, schemaName, tableName, DEFAULT_PK_NAME, monitor); } @@ -249,15 +258,17 @@ } if (method.equals(SAVE_METHOD_DELETE)) { try { - conn.setAutoCommit(false); + jdbcConn.setAutoCommit(false); FeatureSchema featureSchema = featureCollection.getFeatureSchema(); - if 
(connUtil.compatibleSchemaSubset(schemaName, tableName, featureSchema, normalizedColumnNames).length < featureSchema.getAttributeCount()) { + if (conn.getCompatibleSchemaSubset( + schemaName, tableName, featureSchema, normalizedColumnNames).length < + featureSchema.getAttributeCount()) { if (!confirmWriteDespiteDifferentSchemas()) return; } deleteNotExistingFeaturesFromTable(conn, featureCollection, schemaName, tableName, primary_key); insertUpdateTable(conn, featureCollection, schemaName, tableName, primary_key, - srid > 0, dim, normalizedColumnNames); - conn.commit(); + srid, dim, normalizedColumnNames); + jdbcConn.commit(); } catch(SQLException e) { throw e; } @@ -294,8 +305,8 @@ /** * Check if this [schema.]table exists in this database. */ - private boolean tableExists(java.sql.Connection connection, String dbSchema, String dbTable) throws SQLException { - DatabaseMetaData metadata = connection.getMetaData(); + private boolean tableExists(SpatialDatabasesDSConnection conn, String dbSchema, String dbTable) throws SQLException { + DatabaseMetaData metadata = conn.getJdbcConnection().getMetaData(); return metadata.getTables(null, dbSchema, dbTable, new String[]{"TABLE"}).next(); } @@ -306,28 +317,28 @@ * @schemaName unquoted schema name or null to use default schema * @tableName unquoted table name */ - private void deleteTableQuery(java.sql.Connection connection, + private void deleteTableQuery(SpatialDatabasesDSConnection conn, String schemaName, String tableName) throws SQLException { try { // Try to delete dbTable AND the corresponding rows in geometry_columns table if (schemaName == null) { - connection.createStatement().execute("SELECT DropGeometryTable( '" + + conn.getJdbcConnection().createStatement().execute("SELECT DropGeometryTable( '" + tableName + "' );"); } else { - connection.createStatement().execute("SELECT DropGeometryTable( '" + + conn.getJdbcConnection().createStatement().execute("SELECT DropGeometryTable( '" + schemaName + "' , '" + tableName + "' );"); } } catch(SQLException e) { // If DropGeometryTable failed, try a simple DROP TABLE statement - connection.createStatement().execute("DROP TABLE " + compose(schemaName, tableName) + ";"); + conn.getJdbcConnection().createStatement().execute("DROP TABLE " + SQLUtil.compose(schemaName, tableName) + ";"); } } - private void truncateTable(java.sql.Connection conn, String schemaName, String tableName) throws SQLException { - String tableQName = compose(schemaName, tableName); + private void truncateTable(SpatialDatabasesDSConnection conn, String schemaName, String tableName) throws SQLException { + String tableQName = SQLUtil.compose(schemaName, tableName); try { - conn.createStatement().execute("TRUNCATE TABLE " + tableQName); + conn.getJdbcConnection().createStatement().execute("TRUNCATE TABLE " + tableQName); } catch (SQLException sqle) { throw new SQLException("Error executing query: TRUNCATE TABLE " + tableQName, sqle); } @@ -335,31 +346,32 @@ private void createAndPopulateTable( - java.sql.Connection conn, + SpatialDatabasesDSConnection conn, FeatureCollection fc, String schemaName, String tableName, int srid, String geometryType, int dim, - boolean normalizeColumnNames) throws SQLException { + boolean normalizeColumnNames) throws Exception { FeatureSchema schema = fc.getFeatureSchema(); String geometryColumn = schema.getAttributeName(schema.getGeometryIndex()); + SpatialDatabasesDSMetadata metadata = conn.getMetadata(); try { - conn.createStatement() - .execute(PostGISQueryUtil - 
.getCreateTableStatement(fc.getFeatureSchema(), schemaName, tableName, false));

@@ Diff output truncated at 100000 characters. @@
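For orientation on the refactoring above: calls that previously went through the deleted PostGISQueryUtil/PostGISConnectionUtil classes now go through the shared SQLUtil class. Below is a minimal, standalone sketch of how a datasource can assemble a qualified table reference after this change. The class name SQLUtilUsageSketch, the previewQuery helper and the sample table names are hypothetical; only the SQLUtil calls (splitTableName, unquote, compose) are taken from the diff.

    // Minimal usage sketch (not part of this commit). Only the SQLUtil calls
    // are real; the surrounding class, method and sample names are made up.
    import com.vividsolutions.jump.datastore.SQLUtil;

    public class SQLUtilUsageSketch {

        // Turns a user-typed table name into an unquoted schema/table pair,
        // then re-quotes it for use in a SQL statement.
        static String previewQuery(String userInput) {
            // splitTableName returns {schema or null, table}
            String[] schemaTable = SQLUtil.splitTableName(userInput);
            String schemaName = SQLUtil.unquote(schemaTable[0]); // null stays null
            String tableName  = SQLUtil.unquote(schemaTable[1]);
            // compose re-quotes the parts into "schema"."table" (or just "table")
            return "SELECT * FROM " + SQLUtil.compose(schemaName, tableName)
                    + " LIMIT 100000";
        }

        public static void main(String[] args) {
            // prints: SELECT * FROM "myschema"."mytable" LIMIT 100000
            System.out.println(previewQuery("myschema.mytable"));
            // prints: SELECT * FROM "2_table" LIMIT 100000
            System.out.println(previewQuery("2_table"));
        }
    }

SaveToPostGISDataSource follows this same split/unquote/compose sequence in the hunk above, while PostGISSaveDataSourceQueryChooser passes the split parts to compose directly; either way, drivers other than PostGIS can reuse the pattern without depending on the removed PostGIS-specific utility classes.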