http://git-wip-us.apache.org/repos/asf/cayenne/blob/26d8434d/cayenne-server/src/main/java/org/apache/cayenne/dba/oracle/OracleProcedureAction.java ---------------------------------------------------------------------- diff --git a/cayenne-server/src/main/java/org/apache/cayenne/dba/oracle/OracleProcedureAction.java b/cayenne-server/src/main/java/org/apache/cayenne/dba/oracle/OracleProcedureAction.java index 5e18d16..86e70b5 100644 --- a/cayenne-server/src/main/java/org/apache/cayenne/dba/oracle/OracleProcedureAction.java +++ b/cayenne-server/src/main/java/org/apache/cayenne/dba/oracle/OracleProcedureAction.java @@ -42,68 +42,56 @@ import org.apache.cayenne.query.ProcedureQuery; */ class OracleProcedureAction extends ProcedureAction { - OracleProcedureAction(ProcedureQuery query, DataNode dataNode) { - super(query, dataNode); - } + OracleProcedureAction(ProcedureQuery query, DataNode dataNode) { + super(query, dataNode); + } - /** - * Helper method that reads OUT parameters of a CallableStatement. - */ - @Override - protected void readProcedureOutParameters( - CallableStatement statement, - OperationObserver delegate) throws SQLException, Exception { + /** + * Helper method that reads OUT parameters of a CallableStatement. + */ + @Override + protected void readProcedureOutParameters(CallableStatement statement, OperationObserver delegate) + throws SQLException, Exception { - long t1 = System.currentTimeMillis(); + long t1 = System.currentTimeMillis(); - // build result row... - DataRow result = null; - List<ProcedureParameter> parameters = getProcedure().getCallParameters(); - for (int i = 0; i < parameters.size(); i++) { - ProcedureParameter parameter = parameters.get(i); + // build result row... + DataRow result = null; + List<ProcedureParameter> parameters = getProcedure().getCallParameters(); + for (int i = 0; i < parameters.size(); i++) { + ProcedureParameter parameter = parameters.get(i); - if (!parameter.isOutParam()) { - continue; - } + if (!parameter.isOutParam()) { + continue; + } - // ==== start Oracle-specific part - if (parameter.getType() == OracleAdapter.getOracleCursorType()) { - ResultSet rs = (ResultSet) statement.getObject(i + 1); + // ==== start Oracle-specific part + if (parameter.getType() == OracleAdapter.getOracleCursorType()) { - try { - RowDescriptor rsDescriptor = describeResultSet( - rs, - processedResultSets++); - readResultSet(rs, rsDescriptor, query, delegate); - } - finally { - try { - rs.close(); - } - catch (SQLException ex) { - } - } - } - // ==== end Oracle-specific part - else { - if (result == null) { - result = new DataRow(2); - } + try (ResultSet rs = (ResultSet) statement.getObject(i + 1);) { + RowDescriptor rsDescriptor = describeResultSet(rs, processedResultSets++); + readResultSet(rs, rsDescriptor, query, delegate); + } + } + // ==== end Oracle-specific part + else { + if (result == null) { + result = new DataRow(2); + } - ColumnDescriptor descriptor = new ColumnDescriptor(parameter); - ExtendedType type = dataNode.getAdapter().getExtendedTypes().getRegisteredType( - descriptor.getJavaClass()); - Object val = type.materializeObject(statement, i + 1, descriptor - .getJdbcType()); + ColumnDescriptor descriptor = new ColumnDescriptor(parameter); + ExtendedType type = dataNode.getAdapter().getExtendedTypes() + .getRegisteredType(descriptor.getJavaClass()); + Object val = type.materializeObject(statement, i + 1, descriptor.getJdbcType()); - result.put(descriptor.getDataRowKey(), val); - } - } + result.put(descriptor.getDataRowKey(), val); + } + } - if 
(result != null && !result.isEmpty()) { - // treat out parameters as a separate data row set - dataNode.getJdbcEventLogger().logSelectCount(1, System.currentTimeMillis() - t1); - delegate.nextRows(query, Collections.singletonList(result)); - } - } + if (result != null && !result.isEmpty()) { + // treat out parameters as a separate data row set + dataNode.getJdbcEventLogger().logSelectCount(1, System.currentTimeMillis() - t1); + delegate.nextRows(query, Collections.singletonList(result)); + } + } }
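The change above replaces hand-rolled ResultSet cleanup with try-with-resources when draining an Oracle cursor OUT parameter. As a rough, self-contained sketch of that JDBC pattern (the connection URL, credentials, and the list_artists procedure are invented for illustration; -10 is the usual value of oracle.jdbc.OracleTypes.CURSOR, which OracleAdapter.getOracleCursorType() resolves at runtime):

import java.sql.CallableStatement;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;

public class OracleCursorOutParamSketch {

    // Hypothetical connection details and procedure name, for illustration only.
    private static final String URL = "jdbc:oracle:thin:@//localhost:1521/XE";
    private static final int ORACLE_CURSOR = -10; // oracle.jdbc.OracleTypes.CURSOR

    public static void main(String[] args) throws SQLException {
        try (Connection con = DriverManager.getConnection(URL, "user", "secret");
                CallableStatement st = con.prepareCall("{call list_artists(?)}")) {

            st.registerOutParameter(1, ORACLE_CURSOR);
            st.execute();

            // The cursor comes back as a plain ResultSet; try-with-resources closes
            // it even if reading fails, which is exactly the boilerplate the diff removes.
            try (ResultSet rs = (ResultSet) st.getObject(1)) {
                while (rs.next()) {
                    System.out.println(rs.getString(1));
                }
            }
        }
    }
}

One behavioral nuance of this refactoring, here and in the files that follow: the old code swallowed any SQLException thrown by rs.close(), while try-with-resources attaches such an exception as suppressed (or propagates it if the body completed normally). That is a minor semantic difference, and almost always an improvement.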
http://git-wip-us.apache.org/repos/asf/cayenne/blob/26d8434d/cayenne-server/src/main/java/org/apache/cayenne/dba/oracle/OracleSQLTemplateAction.java ---------------------------------------------------------------------- diff --git a/cayenne-server/src/main/java/org/apache/cayenne/dba/oracle/OracleSQLTemplateAction.java b/cayenne-server/src/main/java/org/apache/cayenne/dba/oracle/OracleSQLTemplateAction.java index a105834..9df9591 100644 --- a/cayenne-server/src/main/java/org/apache/cayenne/dba/oracle/OracleSQLTemplateAction.java +++ b/cayenne-server/src/main/java/org/apache/cayenne/dba/oracle/OracleSQLTemplateAction.java @@ -93,6 +93,7 @@ class OracleSQLTemplateAction extends SQLTemplateAction { private ResultSet delegate; + @Override public ResultSetMetaData getMetaData() throws SQLException { return new OracleResultSetMetadata(delegate.getMetaData()); } @@ -101,10 +102,12 @@ class OracleSQLTemplateAction extends SQLTemplateAction { this.delegate = delegate; } + @Override public boolean absolute(int row) throws SQLException { return delegate.absolute(row); } + @Override public void afterLast() throws SQLException { delegate.afterLast(); } http://git-wip-us.apache.org/repos/asf/cayenne/blob/26d8434d/cayenne-server/src/main/java/org/apache/cayenne/dba/postgres/PostgresPkGenerator.java ---------------------------------------------------------------------- diff --git a/cayenne-server/src/main/java/org/apache/cayenne/dba/postgres/PostgresPkGenerator.java b/cayenne-server/src/main/java/org/apache/cayenne/dba/postgres/PostgresPkGenerator.java index 78287b9..429bc25 100644 --- a/cayenne-server/src/main/java/org/apache/cayenne/dba/postgres/PostgresPkGenerator.java +++ b/cayenne-server/src/main/java/org/apache/cayenne/dba/postgres/PostgresPkGenerator.java @@ -39,90 +39,75 @@ import org.apache.cayenne.map.DbKeyGenerator; */ public class PostgresPkGenerator extends OraclePkGenerator { - protected PostgresPkGenerator(JdbcAdapter adapter) { - super(adapter); - } - - @Override - protected String createSequenceString(DbEntity ent) { - // note that PostgreSQL 7.4 and newer supports INCREMENT BY and START - // WITH - // however 7.3 doesn't like BY and WITH, so using older more neutral - // syntax - // that works with all tested versions. - return "CREATE SEQUENCE " + sequenceName(ent) + " INCREMENT " + pkCacheSize(ent) + " START " + pkStartValue; - } - - /** - * @since 3.0 - */ - @Override - protected long longPkFromDatabase(DataNode node, DbEntity entity) throws Exception { - - DbKeyGenerator pkGenerator = entity.getPrimaryKeyGenerator(); - String pkGeneratingSequenceName; - if (pkGenerator != null && DbKeyGenerator.ORACLE_TYPE.equals(pkGenerator.getGeneratorType()) - && pkGenerator.getGeneratorName() != null) { - pkGeneratingSequenceName = pkGenerator.getGeneratorName(); - } else { - pkGeneratingSequenceName = sequenceName(entity); - } - - Connection con = node.getDataSource().getConnection(); - try { - Statement st = con.createStatement(); - try { - String sql = "SELECT nextval('" + pkGeneratingSequenceName + "')"; - adapter.getJdbcEventLogger().logQuery(sql, Collections.EMPTY_LIST); - ResultSet rs = st.executeQuery(sql); - try { - // Object pk = null; - if (!rs.next()) { - throw new CayenneRuntimeException("Error generating pk for DbEntity " + entity.getName()); - } - return rs.getLong(1); - } finally { - rs.close(); - } - } finally { - st.close(); - } - } finally { - con.close(); - } - - } - - /** - * Fetches a list of existing sequences that might match Cayenne generated - * ones. 
- */ - @Override - protected List<String> getExistingSequences(DataNode node) throws SQLException { - - // check existing sequences - Connection con = node.getDataSource().getConnection(); - - try { - Statement sel = con.createStatement(); - try { - String sql = "SELECT relname FROM pg_class WHERE relkind='S'"; - adapter.getJdbcEventLogger().logQuery(sql, Collections.EMPTY_LIST); - ResultSet rs = sel.executeQuery(sql); - try { - List<String> sequenceList = new ArrayList<String>(); - while (rs.next()) { - sequenceList.add(rs.getString(1)); - } - return sequenceList; - } finally { - rs.close(); - } - } finally { - sel.close(); - } - } finally { - con.close(); - } - } + protected PostgresPkGenerator(JdbcAdapter adapter) { + super(adapter); + } + + @Override + protected String createSequenceString(DbEntity ent) { + // note that PostgreSQL 7.4 and newer supports INCREMENT BY and START + // WITH + // however 7.3 doesn't like BY and WITH, so using older more neutral + // syntax + // that works with all tested versions. + return "CREATE SEQUENCE " + sequenceName(ent) + " INCREMENT " + pkCacheSize(ent) + " START " + pkStartValue; + } + + /** + * @since 3.0 + */ + @Override + protected long longPkFromDatabase(DataNode node, DbEntity entity) throws Exception { + + DbKeyGenerator pkGenerator = entity.getPrimaryKeyGenerator(); + String pkGeneratingSequenceName; + if (pkGenerator != null && DbKeyGenerator.ORACLE_TYPE.equals(pkGenerator.getGeneratorType()) + && pkGenerator.getGeneratorName() != null) { + pkGeneratingSequenceName = pkGenerator.getGeneratorName(); + } else { + pkGeneratingSequenceName = sequenceName(entity); + } + + try (Connection con = node.getDataSource().getConnection();) { + + try (Statement st = con.createStatement();) { + String sql = "SELECT nextval('" + pkGeneratingSequenceName + "')"; + adapter.getJdbcEventLogger().logQuery(sql, Collections.EMPTY_LIST); + + try (ResultSet rs = st.executeQuery(sql);) { + // Object pk = null; + if (!rs.next()) { + throw new CayenneRuntimeException("Error generating pk for DbEntity " + entity.getName()); + } + return rs.getLong(1); + } + } + } + } + + /** + * Fetches a list of existing sequences that might match Cayenne generated + * ones. 
+ */ + @Override + protected List<String> getExistingSequences(DataNode node) throws SQLException { + + // check existing sequences + + try (Connection con = node.getDataSource().getConnection();) { + + try (Statement sel = con.createStatement();) { + String sql = "SELECT relname FROM pg_class WHERE relkind='S'"; + adapter.getJdbcEventLogger().logQuery(sql, Collections.EMPTY_LIST); + + try (ResultSet rs = sel.executeQuery(sql);) { + List<String> sequenceList = new ArrayList<String>(); + while (rs.next()) { + sequenceList.add(rs.getString(1)); + } + return sequenceList; + } + } + } + } } http://git-wip-us.apache.org/repos/asf/cayenne/blob/26d8434d/cayenne-server/src/main/java/org/apache/cayenne/dba/sqlserver/SQLServerBatchAction.java ---------------------------------------------------------------------- diff --git a/cayenne-server/src/main/java/org/apache/cayenne/dba/sqlserver/SQLServerBatchAction.java b/cayenne-server/src/main/java/org/apache/cayenne/dba/sqlserver/SQLServerBatchAction.java index 4f89e05..bc757ef 100644 --- a/cayenne-server/src/main/java/org/apache/cayenne/dba/sqlserver/SQLServerBatchAction.java +++ b/cayenne-server/src/main/java/org/apache/cayenne/dba/sqlserver/SQLServerBatchAction.java @@ -36,79 +36,68 @@ import org.apache.cayenne.query.InsertBatchQuery; */ public class SQLServerBatchAction extends BatchAction { - public SQLServerBatchAction(BatchQuery batchQuery, DataNode dataNode, boolean runningAsBatch) { - super(batchQuery, dataNode, runningAsBatch); - } - - @Override - public void performAction(Connection connection, OperationObserver observer) - throws SQLException, Exception { - - // this condition checks if identity columns are present in the query and adapter - // is not ready to process them... e.g. if we are using a MS driver... - boolean identityOverride = expectsToOverrideIdentityColumns(); - if (identityOverride) { - setIdentityInsert(connection, true); - } - - try { - super.performAction(connection, observer); - } - finally { - - // important: turn off IDENTITY_INSERT as SQL Server won't be able to process - // other identity columns in the same transaction - - // TODO: if an error happens here this would mask the parent error - if (identityOverride) { - setIdentityInsert(connection, false); - } - } - } - - protected void setIdentityInsert(Connection connection, boolean on) - throws SQLException { - - String flag = on ? " ON" : " OFF"; - String configSQL = "SET IDENTITY_INSERT " - + query.getDbEntity().getFullyQualifiedName() - + flag; - - dataNode.getJdbcEventLogger().logQuery(configSQL, Collections.EMPTY_LIST); - - Statement statement = connection.createStatement(); - try { - statement.execute(configSQL); - } - finally { - try { - statement.close(); - } - catch (Exception e) { - } - } - } - - /** - * Returns whether a table has identity columns. - */ - protected boolean expectsToOverrideIdentityColumns() { - // jTDS driver supports identity columns, no need for tricks... 
- if (dataNode.getAdapter().supportsGeneratedKeys()) { - return false; - } - - if (!(query instanceof InsertBatchQuery) || query.getDbEntity() == null) { - return false; - } - - // find identity attributes - for (DbAttribute attribute : query.getDbEntity().getAttributes()) { - if (attribute.isGenerated()) { - return true; - } - } - - return false; - } + public SQLServerBatchAction(BatchQuery batchQuery, DataNode dataNode, boolean runningAsBatch) { + super(batchQuery, dataNode, runningAsBatch); + } + + @Override + public void performAction(Connection connection, OperationObserver observer) throws SQLException, Exception { + + // this condition checks if identity columns are present in the query + // and adapter + // is not ready to process them... e.g. if we are using a MS driver... + boolean identityOverride = expectsToOverrideIdentityColumns(); + if (identityOverride) { + setIdentityInsert(connection, true); + } + + try { + super.performAction(connection, observer); + } finally { + + // important: turn off IDENTITY_INSERT as SQL Server won't be able + // to process + // other identity columns in the same transaction + + // TODO: if an error happens here this would mask the parent error + if (identityOverride) { + setIdentityInsert(connection, false); + } + } + } + + protected void setIdentityInsert(Connection connection, boolean on) throws SQLException { + + String flag = on ? " ON" : " OFF"; + String configSQL = "SET IDENTITY_INSERT " + query.getDbEntity().getFullyQualifiedName() + flag; + + dataNode.getJdbcEventLogger().logQuery(configSQL, Collections.EMPTY_LIST); + + try (Statement statement = connection.createStatement();) { + statement.execute(configSQL); + } + } + + /** + * Returns whether a table has identity columns. + */ + protected boolean expectsToOverrideIdentityColumns() { + // jTDS driver supports identity columns, no need for tricks... + if (dataNode.getAdapter().supportsGeneratedKeys()) { + return false; + } + + if (!(query instanceof InsertBatchQuery) || query.getDbEntity() == null) { + return false; + } + + // find identity attributes + for (DbAttribute attribute : query.getDbEntity().getAttributes()) { + if (attribute.isGenerated()) { + return true; + } + } + + return false; + } } http://git-wip-us.apache.org/repos/asf/cayenne/blob/26d8434d/cayenne-server/src/main/java/org/apache/cayenne/dba/sqlserver/SQLServerProcedureAction.java ---------------------------------------------------------------------- diff --git a/cayenne-server/src/main/java/org/apache/cayenne/dba/sqlserver/SQLServerProcedureAction.java b/cayenne-server/src/main/java/org/apache/cayenne/dba/sqlserver/SQLServerProcedureAction.java index c0afc25..256f68d 100644 --- a/cayenne-server/src/main/java/org/apache/cayenne/dba/sqlserver/SQLServerProcedureAction.java +++ b/cayenne-server/src/main/java/org/apache/cayenne/dba/sqlserver/SQLServerProcedureAction.java @@ -37,9 +37,9 @@ import org.apache.cayenne.query.ProcedureQuery; import org.apache.cayenne.query.Query; /** - * ProcedureAction for SQLServer MS JDBC driver. Customizes OUT parameter processing - it - * has to be done AFTER the ResultSets are read (note that jTDS driver works fine with - * normal ProcedureAction). + * ProcedureAction for SQLServer MS JDBC driver. Customizes OUT parameter + * processing - it has to be done AFTER the ResultSets are read (note that jTDS + * driver works fine with normal ProcedureAction). * <p> * <i>See JIRA CAY-251 for details. 
</i> * </p> @@ -48,157 +48,141 @@ import org.apache.cayenne.query.Query; */ public class SQLServerProcedureAction extends ProcedureAction { - /** - * @since 4.0 - */ - public SQLServerProcedureAction(ProcedureQuery query, DataNode dataNode) { - super(query, dataNode); - } - - @Override - public void performAction(Connection connection, OperationObserver observer) - throws SQLException, Exception { - - ProcedureTranslator transl = createTranslator(connection); - CallableStatement statement = (CallableStatement) transl.createStatement(); - - try { - // stored procedure may contain a mixture of update counts and result sets, - // and out parameters. Read out parameters first, then - // iterate until we exhaust all results - boolean hasResultSet = statement.execute(); - - // local observer to cache results and provide them to the external observer - // in the order consistent with other adapters. - - Observer localObserver = new Observer(observer); - - // read query, using local observer - - while (true) { - if (hasResultSet) { - ResultSet rs = statement.getResultSet(); - try { - RowDescriptor descriptor = describeResultSet( - rs, - processedResultSets++); - readResultSet(rs, descriptor, query, localObserver); - } - finally { - try { - rs.close(); - } - catch (SQLException ex) { - } - } - } - else { - int updateCount = statement.getUpdateCount(); - if (updateCount == -1) { - break; - } - dataNode.getJdbcEventLogger().logUpdateCount(updateCount); - localObserver.nextCount(query, updateCount); - } - - hasResultSet = statement.getMoreResults(); - } - - // read out parameters to the main observer ... AFTER the main result set - // TODO: I hope SQLServer does not support ResultSets as OUT parameters, - // otherwise - // the order of custom result descriptors will be messed up - readProcedureOutParameters(statement, observer); - - // add results back to main observer - localObserver.flushResults(query); - } - finally { - try { - statement.close(); - } - catch (SQLException ex) { - - } - } - } - - class Observer implements OperationObserver { - - List<List<?>> results; - List<Integer> counts; - OperationObserver observer; - - Observer(OperationObserver observer) { - this.observer = observer; - } - - void flushResults(Query query) { - if (results != null) { - for (List<?> result : results) { - observer.nextRows(query, result); - } - results = null; - } - - if (counts != null) { - for (Integer count : counts) { - observer.nextCount(query, count); - } - counts = null; - } - } - - @Override - public void nextBatchCount(Query query, int[] resultCount) { - observer.nextBatchCount(query, resultCount); - } - - @Override - public void nextCount(Query query, int resultCount) { - // does not delegate to wrapped observer - // but instead caches results locally. - if (counts == null) { - counts = new ArrayList<Integer>(); - } - - counts.add(Integer.valueOf(resultCount)); - } - - @Override - public void nextRows(Query query, List<?> dataRows) { - // does not delegate to wrapped observer - // but instead caches results locally. 
- if (results == null) { - results = new ArrayList<List<?>>(); - } - - results.add(dataRows); - } - - @Override - public void nextRows(Query q, ResultIterator it) { - observer.nextRows(q, it); - } - - @Override - public void nextGlobalException(Exception ex) { - observer.nextGlobalException(ex); - } - - @Override - public void nextGeneratedRows(Query query, ResultIterator keys, ObjectId idToUpdate) { - observer.nextGeneratedRows(query, keys, idToUpdate); - } - - @Override - public void nextQueryException(Query query, Exception ex) { - observer.nextQueryException(query, ex); - } - - @Override - public boolean isIteratedResult() { - return observer.isIteratedResult(); - } - } + /** + * @since 4.0 + */ + public SQLServerProcedureAction(ProcedureQuery query, DataNode dataNode) { + super(query, dataNode); + } + + @Override + public void performAction(Connection connection, OperationObserver observer) throws SQLException, Exception { + + ProcedureTranslator transl = createTranslator(connection); + + try (CallableStatement statement = (CallableStatement) transl.createStatement();) { + // stored procedure may contain a mixture of update counts and + // result sets, + // and out parameters. Read out parameters first, then + // iterate until we exhaust all results + boolean hasResultSet = statement.execute(); + + // local observer to cache results and provide them to the external + // observer + // in the order consistent with other adapters. + + Observer localObserver = new Observer(observer); + + // read query, using local observer + + while (true) { + if (hasResultSet) { + + try (ResultSet rs = statement.getResultSet();) { + RowDescriptor descriptor = describeResultSet(rs, processedResultSets++); + readResultSet(rs, descriptor, query, localObserver); + } + } else { + int updateCount = statement.getUpdateCount(); + if (updateCount == -1) { + break; + } + dataNode.getJdbcEventLogger().logUpdateCount(updateCount); + localObserver.nextCount(query, updateCount); + } + + hasResultSet = statement.getMoreResults(); + } + + // read out parameters to the main observer ... AFTER the main + // result set + // TODO: I hope SQLServer does not support ResultSets as OUT + // parameters, + // otherwise + // the order of custom result descriptors will be messed up + readProcedureOutParameters(statement, observer); + + // add results back to main observer + localObserver.flushResults(query); + } + } + + class Observer implements OperationObserver { + + List<List<?>> results; + List<Integer> counts; + OperationObserver observer; + + Observer(OperationObserver observer) { + this.observer = observer; + } + + void flushResults(Query query) { + if (results != null) { + for (List<?> result : results) { + observer.nextRows(query, result); + } + results = null; + } + + if (counts != null) { + for (Integer count : counts) { + observer.nextCount(query, count); + } + counts = null; + } + } + + @Override + public void nextBatchCount(Query query, int[] resultCount) { + observer.nextBatchCount(query, resultCount); + } + + @Override + public void nextCount(Query query, int resultCount) { + // does not delegate to wrapped observer + // but instead caches results locally. + if (counts == null) { + counts = new ArrayList<Integer>(); + } + + counts.add(Integer.valueOf(resultCount)); + } + + @Override + public void nextRows(Query query, List<?> dataRows) { + // does not delegate to wrapped observer + // but instead caches results locally. 
+ if (results == null) { + results = new ArrayList<List<?>>(); + } + + results.add(dataRows); + } + + @Override + public void nextRows(Query q, ResultIterator it) { + observer.nextRows(q, it); + } + + @Override + public void nextGlobalException(Exception ex) { + observer.nextGlobalException(ex); + } + + @Override + public void nextGeneratedRows(Query query, ResultIterator keys, ObjectId idToUpdate) { + observer.nextGeneratedRows(query, keys, idToUpdate); + } + + @Override + public void nextQueryException(Query query, Exception ex) { + observer.nextQueryException(query, ex); + } + + @Override + public boolean isIteratedResult() { + return observer.isIteratedResult(); + } + } } http://git-wip-us.apache.org/repos/asf/cayenne/blob/26d8434d/cayenne-server/src/main/java/org/apache/cayenne/dba/sybase/SybasePkGenerator.java ---------------------------------------------------------------------- diff --git a/cayenne-server/src/main/java/org/apache/cayenne/dba/sybase/SybasePkGenerator.java b/cayenne-server/src/main/java/org/apache/cayenne/dba/sybase/SybasePkGenerator.java index 65552be..6e73561 100644 --- a/cayenne-server/src/main/java/org/apache/cayenne/dba/sybase/SybasePkGenerator.java +++ b/cayenne-server/src/main/java/org/apache/cayenne/dba/sybase/SybasePkGenerator.java @@ -35,198 +35,184 @@ import org.apache.cayenne.tx.Transaction; /** * Primary key generator implementation for Sybase. Uses a lookup table named - * "AUTO_PK_SUPPORT" and a stored procedure "auto_pk_for_table" to search and increment - * primary keys for tables. + * "AUTO_PK_SUPPORT" and a stored procedure "auto_pk_for_table" to search and + * increment primary keys for tables. */ public class SybasePkGenerator extends JdbcPkGenerator { - protected SybasePkGenerator(JdbcAdapter adapter) { - super(adapter); - } - - @Override - protected String pkTableCreateString() { - return "CREATE TABLE AUTO_PK_SUPPORT ( TABLE_NAME CHAR(100) NOT NULL, NEXT_ID DECIMAL(19,0) NOT NULL, PRIMARY KEY(TABLE_NAME))"; - } - - /** - * Generates database objects to provide automatic primary key support. Method will - * execute the following SQL statements: - * <p> - * 1. Executed only if a corresponding table does not exist in the database. - * </p> - * - * <pre> - * CREATE TABLE AUTO_PK_SUPPORT ( - * TABLE_NAME VARCHAR(32) NOT NULL, - * NEXT_ID DECIMAL(19,0) NOT NULL - * ) - * </pre> - * <p> - * 2. Executed under any circumstances. - * </p> - * - * <pre> - * if exists (SELECT * FROM sysobjects WHERE name = 'auto_pk_for_table') - * BEGIN - * DROP PROCEDURE auto_pk_for_table - * END - * </pre> - * <p> - * 3. Executed under any circumstances. - * </p> - * CREATE PROCEDURE auto_pk_for_table - * - * <pre> - * @tname VARCHAR(32), - * @pkbatchsize INT AS BEGIN BEGIN TRANSACTION UPDATE AUTO_PK_SUPPORT set NEXT_ID = - * NEXT_ID + - * @pkbatchsize WHERE TABLE_NAME = - * @tname SELECT NEXT_ID from AUTO_PK_SUPPORT where NEXT_ID = - * @tname COMMIT END - * </pre> - * - * @param node node that provides access to a DataSource. 
- */ - @Override - public void createAutoPk(DataNode node, List<DbEntity> dbEntities) throws Exception { - super.createAutoPk(node, dbEntities); - super.runUpdate(node, safePkProcDrop()); - super.runUpdate(node, unsafePkProcCreate()); - } - - @Override - public List<String> createAutoPkStatements(List<DbEntity> dbEntities) { - List<String> list = super.createAutoPkStatements(dbEntities); - - // add stored procedure drop code - list.add(safePkProcDrop()); - - // add stored procedure creation code - list.add(unsafePkProcCreate()); - - return list; - } - - /** - * Drops database objects related to automatic primary key support. Method will - * execute the following SQL statements: - * - * <pre> - * if exists (SELECT * FROM sysobjects WHERE name = 'AUTO_PK_SUPPORT') - * BEGIN - * DROP TABLE AUTO_PK_SUPPORT - * END - * - * - * if exists (SELECT * FROM sysobjects WHERE name = 'auto_pk_for_table') - * BEGIN - * DROP PROCEDURE auto_pk_for_table - * END - * </pre> - * - * @param node node that provides access to a DataSource. - */ - @Override - public void dropAutoPk(DataNode node, List<DbEntity> dbEntities) throws Exception { - super.runUpdate(node, safePkProcDrop()); - super.runUpdate(node, safePkTableDrop()); - } - - @Override - public List<String> dropAutoPkStatements(List<DbEntity> dbEntities) { - List<String> list = new ArrayList<String>(); - list.add(safePkProcDrop()); - list.add(safePkTableDrop()); - return list; - } - - /** - * @since 3.0 - */ - @Override - protected long longPkFromDatabase(DataNode node, DbEntity entity) throws Exception { - // handle CAY-588 - get connection that is separate from the connection in the - // current transaction. - - // TODO (andrus, 7/6/2006) Note that this will still work in a pool with a single - // connection, as PK generator is invoked early in the transaction, before the - // connection is grabbed for commit... So maybe promote this to other adapters in - // 3.0? 
- - Transaction transaction = BaseTransaction.getThreadTransaction(); - BaseTransaction.bindThreadTransaction(null); - - try { - - Connection connection = node.getDataSource().getConnection(); - try { - CallableStatement statement = connection - .prepareCall("{call auto_pk_for_table(?, ?)}"); - try { - statement.setString(1, entity.getName()); - statement.setInt(2, super.getPkCacheSize()); - - // can't use "executeQuery" - // per http://jtds.sourceforge.net/faq.html#expectingResultSet - statement.execute(); - if (statement.getMoreResults()) { - ResultSet rs = statement.getResultSet(); - - try { - if (rs.next()) { - return rs.getLong(1); - } - else { - throw new CayenneRuntimeException( - "Error generating pk for DbEntity " - + entity.getName()); - } - } - finally { - rs.close(); - } - } - else { - throw new CayenneRuntimeException( - "Error generating pk for DbEntity " - + entity.getName() - + ", no result set from stored procedure."); - } - } - finally { - statement.close(); - } - } - finally { - connection.close(); - } - } - finally { - BaseTransaction.bindThreadTransaction(transaction); - } - } - - private String safePkTableDrop() { - StringBuilder buf = new StringBuilder(); - buf - .append( - "if exists (SELECT * FROM sysobjects WHERE name = 'AUTO_PK_SUPPORT')") - .append(" BEGIN ") - .append(" DROP TABLE AUTO_PK_SUPPORT") - .append(" END"); - - return buf.toString(); - } - - private String unsafePkProcCreate() { - return " CREATE PROCEDURE auto_pk_for_table @tname VARCHAR(32), @pkbatchsize INT AS BEGIN BEGIN TRANSACTION" - + " UPDATE AUTO_PK_SUPPORT set NEXT_ID = NEXT_ID + @pkbatchsize WHERE TABLE_NAME = @tname" - + " SELECT NEXT_ID FROM AUTO_PK_SUPPORT WHERE TABLE_NAME = @tname COMMIT END"; - } - - private String safePkProcDrop() { - return "if exists (SELECT * FROM sysobjects WHERE name = 'auto_pk_for_table') BEGIN DROP PROCEDURE auto_pk_for_table END"; - } + protected SybasePkGenerator(JdbcAdapter adapter) { + super(adapter); + } + + @Override + protected String pkTableCreateString() { + return "CREATE TABLE AUTO_PK_SUPPORT ( TABLE_NAME CHAR(100) NOT NULL, NEXT_ID DECIMAL(19,0) NOT NULL, PRIMARY KEY(TABLE_NAME))"; + } + + /** + * Generates database objects to provide automatic primary key support. + * Method will execute the following SQL statements: + * <p> + * 1. Executed only if a corresponding table does not exist in the database. + * </p> + * + * <pre> + * CREATE TABLE AUTO_PK_SUPPORT ( + * TABLE_NAME VARCHAR(32) NOT NULL, + * NEXT_ID DECIMAL(19,0) NOT NULL + * ) + * </pre> + * <p> + * 2. Executed under any circumstances. + * </p> + * + * <pre> + * if exists (SELECT * FROM sysobjects WHERE name = 'auto_pk_for_table') + * BEGIN + * DROP PROCEDURE auto_pk_for_table + * END + * </pre> + * <p> + * 3. Executed under any circumstances. + * </p> + * CREATE PROCEDURE auto_pk_for_table + * + * <pre> + * @tname VARCHAR(32), + * @pkbatchsize INT AS BEGIN BEGIN TRANSACTION UPDATE AUTO_PK_SUPPORT set NEXT_ID = + * NEXT_ID + + * @pkbatchsize WHERE TABLE_NAME = + * @tname SELECT NEXT_ID from AUTO_PK_SUPPORT where NEXT_ID = + * @tname COMMIT END + * </pre> + * + * @param node + * node that provides access to a DataSource. 
+ */ + @Override + public void createAutoPk(DataNode node, List<DbEntity> dbEntities) throws Exception { + super.createAutoPk(node, dbEntities); + super.runUpdate(node, safePkProcDrop()); + super.runUpdate(node, unsafePkProcCreate()); + } + + @Override + public List<String> createAutoPkStatements(List<DbEntity> dbEntities) { + List<String> list = super.createAutoPkStatements(dbEntities); + + // add stored procedure drop code + list.add(safePkProcDrop()); + + // add stored procedure creation code + list.add(unsafePkProcCreate()); + + return list; + } + + /** + * Drops database objects related to automatic primary key support. Method + * will execute the following SQL statements: + * + * <pre> + * if exists (SELECT * FROM sysobjects WHERE name = 'AUTO_PK_SUPPORT') + * BEGIN + * DROP TABLE AUTO_PK_SUPPORT + * END + * + * + * if exists (SELECT * FROM sysobjects WHERE name = 'auto_pk_for_table') + * BEGIN + * DROP PROCEDURE auto_pk_for_table + * END + * </pre> + * + * @param node + * node that provides access to a DataSource. + */ + @Override + public void dropAutoPk(DataNode node, List<DbEntity> dbEntities) throws Exception { + super.runUpdate(node, safePkProcDrop()); + super.runUpdate(node, safePkTableDrop()); + } + + @Override + public List<String> dropAutoPkStatements(List<DbEntity> dbEntities) { + List<String> list = new ArrayList<String>(); + list.add(safePkProcDrop()); + list.add(safePkTableDrop()); + return list; + } + + /** + * @since 3.0 + */ + @Override + protected long longPkFromDatabase(DataNode node, DbEntity entity) throws Exception { + // handle CAY-588 - get connection that is separate from the connection + // in the + // current transaction. + + // TODO (andrus, 7/6/2006) Note that this will still work in a pool with + // a single + // connection, as PK generator is invoked early in the transaction, + // before the + // connection is grabbed for commit... So maybe promote this to other + // adapters in + // 3.0? 
+ + Transaction transaction = BaseTransaction.getThreadTransaction(); + BaseTransaction.bindThreadTransaction(null); + + try { + + try (Connection connection = node.getDataSource().getConnection();) { + + try (CallableStatement statement = connection.prepareCall("{call auto_pk_for_table(?, ?)}");) { + statement.setString(1, entity.getName()); + statement.setInt(2, super.getPkCacheSize()); + + // can't use "executeQuery" + // per + // http://jtds.sourceforge.net/faq.html#expectingResultSet + statement.execute(); + if (statement.getMoreResults()) { + + try (ResultSet rs = statement.getResultSet();) { + if (rs.next()) { + return rs.getLong(1); + } else { + throw new CayenneRuntimeException("Error generating pk for DbEntity " + + entity.getName()); + } + } + + } else { + throw new CayenneRuntimeException("Error generating pk for DbEntity " + entity.getName() + + ", no result set from stored procedure."); + } + } + } + } finally { + BaseTransaction.bindThreadTransaction(transaction); + } + } + + private String safePkTableDrop() { + StringBuilder buf = new StringBuilder(); + buf.append("if exists (SELECT * FROM sysobjects WHERE name = 'AUTO_PK_SUPPORT')").append(" BEGIN ") + .append(" DROP TABLE AUTO_PK_SUPPORT").append(" END"); + + return buf.toString(); + } + + private String unsafePkProcCreate() { + return " CREATE PROCEDURE auto_pk_for_table @tname VARCHAR(32), @pkbatchsize INT AS BEGIN BEGIN TRANSACTION" + + " UPDATE AUTO_PK_SUPPORT set NEXT_ID = NEXT_ID + @pkbatchsize WHERE TABLE_NAME = @tname" + + " SELECT NEXT_ID FROM AUTO_PK_SUPPORT WHERE TABLE_NAME = @tname COMMIT END"; + } + + private String safePkProcDrop() { + return "if exists (SELECT * FROM sysobjects WHERE name = 'auto_pk_for_table') BEGIN DROP PROCEDURE auto_pk_for_table END"; + } } http://git-wip-us.apache.org/repos/asf/cayenne/blob/26d8434d/cayenne-server/src/main/java/org/apache/cayenne/merge/AbstractToDbToken.java ---------------------------------------------------------------------- diff --git a/cayenne-server/src/main/java/org/apache/cayenne/merge/AbstractToDbToken.java b/cayenne-server/src/main/java/org/apache/cayenne/merge/AbstractToDbToken.java index c6896e8..e22d789 100644 --- a/cayenne-server/src/main/java/org/apache/cayenne/merge/AbstractToDbToken.java +++ b/cayenne-server/src/main/java/org/apache/cayenne/merge/AbstractToDbToken.java @@ -30,113 +30,97 @@ import org.apache.cayenne.map.DbEntity; import org.apache.cayenne.validation.SimpleValidationFailure; /** - * Common abstract superclass for all {@link MergerToken}s going from the model to the - * database. + * Common abstract superclass for all {@link MergerToken}s going from the model + * to the database. 
*/ public abstract class AbstractToDbToken implements MergerToken, Comparable<MergerToken> { - private final String tokenName; - - protected AbstractToDbToken(String tokenName) { - this.tokenName = tokenName; - } - - @Override - public final String getTokenName() { - return tokenName; - } - - public final MergeDirection getDirection() { - return MergeDirection.TO_DB; - } - - public void execute(MergerContext mergerContext) { - for (String sql : createSql(mergerContext.getAdapter())) { - executeSql(mergerContext, sql); - } - } - - protected void executeSql(MergerContext mergerContext, String sql) { - Connection conn = null; - Statement st = null; - JdbcEventLogger logger = mergerContext.getDataNode().getJdbcEventLogger(); - try { - logger.log(sql); - conn = mergerContext.getDataNode().getDataSource().getConnection(); - st = conn.createStatement(); - st.execute(sql); - } - catch (SQLException e) { - mergerContext.getValidationResult().addFailure( - new SimpleValidationFailure(sql, e.getMessage())); - logger.logQueryError(e); - } - finally { - if (st != null) { - try { - st.close(); - } - catch (SQLException e) { - } - } - if (conn != null) { - try { - conn.close(); - } - catch (SQLException e) { - } - } - } - } - - @Override - public String toString() { - return getTokenName() + ' ' + getTokenValue() + ' ' + getDirection(); - } - - public abstract List<String> createSql(DbAdapter adapter); - - abstract static class Entity extends AbstractToDbToken { - - private DbEntity entity; - - public Entity(String tokenName, DbEntity entity) { - super(tokenName); - this.entity = entity; - } - - public DbEntity getEntity() { - return entity; - } - - public String getTokenValue() { - return getEntity().getName(); - } - - public int compareTo(MergerToken o) { - // default order as tokens are created - return 0; - } - - } - - abstract static class EntityAndColumn extends Entity { - - private DbAttribute column; - - public EntityAndColumn(String tokenName, DbEntity entity, DbAttribute column) { - super(tokenName, entity); - this.column = column; - } - - public DbAttribute getColumn() { - return column; - } - - @Override - public String getTokenValue() { - return getEntity().getName() + "." 
+ getColumn().getName(); - } - - } + private final String tokenName; + + protected AbstractToDbToken(String tokenName) { + this.tokenName = tokenName; + } + + @Override + public final String getTokenName() { + return tokenName; + } + + @Override + public final MergeDirection getDirection() { + return MergeDirection.TO_DB; + } + + @Override + public void execute(MergerContext mergerContext) { + for (String sql : createSql(mergerContext.getAdapter())) { + executeSql(mergerContext, sql); + } + } + + protected void executeSql(MergerContext mergerContext, String sql) { + JdbcEventLogger logger = mergerContext.getDataNode().getJdbcEventLogger(); + logger.log(sql); + + try (Connection conn = mergerContext.getDataNode().getDataSource().getConnection();) { + + try (Statement st = conn.createStatement();) { + st.execute(sql); + } + } catch (SQLException e) { + mergerContext.getValidationResult().addFailure(new SimpleValidationFailure(sql, e.getMessage())); + logger.logQueryError(e); + } + } + + @Override + public String toString() { + return getTokenName() + ' ' + getTokenValue() + ' ' + getDirection(); + } + + public abstract List<String> createSql(DbAdapter adapter); + + abstract static class Entity extends AbstractToDbToken { + + private DbEntity entity; + + public Entity(String tokenName, DbEntity entity) { + super(tokenName); + this.entity = entity; + } + + public DbEntity getEntity() { + return entity; + } + + public String getTokenValue() { + return getEntity().getName(); + } + + public int compareTo(MergerToken o) { + // default order as tokens are created + return 0; + } + + } + + abstract static class EntityAndColumn extends Entity { + + private DbAttribute column; + + public EntityAndColumn(String tokenName, DbEntity entity, DbAttribute column) { + super(tokenName, entity); + this.column = column; + } + + public DbAttribute getColumn() { + return column; + } + + @Override + public String getTokenValue() { + return getEntity().getName() + "." 
+ getColumn().getName(); + } + + } } http://git-wip-us.apache.org/repos/asf/cayenne/blob/26d8434d/cayenne-server/src/main/java/org/apache/cayenne/merge/DbMerger.java ---------------------------------------------------------------------- diff --git a/cayenne-server/src/main/java/org/apache/cayenne/merge/DbMerger.java b/cayenne-server/src/main/java/org/apache/cayenne/merge/DbMerger.java index d9cf154..3070cf1 100644 --- a/cayenne-server/src/main/java/org/apache/cayenne/merge/DbMerger.java +++ b/cayenne-server/src/main/java/org/apache/cayenne/merge/DbMerger.java @@ -18,6 +18,19 @@ */ package org.apache.cayenne.merge; +import java.sql.Connection; +import java.sql.SQLException; +import java.sql.Types; +import java.util.Collection; +import java.util.Collections; +import java.util.Comparator; +import java.util.HashSet; +import java.util.LinkedList; +import java.util.List; +import java.util.Set; + +import javax.sql.DataSource; + import org.apache.cayenne.CayenneRuntimeException; import org.apache.cayenne.access.DataNode; import org.apache.cayenne.access.DbLoader; @@ -35,18 +48,6 @@ import org.apache.cayenne.map.DetectedDbEntity; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; -import javax.sql.DataSource; -import java.sql.Connection; -import java.sql.SQLException; -import java.sql.Types; -import java.util.Collection; -import java.util.Collections; -import java.util.Comparator; -import java.util.HashSet; -import java.util.LinkedList; -import java.util.List; -import java.util.Set; - /** * Traverse a {@link DataNode} and a {@link DataMap} and create a group of * {@link MergerToken}s to alter the {@link DataNode} data store to match the @@ -55,357 +56,350 @@ import java.util.Set; */ public class DbMerger { - private static final Log LOGGER = LogFactory.getLog(DbMerger.class); - - private final MergerFactory factory; - - private final ValueForNullProvider valueForNull; - - public DbMerger(MergerFactory factory) { - this(factory, null); - } - - public DbMerger(MergerFactory factory, ValueForNullProvider valueForNull) { - this.factory = factory; - this.valueForNull = valueForNull == null ? new EmptyValueForNullProvider() : valueForNull; - } - - /** - * Create and return a {@link List} of {@link MergerToken}s to alter the given - * {@link DataNode} to match the given {@link DataMap} - */ - public List<MergerToken> createMergeTokens(DataSource dataSource, DbAdapter adapter, DataMap existingDataMap, DbLoaderConfiguration config) { - return createMergeTokens( - existingDataMap, - loadDataMapFromDb(dataSource, adapter, config), - config - ); - } - - /** - * Create and return a {@link List} of {@link MergerToken}s to alter the given - * {@link DataNode} to match the given {@link DataMap} - */ - public List<MergerToken> createMergeTokens(DataMap existing, DataMap loadedFomDb, DbLoaderConfiguration config) { - - loadedFomDb.setQuotingSQLIdentifiers(existing.isQuotingSQLIdentifiers()); - - List<MergerToken> tokens - = createMergeTokens(filter(existing, config.getFiltersConfig()), loadedFomDb.getDbEntities(), config); - - // sort. 
use a custom Comparator since only toDb tokens are comparable by now - Collections.sort(tokens, new Comparator<MergerToken>() { - - public int compare(MergerToken o1, MergerToken o2) { - if (o1 instanceof AbstractToDbToken - && o2 instanceof AbstractToDbToken) { - - return ((AbstractToDbToken) o1).compareTo(o2); - } - return 0; - } - }); - - return tokens; - } - - private Collection<DbEntity> filter(DataMap existing, FiltersConfig filtersConfig) { - Collection<DbEntity> existingFiltered = new LinkedList<DbEntity>(); - for (DbEntity entity : existing.getDbEntities()) { - if (filtersConfig.tableFilter(entity.getCatalog(), entity.getSchema()).isIncludeTable(entity.getName()) != null) { - existingFiltered.add(entity); - } - } - return existingFiltered; - } - - private DataMap loadDataMapFromDb(DataSource dataSource, DbAdapter adapter, DbLoaderConfiguration config) { - Connection conn = null; - try { - conn = dataSource.getConnection(); - - return new DbLoader(conn, adapter, new LoggingDbLoaderDelegate(LOGGER)).load(config); - } catch (SQLException e) { - throw new CayenneRuntimeException("Can't doLoad dataMap from db.", e); - } finally { - if (conn != null) { - try { - conn.close(); - } catch (SQLException e) { - // Do nothing. - } - } - } - } - - public List<MergerToken> createMergeTokens(Collection<DbEntity> existing, Collection<DbEntity> loadedFromDb, - DbLoaderConfiguration config) { - Collection<DbEntity> dbEntitiesToDrop = new LinkedList<DbEntity>(loadedFromDb); - - List<MergerToken> tokens = new LinkedList<MergerToken>(); - for (DbEntity dbEntity : existing) { - String tableName = dbEntity.getName(); - - // look for table - DbEntity detectedEntity = findDbEntity(loadedFromDb, tableName); - if (detectedEntity == null) { - tokens.add(factory.createCreateTableToDb(dbEntity)); - // TODO: does this work properly with createReverse? - for (DbRelationship rel : dbEntity.getRelationships()) { - tokens.add(factory.createAddRelationshipToDb(dbEntity, rel)); - } - continue; - } - - dbEntitiesToDrop.remove(detectedEntity); - - tokens.addAll(checkRelationshipsToDrop(dbEntity, detectedEntity)); - if (!config.isSkipRelationshipsLoading()) { - tokens.addAll(checkRelationshipsToAdd(dbEntity, detectedEntity)); - } - tokens.addAll(checkRows(dbEntity, detectedEntity)); - - if (!config.isSkipPrimaryKeyLoading()) { - MergerToken token = checkPrimaryKeyChange(dbEntity, detectedEntity); - if (token != null) { - tokens.add(token); - } - } - } - - // drop table - // TODO: support drop table. 
currently, too many tables are marked for drop - for (DbEntity e : dbEntitiesToDrop) { - tokens.add(factory.createDropTableToDb(e)); - for (DbRelationship relationship : e.getRelationships()) { - DbEntity detectedEntity = findDbEntity(existing, relationship.getTargetEntityName()); - if (detectedEntity != null) { - tokens.add(factory.createDropRelationshipToDb(detectedEntity, relationship.getReverseRelationship())); - } - } - } - - return tokens; - } - - private List<MergerToken> checkRows(DbEntity existing, DbEntity loadedFromDb) { - List<MergerToken> tokens = new LinkedList<MergerToken>(); - - // columns to drop - for (DbAttribute detected : loadedFromDb.getAttributes()) { - if (findDbAttribute(existing, detected.getName()) == null) { - tokens.add(factory.createDropColumnToDb(existing, detected)); - } - } - - // columns to add or modify - for (DbAttribute attr : existing.getAttributes()) { - String columnName = attr.getName().toUpperCase(); - - DbAttribute detected = findDbAttribute(loadedFromDb, columnName); - - if (detected == null) { - tokens.add(factory.createAddColumnToDb(existing, attr)); - if (attr.isMandatory()) { - if (valueForNull.hasValueFor(existing, attr)) { - tokens.add(factory.createSetValueForNullToDb(existing, attr, valueForNull)); - } - tokens.add(factory.createSetNotNullToDb(existing, attr)); - } - continue; - } - - // check for not null - if (attr.isMandatory() != detected.isMandatory()) { - if (attr.isMandatory()) { - if (valueForNull.hasValueFor(existing, attr)) { - tokens.add(factory.createSetValueForNullToDb(existing, attr, valueForNull)); - } - tokens.add(factory.createSetNotNullToDb(existing, attr)); - } - else { - tokens.add(factory.createSetAllowNullToDb(existing, attr)); - } - } - - // TODO: check more types than char/varchar - // TODO: psql report VARCHAR for text column, not clob - switch (detected.getType()) { - case Types.VARCHAR: - case Types.CHAR: - if (attr.getMaxLength() != detected.getMaxLength()) { - tokens.add(factory.createSetColumnTypeToDb(existing, detected, attr)); - } - break; - } - } - - return tokens; - } - - private List<MergerToken> checkRelationshipsToDrop(DbEntity dbEntity, DbEntity detectedEntity) { - List<MergerToken> tokens = new LinkedList<MergerToken>(); - - // relationships to drop - for (DbRelationship detected : detectedEntity.getRelationships()) { - if (findDbRelationship(dbEntity, detected) == null) { - - // alter detected relationship to match entity and attribute names. - // (case sensitively) - - DbEntity targetEntity = findDbEntity(dbEntity.getDataMap().getDbEntities(), detected.getTargetEntityName()); - if (targetEntity == null) { - continue; - } - - detected.setSourceEntity(dbEntity); - detected.setTargetEntityName(targetEntity); - - // manipulate the joins to match the DbAttributes in the model - for (DbJoin join : detected.getJoins()) { - DbAttribute sattr = findDbAttribute(dbEntity, join.getSourceName()); - if (sattr != null) { - join.setSourceName(sattr.getName()); - } - DbAttribute tattr = findDbAttribute(targetEntity, join.getTargetName()); - if (tattr != null) { - join.setTargetName(tattr.getName()); - } - } - - MergerToken token = factory.createDropRelationshipToDb(dbEntity, detected); - if (detected.isToMany()) { - // default toModel as we can not do drop a toMany in the db. 
only - // toOne are represented using foreign key - token = token.createReverse(factory); - } - tokens.add(token); - } - } - - return tokens; - } - - private List<MergerToken> checkRelationshipsToAdd(DbEntity dbEntity, DbEntity detectedEntity) { - - List<MergerToken> tokens = new LinkedList<MergerToken>(); - - for (DbRelationship rel : dbEntity.getRelationships()) { - if (findDbRelationship(detectedEntity, rel) == null) { - AddRelationshipToDb token = (AddRelationshipToDb) - factory.createAddRelationshipToDb(dbEntity, rel); - - if (token.shouldGenerateFkConstraint()) { - // TODO I guess we should add relationship always; in order to have ability - // TODO generate reverse relationship. If it doesn't have anything to execute it will be passed - // TODO through execution without any affect on db - tokens.add(token); - } - } - } - - return tokens; - } - - private MergerToken checkPrimaryKeyChange( - DbEntity dbEntity, - DbEntity detectedEntity) { - Collection<DbAttribute> primaryKeyOriginal = detectedEntity.getPrimaryKeys(); - Collection<DbAttribute> primaryKeyNew = dbEntity.getPrimaryKeys(); - - String primaryKeyName = null; - if (detectedEntity instanceof DetectedDbEntity) { - primaryKeyName = ((DetectedDbEntity) detectedEntity).getPrimaryKeyName(); - } - - if (upperCaseEntityNames(primaryKeyOriginal).equals(upperCaseEntityNames(primaryKeyNew))) { - return null; - } - - return factory.createSetPrimaryKeyToDb(dbEntity, primaryKeyOriginal, primaryKeyNew, primaryKeyName); - } - - private Set<String> upperCaseEntityNames(Collection<? extends Attribute> attrs) { - Set<String> names = new HashSet<String>(); - for (Attribute attr : attrs) { - names.add(attr.getName().toUpperCase()); - } - return names; - } - - /** - * case insensitive search for a {@link DbEntity} in a {@link DataMap} by name - */ - private DbEntity findDbEntity(Collection<DbEntity> dbEntities, String caseInsensitiveName) { - // TODO: create a Map with upper case keys? - for (DbEntity e : dbEntities) { - if (e.getName().equalsIgnoreCase(caseInsensitiveName)) { - return e; - } - } - return null; - } - - /** - * case insensitive search for a {@link DbAttribute} in a {@link DbEntity} by name - */ - private DbAttribute findDbAttribute(DbEntity entity, String caseInsensitiveName) { - for (DbAttribute a : entity.getAttributes()) { - if (a.getName().equalsIgnoreCase(caseInsensitiveName)) { - return a; - } - } - return null; - } - - /** - * search for a {@link DbRelationship} like rel in the given {@link DbEntity} - */ - private DbRelationship findDbRelationship(DbEntity entity, DbRelationship rel) { - for (DbRelationship candidate : entity.getRelationships()) { - if (equalDbJoinCollections(candidate.getJoins(), rel.getJoins())) { - return candidate; - } - } - return null; - } - - /** - * Return true if the two unordered {@link Collection}s of {@link DbJoin}s are - * equal. Entity and Attribute names are compared case insensitively. 
- * - * TODO complexity n^2; sort both collection and go through them to compare = 2*n*log(n) + n - */ - private static boolean equalDbJoinCollections(Collection<DbJoin> j1s, Collection<DbJoin> j2s) { - if (j1s.size() != j2s.size()) { - return false; - } - - for (DbJoin j1 : j1s) { - if (!havePair(j2s, j1)) { - return false; - } - } - - return true; - } - - private static boolean havePair(Collection<DbJoin> j2s, DbJoin j1) { - for (DbJoin j2 : j2s) { - if (!isNull(j1.getSource()) && !isNull(j1.getTarget()) && - !isNull(j2.getSource()) && !isNull(j2.getTarget()) && - j1.getSource().getEntity().getName().equalsIgnoreCase(j2.getSource().getEntity().getName()) && - j1.getTarget().getEntity().getName().equalsIgnoreCase(j2.getTarget().getEntity().getName()) && - j1.getSourceName().equalsIgnoreCase(j2.getSourceName()) && - j1.getTargetName().equalsIgnoreCase(j2.getTargetName())) { - - return true; - } - } - return false; - } - - private static boolean isNull(DbAttribute attribute) { - return attribute == null || attribute.getEntity() == null; - } + private static final Log LOGGER = LogFactory.getLog(DbMerger.class); + + private final MergerFactory factory; + + private final ValueForNullProvider valueForNull; + + public DbMerger(MergerFactory factory) { + this(factory, null); + } + + public DbMerger(MergerFactory factory, ValueForNullProvider valueForNull) { + this.factory = factory; + this.valueForNull = valueForNull == null ? new EmptyValueForNullProvider() : valueForNull; + } + + /** + * Create and return a {@link List} of {@link MergerToken}s to alter the + * given {@link DataNode} to match the given {@link DataMap} + */ + public List<MergerToken> createMergeTokens(DataSource dataSource, DbAdapter adapter, DataMap existingDataMap, + DbLoaderConfiguration config) { + return createMergeTokens(existingDataMap, loadDataMapFromDb(dataSource, adapter, config), config); + } + + /** + * Create and return a {@link List} of {@link MergerToken}s to alter the + * given {@link DataNode} to match the given {@link DataMap} + */ + public List<MergerToken> createMergeTokens(DataMap existing, DataMap loadedFomDb, DbLoaderConfiguration config) { + + loadedFomDb.setQuotingSQLIdentifiers(existing.isQuotingSQLIdentifiers()); + + List<MergerToken> tokens = createMergeTokens(filter(existing, config.getFiltersConfig()), + loadedFomDb.getDbEntities(), config); + + // sort. 
use a custom Comparator since only toDb tokens are comparable + // by now + Collections.sort(tokens, new Comparator<MergerToken>() { + + public int compare(MergerToken o1, MergerToken o2) { + if (o1 instanceof AbstractToDbToken && o2 instanceof AbstractToDbToken) { + + return ((AbstractToDbToken) o1).compareTo(o2); + } + return 0; + } + }); + + return tokens; + } + + private Collection<DbEntity> filter(DataMap existing, FiltersConfig filtersConfig) { + Collection<DbEntity> existingFiltered = new LinkedList<DbEntity>(); + for (DbEntity entity : existing.getDbEntities()) { + if (filtersConfig.tableFilter(entity.getCatalog(), entity.getSchema()).isIncludeTable(entity.getName()) != null) { + existingFiltered.add(entity); + } + } + return existingFiltered; + } + + private DataMap loadDataMapFromDb(DataSource dataSource, DbAdapter adapter, DbLoaderConfiguration config) { + try (Connection conn = dataSource.getConnection();) { + + return new DbLoader(conn, adapter, new LoggingDbLoaderDelegate(LOGGER)).load(config); + } catch (SQLException e) { + throw new CayenneRuntimeException("Can't doLoad dataMap from db.", e); + } + } + + public List<MergerToken> createMergeTokens(Collection<DbEntity> existing, Collection<DbEntity> loadedFromDb, + DbLoaderConfiguration config) { + Collection<DbEntity> dbEntitiesToDrop = new LinkedList<DbEntity>(loadedFromDb); + + List<MergerToken> tokens = new LinkedList<MergerToken>(); + for (DbEntity dbEntity : existing) { + String tableName = dbEntity.getName(); + + // look for table + DbEntity detectedEntity = findDbEntity(loadedFromDb, tableName); + if (detectedEntity == null) { + tokens.add(factory.createCreateTableToDb(dbEntity)); + // TODO: does this work properly with createReverse? + for (DbRelationship rel : dbEntity.getRelationships()) { + tokens.add(factory.createAddRelationshipToDb(dbEntity, rel)); + } + continue; + } + + dbEntitiesToDrop.remove(detectedEntity); + + tokens.addAll(checkRelationshipsToDrop(dbEntity, detectedEntity)); + if (!config.isSkipRelationshipsLoading()) { + tokens.addAll(checkRelationshipsToAdd(dbEntity, detectedEntity)); + } + tokens.addAll(checkRows(dbEntity, detectedEntity)); + + if (!config.isSkipPrimaryKeyLoading()) { + MergerToken token = checkPrimaryKeyChange(dbEntity, detectedEntity); + if (token != null) { + tokens.add(token); + } + } + } + + // drop table + // TODO: support drop table. 
currently, too many tables are marked for + // drop + for (DbEntity e : dbEntitiesToDrop) { + tokens.add(factory.createDropTableToDb(e)); + for (DbRelationship relationship : e.getRelationships()) { + DbEntity detectedEntity = findDbEntity(existing, relationship.getTargetEntityName()); + if (detectedEntity != null) { + tokens.add(factory.createDropRelationshipToDb(detectedEntity, relationship.getReverseRelationship())); + } + } + } + + return tokens; + } + + private List<MergerToken> checkRows(DbEntity existing, DbEntity loadedFromDb) { + List<MergerToken> tokens = new LinkedList<MergerToken>(); + + // columns to drop + for (DbAttribute detected : loadedFromDb.getAttributes()) { + if (findDbAttribute(existing, detected.getName()) == null) { + tokens.add(factory.createDropColumnToDb(existing, detected)); + } + } + + // columns to add or modify + for (DbAttribute attr : existing.getAttributes()) { + String columnName = attr.getName().toUpperCase(); + + DbAttribute detected = findDbAttribute(loadedFromDb, columnName); + + if (detected == null) { + tokens.add(factory.createAddColumnToDb(existing, attr)); + if (attr.isMandatory()) { + if (valueForNull.hasValueFor(existing, attr)) { + tokens.add(factory.createSetValueForNullToDb(existing, attr, valueForNull)); + } + tokens.add(factory.createSetNotNullToDb(existing, attr)); + } + continue; + } + + // check for not null + if (attr.isMandatory() != detected.isMandatory()) { + if (attr.isMandatory()) { + if (valueForNull.hasValueFor(existing, attr)) { + tokens.add(factory.createSetValueForNullToDb(existing, attr, valueForNull)); + } + tokens.add(factory.createSetNotNullToDb(existing, attr)); + } else { + tokens.add(factory.createSetAllowNullToDb(existing, attr)); + } + } + + // TODO: check more types than char/varchar + // TODO: psql report VARCHAR for text column, not clob + switch (detected.getType()) { + case Types.VARCHAR: + case Types.CHAR: + if (attr.getMaxLength() != detected.getMaxLength()) { + tokens.add(factory.createSetColumnTypeToDb(existing, detected, attr)); + } + break; + } + } + + return tokens; + } + + private List<MergerToken> checkRelationshipsToDrop(DbEntity dbEntity, DbEntity detectedEntity) { + List<MergerToken> tokens = new LinkedList<MergerToken>(); + + // relationships to drop + for (DbRelationship detected : detectedEntity.getRelationships()) { + if (findDbRelationship(dbEntity, detected) == null) { + + // alter detected relationship to match entity and attribute + // names. + // (case sensitively) + + DbEntity targetEntity = findDbEntity(dbEntity.getDataMap().getDbEntities(), + detected.getTargetEntityName()); + if (targetEntity == null) { + continue; + } + + detected.setSourceEntity(dbEntity); + detected.setTargetEntityName(targetEntity); + + // manipulate the joins to match the DbAttributes in the model + for (DbJoin join : detected.getJoins()) { + DbAttribute sattr = findDbAttribute(dbEntity, join.getSourceName()); + if (sattr != null) { + join.setSourceName(sattr.getName()); + } + DbAttribute tattr = findDbAttribute(targetEntity, join.getTargetName()); + if (tattr != null) { + join.setTargetName(tattr.getName()); + } + } + + MergerToken token = factory.createDropRelationshipToDb(dbEntity, detected); + if (detected.isToMany()) { + // default toModel as we can not do drop a toMany in the db. 
+                    token = token.createReverse(factory);
+                }
+                tokens.add(token);
+            }
+        }
+
+        return tokens;
+    }
+
+    private List<MergerToken> checkRelationshipsToAdd(DbEntity dbEntity, DbEntity detectedEntity) {
+
+        List<MergerToken> tokens = new LinkedList<MergerToken>();
+
+        for (DbRelationship rel : dbEntity.getRelationships()) {
+            if (findDbRelationship(detectedEntity, rel) == null) {
+                AddRelationshipToDb token = (AddRelationshipToDb) factory.createAddRelationshipToDb(dbEntity, rel);
+
+                if (token.shouldGenerateFkConstraint()) {
+                    // TODO: we should probably always add the relationship,
+                    // so that the reverse relationship can be generated; a
+                    // token with nothing to execute would pass through
+                    // execution without any effect on the db
+                    tokens.add(token);
+                }
+            }
+        }
+
+        return tokens;
+    }
+
+    private MergerToken checkPrimaryKeyChange(DbEntity dbEntity, DbEntity detectedEntity) {
+        Collection<DbAttribute> primaryKeyOriginal = detectedEntity.getPrimaryKeys();
+        Collection<DbAttribute> primaryKeyNew = dbEntity.getPrimaryKeys();
+
+        String primaryKeyName = null;
+        if (detectedEntity instanceof DetectedDbEntity) {
+            primaryKeyName = ((DetectedDbEntity) detectedEntity).getPrimaryKeyName();
+        }
+
+        if (upperCaseEntityNames(primaryKeyOriginal).equals(upperCaseEntityNames(primaryKeyNew))) {
+            return null;
+        }
+
+        return factory.createSetPrimaryKeyToDb(dbEntity, primaryKeyOriginal, primaryKeyNew, primaryKeyName);
+    }
+
+    private Set<String> upperCaseEntityNames(Collection<? extends Attribute> attrs) {
+        Set<String> names = new HashSet<String>();
+        for (Attribute attr : attrs) {
+            names.add(attr.getName().toUpperCase());
+        }
+        return names;
+    }
+
+    /**
+     * Case-insensitive search for a {@link DbEntity} in a {@link DataMap} by
+     * name.
+     */
+    private DbEntity findDbEntity(Collection<DbEntity> dbEntities, String caseInsensitiveName) {
+        // TODO: create a Map with upper case keys?
+        for (DbEntity e : dbEntities) {
+            if (e.getName().equalsIgnoreCase(caseInsensitiveName)) {
+                return e;
+            }
+        }
+        return null;
+    }
+
+    /**
+     * Case-insensitive search for a {@link DbAttribute} in a {@link DbEntity}
+     * by name.
+     */
+    private DbAttribute findDbAttribute(DbEntity entity, String caseInsensitiveName) {
+        for (DbAttribute a : entity.getAttributes()) {
+            if (a.getName().equalsIgnoreCase(caseInsensitiveName)) {
+                return a;
+            }
+        }
+        return null;
+    }
+
+    /**
+     * Searches for a {@link DbRelationship} like rel in the given
+     * {@link DbEntity}.
+     */
+    private DbRelationship findDbRelationship(DbEntity entity, DbRelationship rel) {
+        for (DbRelationship candidate : entity.getRelationships()) {
+            if (equalDbJoinCollections(candidate.getJoins(), rel.getJoins())) {
+                return candidate;
+            }
+        }
+        return null;
+    }
+
+    /**
+     * Returns true if the two unordered {@link Collection}s of {@link DbJoin}s
+     * are equal. Entity and Attribute names are compared case-insensitively.
+     *
+     * TODO: complexity is n^2; sorting both collections and comparing them
+     * pairwise would be 2*n*log(n) + n
+     */
+    private static boolean equalDbJoinCollections(Collection<DbJoin> j1s, Collection<DbJoin> j2s) {
+        if (j1s.size() != j2s.size()) {
+            return false;
+        }
+
+        for (DbJoin j1 : j1s) {
+            if (!havePair(j2s, j1)) {
+                return false;
+            }
+        }
+
+        return true;
+    }
+
+    private static boolean havePair(Collection<DbJoin> j2s, DbJoin j1) {
+        for (DbJoin j2 : j2s) {
+            if (!isNull(j1.getSource()) && !isNull(j1.getTarget()) && !isNull(j2.getSource())
+                    && !isNull(j2.getTarget())
+                    && j1.getSource().getEntity().getName().equalsIgnoreCase(j2.getSource().getEntity().getName())
+                    && j1.getTarget().getEntity().getName().equalsIgnoreCase(j2.getTarget().getEntity().getName())
+                    && j1.getSourceName().equalsIgnoreCase(j2.getSourceName())
+                    && j1.getTargetName().equalsIgnoreCase(j2.getTargetName())) {
+
+                return true;
+            }
+        }
+        return false;
+    }
+
+    private static boolean isNull(DbAttribute attribute) {
+        return attribute == null || attribute.getEntity() == null;
+    }
 }

http://git-wip-us.apache.org/repos/asf/cayenne/blob/26d8434d/cayenne-server/src/main/java/org/apache/cayenne/util/Util.java
----------------------------------------------------------------------
diff --git a/cayenne-server/src/main/java/org/apache/cayenne/util/Util.java b/cayenne-server/src/main/java/org/apache/cayenne/util/Util.java
index 1700b67..e0f5571 100644
--- a/cayenne-server/src/main/java/org/apache/cayenne/util/Util.java
+++ b/cayenne-server/src/main/java/org/apache/cayenne/util/Util.java
@@ -105,15 +105,12 @@ public class Util {
      */
     public static String stringFromFile(File file, String joinWith) throws IOException {
         StringBuilder buf = new StringBuilder();
-        BufferedReader in = new BufferedReader(new FileReader(file));
-        try {
+        try (BufferedReader in = new BufferedReader(new FileReader(file));) {
             String line = null;
             while ((line = in.readLine()) != null) {
                 buf.append(line).append(joinWith);
             }
-        } finally {
-            in.close();
         }
         return buf.toString();
     }
@@ -232,6 +229,7 @@ public class Util {
     /**
      * Creates Serializable object copy using serialization/deserialization.
      */
+    @SuppressWarnings("unchecked")
     public static <T extends Serializable> T cloneViaSerialization(T object) throws Exception {
         ByteArrayOutputStream bytes = new ByteArrayOutputStream() {
@@ -241,18 +239,13 @@ public class Util {
             }
         };
 
-        ObjectOutputStream out = new ObjectOutputStream(bytes);
-        out.writeObject(object);
-        out.close();
-
-        ObjectInputStream in = new ObjectInputStream(new ByteArrayInputStream(bytes.toByteArray()));
-        T copy = (T) in.readObject();
-
-        // no need to close the stream - we created it and now will be throwing
-        // away...
-        // in.close();
+        try (ObjectOutputStream out = new ObjectOutputStream(bytes)) {
+            out.writeObject(object);
+        }
 
-        return copy;
+        try (ObjectInputStream in = new ObjectInputStream(new ByteArrayInputStream(bytes.toByteArray()))) {
+            return (T) in.readObject();
+        }
     }
 
     /**

http://git-wip-us.apache.org/repos/asf/cayenne/blob/26d8434d/cayenne-server/src/test/java/org/apache/cayenne/access/DataContextIT.java
----------------------------------------------------------------------
diff --git a/cayenne-server/src/test/java/org/apache/cayenne/access/DataContextIT.java b/cayenne-server/src/test/java/org/apache/cayenne/access/DataContextIT.java
index 88bf277..5715dac 100644
--- a/cayenne-server/src/test/java/org/apache/cayenne/access/DataContextIT.java
+++ b/cayenne-server/src/test/java/org/apache/cayenne/access/DataContextIT.java
@@ -666,8 +666,7 @@ public class DataContextIT extends ServerCase {
 
         SelectQuery<Artist> q1 = new SelectQuery<Artist>(Artist.class);
 
-        ResultIterator<Artist> it = context.iterator(q1);
-        try {
+        try (ResultIterator<Artist> it = context.iterator(q1);) {
             int count = 0;
 
             for (Artist a : it) {
@@ -675,8 +674,6 @@ public class DataContextIT extends ServerCase {
             }
 
             assertEquals(7, count);
-        } finally {
-            it.close();
         }
     }
 
@@ -685,9 +682,8 @@ public class DataContextIT extends ServerCase {
         createLargeArtistsDataSet();
 
         SelectQuery<Artist> q1 = new SelectQuery<Artist>(Artist.class);
-        ResultBatchIterator<Artist> it = context.batchIterator(q1, 5);
 
-        try {
+        try (ResultBatchIterator<Artist> it = context.batchIterator(q1, 5);) {
             int count = 0;
 
             for (List<Artist> artistList : it) {
@@ -696,8 +692,6 @@ public class DataContextIT extends ServerCase {
             }
 
             assertEquals(4, count);
-        } finally {
-            it.close();
         }
     }
 
@@ -707,9 +701,8 @@ public class DataContextIT extends ServerCase {
         createArtistsDataSet();
 
         SelectQuery<Artist> q1 = new SelectQuery<Artist>(Artist.class);
-        ResultIterator<?> it = context.performIteratedQuery(q1);
 
-        try {
+        try (ResultIterator<?> it = context.performIteratedQuery(q1);) {
             int count = 0;
             while (it.hasNextRow()) {
                 it.nextRow();
@@ -717,8 +710,6 @@ public class DataContextIT extends ServerCase {
             }
 
             assertEquals(7, count);
-        } finally {
-            it.close();
         }
     }
 
@@ -726,9 +717,7 @@ public class DataContextIT extends ServerCase {
     public void testPerformIteratedQuery2() throws Exception {
         createArtistsAndPaintingsDataSet();
 
-        ResultIterator<?> it = context.performIteratedQuery(SelectQuery.query(Artist.class));
-
-        try {
+        try (ResultIterator<?> it = context.performIteratedQuery(SelectQuery.query(Artist.class));) {
             while (it.hasNextRow()) {
                 DataRow row = (DataRow) it.nextRow();
 
@@ -738,8 +727,6 @@ public class DataContextIT extends ServerCase {
             assertNotNull(paintings);
             assertEquals("Expected one painting for artist: " + artist, 1, paintings.size());
         }
-        } finally {
-            it.close();
         }
     }
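A note on the recurring pattern in this changeset: each hand-rolled try/finally block that closed a resource manually is replaced with a Java 7 try-with-resources statement, which requires the resource (Connection, BufferedReader, ObjectOutputStream, ResultIterator, and so on) to implement AutoCloseable. The sketch below is not part of the patch; it only contrasts the two styles on plain java.io types. The behavioral difference: close() is invoked automatically, and an exception thrown by close() no longer masks the primary exception but is attached to it as a suppressed exception.

    import java.io.BufferedReader;
    import java.io.File;
    import java.io.FileReader;
    import java.io.IOException;

    public class TryWithResourcesSketch {

        // Old style: if readLine() throws and close() then also throws,
        // the close() exception propagates and the original one is lost.
        static String firstLineOldStyle(File file) throws IOException {
            BufferedReader in = new BufferedReader(new FileReader(file));
            try {
                return in.readLine();
            } finally {
                in.close();
            }
        }

        // New style: the reader is closed automatically; a close() failure
        // is recorded via Throwable.addSuppressed() on the primary exception
        // instead of replacing it.
        static String firstLineNewStyle(File file) throws IOException {
            try (BufferedReader in = new BufferedReader(new FileReader(file))) {
                return in.readLine();
            }
        }
    }

Incidentally, the trailing semicolon in several of the converted blocks above, e.g. try (Connection conn = dataSource.getConnection();), is legal but redundant; the last resource in the list does not need a terminating semicolon.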

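The TODO on equalDbJoinCollections() suggests replacing the n^2 pairwise scan with a sort-based comparison. A minimal sketch of that idea follows, assuming only the DbJoin accessors already used in the patch (getSource(), getTarget(), getSourceName(), getTargetName()); the null handling performed by isNull() is omitted for brevity, and equalDbJoinCollectionsSorted/sortedJoinKeys are hypothetical names, not code from the commit.

    // Hypothetical O(n log n) variant of equalDbJoinCollections(): build a
    // normalized, sorted key list per collection and compare the lists.
    // Assumes org.apache.cayenne.map.DbJoin and java.util imports.
    private static boolean equalDbJoinCollectionsSorted(Collection<DbJoin> j1s, Collection<DbJoin> j2s) {
        if (j1s.size() != j2s.size()) {
            return false;
        }
        return sortedJoinKeys(j1s).equals(sortedJoinKeys(j2s));
    }

    private static List<String> sortedJoinKeys(Collection<DbJoin> joins) {
        List<String> keys = new ArrayList<String>(joins.size());
        for (DbJoin join : joins) {
            // upper-case the key so the comparison stays case-insensitive,
            // matching the equalsIgnoreCase() calls in havePair();
            // null-safety checks from isNull() are omitted for brevity
            keys.add((join.getSource().getEntity().getName() + '.' + join.getSourceName() + "->"
                    + join.getTarget().getEntity().getName() + '.' + join.getTargetName()).toUpperCase());
        }
        Collections.sort(keys);
        return keys;
    }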