http://git-wip-us.apache.org/repos/asf/cayenne/blob/26d8434d/cayenne-server/src/main/java/org/apache/cayenne/access/jdbc/ProcedureAction.java
----------------------------------------------------------------------
diff --git a/cayenne-server/src/main/java/org/apache/cayenne/access/jdbc/ProcedureAction.java b/cayenne-server/src/main/java/org/apache/cayenne/access/jdbc/ProcedureAction.java
index d1068ea..67ee201 100644
--- a/cayenne-server/src/main/java/org/apache/cayenne/access/jdbc/ProcedureAction.java
+++ b/cayenne-server/src/main/java/org/apache/cayenne/access/jdbc/ProcedureAction.java
@@ -37,219 +37,193 @@ import org.apache.cayenne.map.ProcedureParameter;
 import org.apache.cayenne.query.ProcedureQuery;
 
 /**
- * A SQLAction that runs a stored procedure. Note that ProcedureAction has internal state
- * and is not thread-safe.
+ * A SQLAction that runs a stored procedure. Note that ProcedureAction has
+ * internal state and is not thread-safe.
  * 
  * @since 1.2
  */
 public class ProcedureAction extends BaseSQLAction {
 
-    protected ProcedureQuery query;
-
-    /**
-     * Holds the number of ResultSets processed by the action. This value is reset to zero
-     * on every "performAction" call.
-     */
-    protected int processedResultSets;
-
-    /**
-     * @since 4.0
-     */
-    public ProcedureAction(ProcedureQuery query, DataNode dataNode) {
-        super(dataNode);
-        this.query = query;
-    }
-
-    @Override
-    public void performAction(Connection connection, OperationObserver observer)
-            throws SQLException, Exception {
-
-        processedResultSets = 0;
-
-        ProcedureTranslator transl = createTranslator(connection);
-
-        CallableStatement statement = (CallableStatement) transl.createStatement();
-
-        try {
-            initStatement(statement);
-
-            // stored procedure may contain a mixture of update counts and result sets,
-            // and out parameters. Read out parameters first, then
-            // iterate until we exhaust all results
-
-            // TODO: andrus, 4/2/2007 - according to the docs we should store the boolean
-            // return value of this method and avoid calling 'getMoreResults' if it is
-            // true.
-            // some db's handle this well, some don't (MySQL).
-            
-            // 09/23/2013: almost all adapters except Oracle (and maybe a few
-            // more?) are actually using the correct strategy, so making it a
-            // default in the superclass, and isolating hack to subclasses is
-            // probably a good idea
-
-            statement.execute();
-
-            // read out parameters
-            readProcedureOutParameters(statement, observer);
-
-            // read the rest of the query
-            while (true) {
-                if (statement.getMoreResults()) {
-                    ResultSet rs = statement.getResultSet();
-
-                    try {
-                        RowDescriptor descriptor = describeResultSet(
-                                rs,
-                                processedResultSets++);
-                        readResultSet(rs, descriptor, query, observer);
-                    }
-                    finally {
-                        try {
-                            rs.close();
-                        }
-                        catch (SQLException ex) {
-                        }
-                    }
-                }
-                else {
-                    int updateCount = statement.getUpdateCount();
-                    if (updateCount == -1) {
-                        break;
-                    }
-                    dataNode.getJdbcEventLogger().logUpdateCount(updateCount);
-                    observer.nextCount(query, updateCount);
-                }
-            }
-        }
-        finally {
-            try {
-                statement.close();
-            }
-            catch (SQLException ex) {
-
-            }
-        }
-    }
-
-    /**
-     * Returns the ProcedureTranslator to use for this ProcedureAction.
-     * 
-     * @param connection JDBC connection
-     */
-    protected ProcedureTranslator createTranslator(Connection connection) {
-        ProcedureTranslator translator = new ProcedureTranslator();
-        translator.setAdapter(dataNode.getAdapter());
-        translator.setQuery(query);
-        translator.setEntityResolver(dataNode.getEntityResolver());
-        translator.setConnection(connection);
-        translator.setJdbcEventLogger(dataNode.getJdbcEventLogger());
-        return translator;
-    }
-
-    /**
-     * Creates a RowDescriptor for result set.
-     * 
-     * @param resultSet JDBC ResultSet
-     * @param setIndex a zero-based index of the ResultSet in the query results.
-     */
-    protected RowDescriptor describeResultSet(ResultSet resultSet, int setIndex)
-            throws SQLException {
-
-        if (setIndex < 0) {
-            throw new IllegalArgumentException(
-                    "Expected a non-negative result set index. Got: " + setIndex);
-        }
-
-        RowDescriptorBuilder builder = new RowDescriptorBuilder();
-
-        List<ColumnDescriptor[]> descriptors = query.getResultDescriptors();
-
-        if (descriptors.isEmpty()) {
-            builder.setResultSet(resultSet);
-        }
-        else {
-
-            // if one result is described, all of them must be present...
-            if (setIndex >= descriptors.size() || descriptors.get(setIndex) == null) {
-                throw new CayenneRuntimeException(
-                        "No descriptor for result set at index '"
-                                + setIndex
-                                + "' configured.");
-            }
-
-            ColumnDescriptor[] columns = descriptors.get(setIndex);
-            builder.setColumns(columns);
-        }
-
-        switch (query.getColumnNamesCapitalization()) {
-            case LOWER:
-                builder.useLowercaseColumnNames();
-                break;
-            case UPPER:
-                builder.useUppercaseColumnNames();
-                break;
-        }
-
-        return builder.getDescriptor(dataNode.getAdapter().getExtendedTypes());
-    }
-
-    /**
-     * Returns stored procedure for an internal query.
-     */
-    protected Procedure getProcedure() {
-        return query.getMetaData(dataNode.getEntityResolver()).getProcedure();
-    }
-
-    /**
-     * Helper method that reads OUT parameters of a CallableStatement.
-     */
-    protected void readProcedureOutParameters(
-            CallableStatement statement,
-            OperationObserver delegate) throws SQLException, Exception {
-
-        long t1 = System.currentTimeMillis();
-
-        // build result row...
-        DataRow result = null;
-        List<ProcedureParameter> parameters = getProcedure().getCallParameters();
-        for (int i = 0; i < parameters.size(); i++) {
-            ProcedureParameter parameter = parameters.get(i);
-
-            if (!parameter.isOutParam()) {
-                continue;
-            }
-
-            if (result == null) {
-                result = new DataRow(2);
-            }
-
-            ColumnDescriptor descriptor = new ColumnDescriptor(parameter);
-            ExtendedType type = dataNode.getAdapter().getExtendedTypes().getRegisteredType(
-                    descriptor.getJavaClass());
-            Object val = type.materializeObject(statement, i + 1, descriptor
-                    .getJdbcType());
-
-            result.put(descriptor.getDataRowKey(), val);
-        }
-
-        if (result != null && !result.isEmpty()) {
-            // treat out parameters as a separate data row set
-            dataNode.getJdbcEventLogger().logSelectCount(1, System.currentTimeMillis() - t1);
-            delegate.nextRows(query, Collections.singletonList(result));
-        }
-    }
-
-    /**
-     * Initializes statement with query parameters
-     * 
-     * @throws Exception
-     */
-    protected void initStatement(CallableStatement statement) throws Exception {
-        int statementFetchSize = query
-                .getMetaData(dataNode.getEntityResolver())
-                .getStatementFetchSize();
-        if (statementFetchSize != 0) {
-            statement.setFetchSize(statementFetchSize);
-        }
-    }
+       protected ProcedureQuery query;
+
+       /**
+        * Holds the number of ResultSets processed by the action. This value is reset
+        * to zero on every "performAction" call.
+        */
+       protected int processedResultSets;
+
+       /**
+        * @since 4.0
+        */
+       public ProcedureAction(ProcedureQuery query, DataNode dataNode) {
+               super(dataNode);
+               this.query = query;
+       }
+
+       @Override
+       public void performAction(Connection connection, OperationObserver observer) throws SQLException, Exception {
+
+               processedResultSets = 0;
+
+               ProcedureTranslator transl = createTranslator(connection);
+
+               try (CallableStatement statement = (CallableStatement) transl.createStatement();) {
+                       initStatement(statement);
+
+                       // stored procedure may contain a mixture of update counts and
+                       // result sets, and out parameters. Read out parameters first,
+                       // then iterate until we exhaust all results
+
+                       // TODO: andrus, 4/2/2007 - according to the docs we should store
+                       // the boolean return value of this method and avoid calling
+                       // 'getMoreResults' if it is true.
+                       // some db's handle this well, some don't (MySQL).
+
+                       // 09/23/2013: almost all adapters except Oracle (and maybe a few
+                       // more?) are actually using the correct strategy, so making it a
+                       // default in the superclass, and isolating hack to subclasses is
+                       // probably a good idea
+
+                       statement.execute();
+
+                       // read out parameters
+                       readProcedureOutParameters(statement, observer);
+
+                       // read the rest of the query
+                       while (true) {
+                               if (statement.getMoreResults()) {
+
+                                       try (ResultSet rs = statement.getResultSet();) {
+                                               RowDescriptor descriptor = describeResultSet(rs, processedResultSets++);
+                                               readResultSet(rs, descriptor, query, observer);
+                                       }
+                               } else {
+                                       int updateCount = statement.getUpdateCount();
+                                       if (updateCount == -1) {
+                                               break;
+                                       }
+                                       dataNode.getJdbcEventLogger().logUpdateCount(updateCount);
+                                       observer.nextCount(query, updateCount);
+                               }
+                       }
+               }
+       }
+
+       /**
+        * Returns the ProcedureTranslator to use for this ProcedureAction.
+        * 
+        * @param connection
+        *            JDBC connection
+        */
+       protected ProcedureTranslator createTranslator(Connection connection) {
+               ProcedureTranslator translator = new ProcedureTranslator();
+               translator.setAdapter(dataNode.getAdapter());
+               translator.setQuery(query);
+               translator.setEntityResolver(dataNode.getEntityResolver());
+               translator.setConnection(connection);
+               translator.setJdbcEventLogger(dataNode.getJdbcEventLogger());
+               return translator;
+       }
+
+       /**
+        * Creates a RowDescriptor for result set.
+        * 
+        * @param resultSet
+        *            JDBC ResultSet
+        * @param setIndex
+        *            a zero-based index of the ResultSet in the query results.
+        */
+       protected RowDescriptor describeResultSet(ResultSet resultSet, int setIndex) throws SQLException {
+
+               if (setIndex < 0) {
+                       throw new IllegalArgumentException("Expected a non-negative result set index. Got: " + setIndex);
+               }
+
+               RowDescriptorBuilder builder = new RowDescriptorBuilder();
+
+               List<ColumnDescriptor[]> descriptors = query.getResultDescriptors();
+
+               if (descriptors.isEmpty()) {
+                       builder.setResultSet(resultSet);
+               } else {
+
+                       // if one result is described, all of them must be present...
+                       if (setIndex >= descriptors.size() || descriptors.get(setIndex) == null) {
+                               throw new CayenneRuntimeException("No descriptor for result set at index '" + setIndex
+                                               + "' configured.");
+                       }
+
+                       ColumnDescriptor[] columns = descriptors.get(setIndex);
+                       builder.setColumns(columns);
+               }
+
+               switch (query.getColumnNamesCapitalization()) {
+               case LOWER:
+                       builder.useLowercaseColumnNames();
+                       break;
+               case UPPER:
+                       builder.useUppercaseColumnNames();
+                       break;
+               }
+
+               return builder.getDescriptor(dataNode.getAdapter().getExtendedTypes());
+       }
+
+       /**
+        * Returns stored procedure for an internal query.
+        */
+       protected Procedure getProcedure() {
+               return query.getMetaData(dataNode.getEntityResolver()).getProcedure();
+       }
+
+       /**
+        * Helper method that reads OUT parameters of a CallableStatement.
+        */
+       protected void readProcedureOutParameters(CallableStatement statement, OperationObserver delegate)
+                       throws SQLException, Exception {
+
+               long t1 = System.currentTimeMillis();
+
+               // build result row...
+               DataRow result = null;
+               List<ProcedureParameter> parameters = getProcedure().getCallParameters();
+               for (int i = 0; i < parameters.size(); i++) {
+                       ProcedureParameter parameter = parameters.get(i);
+
+                       if (!parameter.isOutParam()) {
+                               continue;
+                       }
+
+                       if (result == null) {
+                               result = new DataRow(2);
+                       }
+
+                       ColumnDescriptor descriptor = new ColumnDescriptor(parameter);
+                       ExtendedType type = dataNode.getAdapter().getExtendedTypes().getRegisteredType(descriptor.getJavaClass());
+                       Object val = type.materializeObject(statement, i + 1, descriptor.getJdbcType());
+
+                       result.put(descriptor.getDataRowKey(), val);
+               }
+
+               if (result != null && !result.isEmpty()) {
+                       // treat out parameters as a separate data row set
+                       dataNode.getJdbcEventLogger().logSelectCount(1, System.currentTimeMillis() - t1);
+                       delegate.nextRows(query, Collections.singletonList(result));
+               }
+       }
+
+       /**
+        * Initializes statement with query parameters
+        * 
+        * @throws Exception
+        */
+       protected void initStatement(CallableStatement statement) throws Exception {
+               int statementFetchSize = query.getMetaData(dataNode.getEntityResolver()).getStatementFetchSize();
+               if (statementFetchSize != 0) {
+                       statement.setFetchSize(statementFetchSize);
+               }
+       }
 }
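
The substance of this change is mechanical: every manual try/finally close is replaced
with Java 7 try-with-resources. Below is a minimal, self-contained sketch of the pattern
as it applies to ProcedureAction's multi-result loop; the procedure name "my_proc" and
the class name are illustrative only, not part of the patch. One behavioral nuance: the
old finally blocks silently swallowed SQLException from close(), while try-with-resources
propagates close() failures (as suppressed exceptions when the body also throws).

    import java.sql.CallableStatement;
    import java.sql.Connection;
    import java.sql.ResultSet;
    import java.sql.SQLException;

    public class MultiResultSketch {

        static void run(Connection connection) throws SQLException {
            // the statement is closed automatically, even on exception
            try (CallableStatement call = connection.prepareCall("{call my_proc()}")) {
                call.execute();

                // drain the mixture of result sets and update counts
                while (true) {
                    if (call.getMoreResults()) {
                        // each ResultSet gets its own try-with-resources scope
                        try (ResultSet rs = call.getResultSet()) {
                            while (rs.next()) {
                                // process the row...
                            }
                        }
                    } else {
                        int updateCount = call.getUpdateCount();
                        if (updateCount == -1) {
                            break; // no more results of either kind
                        }
                        // process the update count...
                    }
                }
            }
        }
    }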

http://git-wip-us.apache.org/repos/asf/cayenne/blob/26d8434d/cayenne-server/src/main/java/org/apache/cayenne/access/loader/DbAttributesPerSchemaLoader.java
----------------------------------------------------------------------
diff --git a/cayenne-server/src/main/java/org/apache/cayenne/access/loader/DbAttributesPerSchemaLoader.java b/cayenne-server/src/main/java/org/apache/cayenne/access/loader/DbAttributesPerSchemaLoader.java
index b32c230..769255f 100644
--- a/cayenne-server/src/main/java/org/apache/cayenne/access/loader/DbAttributesPerSchemaLoader.java
+++ b/cayenne-server/src/main/java/org/apache/cayenne/access/loader/DbAttributesPerSchemaLoader.java
@@ -34,87 +34,89 @@ import java.util.*;
  * */
 public class DbAttributesPerSchemaLoader extends DbAttributesBaseLoader {
 
-    private final TableFilter filter;
-
-    private Map<String, List<DbAttribute>> attributes;
-
-    public DbAttributesPerSchemaLoader(String catalog, String schema, DatabaseMetaData metaData,
-                                       DbAdapter adapter, TableFilter filter) {
-        super(catalog, schema, metaData, adapter);
-
-        this.filter = filter;
-    }
-
-    private Map<String, List<DbAttribute>> loadDbAttributes() throws SQLException {
-        Map<String, List<DbAttribute>> attributes = new HashMap<String, List<DbAttribute>>();
-        ResultSet rs = getMetaData().getColumns(getCatalog(), getSchema(), "%", "%");
-        try {
-            Set<String> columns = new HashSet<String>();
-
-            while (rs.next()) {
-                if (columns.isEmpty()) {
-                    ResultSetMetaData rsMetaData = rs.getMetaData();
-                    for (int i = 1; i <= rsMetaData.getColumnCount(); i++) {
-                        columns.add(rsMetaData.getColumnLabel(i));
-                    }
-                }
-
-                // for a reason not quite apparent to me, Oracle sometimes
-                // returns duplicate record sets for the same table, messing up table
-                // names. E.g. for the system table "WK$_ATTR_MAPPING" columns are
-                // returned twice - as "WK$_ATTR_MAPPING" and "WK$$_ATTR_MAPPING"... Go figure
-                String tableName = rs.getString("TABLE_NAME");
-                String columnName = rs.getString("COLUMN_NAME");
-
-                PatternFilter columnFilter = filter.isIncludeTable(tableName);
-                /*
-                * A possible optimization: if the filter contained a map<tableName, columnFilter>,
-                * we could replace it after the tables are loaded, since pattern matching has
-                * already been done once and we know exactly which tables to process.
-                */
-                if (columnFilter == null || !columnFilter.isInclude(columnName)) {
-                    if (LOGGER.isDebugEnabled()) {
-                        LOGGER.debug("Skip column '" + tableName + "." + columnName
-                                + "' (Path: " + getCatalog() + "/" + getSchema() + "; Filter: " + columnFilter + ")");
-                    }
-                    continue;
-                }
-
-                List<DbAttribute> attrs = attributes.get(tableName);
-                if (attrs == null) {
-                    attrs = new LinkedList<DbAttribute>();
-
-                    attributes.put(tableName, attrs);
-                }
-
-                attrs.add(loadDbAttribute(columns, rs));
-            }
-        } finally {
-            rs.close();
-        }
-
-        return attributes;
-    }
-
-    @Override
-    protected List<DbAttribute> loadDbAttributes(String tableName) {
-        Map<String, List<DbAttribute>> attributes = getAttributes();
-        if (attributes != null) {
-            return attributes.get(tableName);
-        }
-
-        return new LinkedList<DbAttribute>();
-    }
-
-    public Map<String, List<DbAttribute>> getAttributes() {
-        if (attributes == null) {
-            try {
-                attributes = loadDbAttributes();
-            } catch (SQLException e) {
-                LOGGER.error(e);
-                attributes = new HashMap<String, List<DbAttribute>>();
-            }
-        }
-        return attributes;
-    }
+       private final TableFilter filter;
+
+       private Map<String, List<DbAttribute>> attributes;
+
+       public DbAttributesPerSchemaLoader(String catalog, String schema, DatabaseMetaData metaData, DbAdapter adapter,
+                       TableFilter filter) {
+               super(catalog, schema, metaData, adapter);
+
+               this.filter = filter;
+       }
+
+       private Map<String, List<DbAttribute>> loadDbAttributes() throws SQLException {
+               Map<String, List<DbAttribute>> attributes = new HashMap<String, List<DbAttribute>>();
+
+               try (ResultSet rs = getMetaData().getColumns(getCatalog(), getSchema(), "%", "%");) {
+                       Set<String> columns = new HashSet<String>();
+
+                       while (rs.next()) {
+                               if (columns.isEmpty()) {
+                                       ResultSetMetaData rsMetaData = rs.getMetaData();
+                                       for (int i = 1; i <= rsMetaData.getColumnCount(); i++) {
+                                               columns.add(rsMetaData.getColumnLabel(i));
+                                       }
+                               }
+
+                               // for a reason not quite apparent to me, Oracle sometimes
+                               // returns duplicate record sets for the same table, messing up
+                               // table names. E.g. for the system table "WK$_ATTR_MAPPING"
+                               // columns are returned twice - as "WK$_ATTR_MAPPING" and
+                               // "WK$$_ATTR_MAPPING"... Go figure
+                               String tableName = rs.getString("TABLE_NAME");
+                               String columnName = rs.getString("COLUMN_NAME");
+
+                               PatternFilter columnFilter = filter.isIncludeTable(tableName);
+                               /*
+                                * A possible optimization: if the filter contained a
+                                * map<tableName, columnFilter>, we could replace it after the
+                                * tables are loaded, since pattern matching has already been
+                                * done once and we know exactly which tables to process.
+                                */
+                               if (columnFilter == null || !columnFilter.isInclude(columnName)) {
+                                       if (LOGGER.isDebugEnabled()) {
+                                               LOGGER.debug("Skip column '" + tableName + "." + columnName + "' (Path: " + getCatalog() + "/"
+                                                               + getSchema() + "; Filter: " + columnFilter + ")");
+                                       }
+                                       continue;
+                               }
+
+                               List<DbAttribute> attrs = attributes.get(tableName);
+                               if (attrs == null) {
+                                       attrs = new LinkedList<DbAttribute>();
+
+                                       attributes.put(tableName, attrs);
+                               }
+
+                               attrs.add(loadDbAttribute(columns, rs));
+                       }
+               }
+
+               return attributes;
+       }
+
+       @Override
+       protected List<DbAttribute> loadDbAttributes(String tableName) {
+               Map<String, List<DbAttribute>> attributes = getAttributes();
+               if (attributes != null) {
+                       return attributes.get(tableName);
+               }
+
+               return new LinkedList<DbAttribute>();
+       }
+
+       public Map<String, List<DbAttribute>> getAttributes() {
+               if (attributes == null) {
+                       try {
+                               attributes = loadDbAttributes();
+                       } catch (SQLException e) {
+                               LOGGER.error(e);
+                               attributes = new HashMap<String, List<DbAttribute>>();
+                       }
+               }
+               return attributes;
+       }
 }
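
For context, loadDbAttributes() above is built around a single DatabaseMetaData.getColumns()
scan. A simplified sketch of that ResultSet handling, grouping column names by table, is
shown here; it omits the TableFilter logic and the DbAttribute construction, and all names
are illustrative:

    import java.sql.Connection;
    import java.sql.DatabaseMetaData;
    import java.sql.ResultSet;
    import java.sql.SQLException;
    import java.util.HashMap;
    import java.util.LinkedList;
    import java.util.List;
    import java.util.Map;

    public class ColumnsByTableSketch {

        static Map<String, List<String>> columnsByTable(Connection c, String catalog, String schema)
                throws SQLException {

            Map<String, List<String>> byTable = new HashMap<String, List<String>>();
            DatabaseMetaData metaData = c.getMetaData();

            // "%" matches every table name and every column name
            try (ResultSet rs = metaData.getColumns(catalog, schema, "%", "%")) {
                while (rs.next()) {
                    String table = rs.getString("TABLE_NAME");
                    List<String> cols = byTable.get(table);
                    if (cols == null) {
                        cols = new LinkedList<String>();
                        byTable.put(table, cols);
                    }
                    cols.add(rs.getString("COLUMN_NAME"));
                }
            }
            return byTable;
        }
    }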

http://git-wip-us.apache.org/repos/asf/cayenne/blob/26d8434d/cayenne-server/src/main/java/org/apache/cayenne/access/loader/DbTableLoader.java
----------------------------------------------------------------------
diff --git a/cayenne-server/src/main/java/org/apache/cayenne/access/loader/DbTableLoader.java b/cayenne-server/src/main/java/org/apache/cayenne/access/loader/DbTableLoader.java
index da9821c..baad305 100644
--- a/cayenne-server/src/main/java/org/apache/cayenne/access/loader/DbTableLoader.java
+++ b/cayenne-server/src/main/java/org/apache/cayenne/access/loader/DbTableLoader.java
@@ -18,172 +18,179 @@
  ****************************************************************/
 package org.apache.cayenne.access.loader;
 
-import org.apache.cayenne.access.DbLoaderDelegate;
-import org.apache.cayenne.access.loader.filters.*;
-import org.apache.cayenne.map.*;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-
 import java.sql.DatabaseMetaData;
 import java.sql.ResultSet;
 import java.sql.SQLException;
-import java.util.*;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.LinkedList;
+import java.util.List;
+
+import org.apache.cayenne.access.DbLoaderDelegate;
+import org.apache.cayenne.access.loader.filters.PatternFilter;
+import org.apache.cayenne.access.loader.filters.TableFilter;
+import org.apache.cayenne.map.DataMap;
+import org.apache.cayenne.map.DbAttribute;
+import org.apache.cayenne.map.DbEntity;
+import org.apache.cayenne.map.DetectedDbEntity;
+import org.apache.cayenne.map.ObjEntity;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 
 /**
  * @since 4.0
  */
 public class DbTableLoader {
 
-    private static final Log LOGGER = LogFactory.getLog(DbTableLoader.class);
-
-    private static final String WILDCARD = "%";
-
-    private final String catalog;
-    private final String schema;
-
-    private final DatabaseMetaData metaData;
-    private final DbLoaderDelegate delegate;
-
-    private final DbAttributesLoader attributesLoader;
-
-    public DbTableLoader(String catalog, String schema, DatabaseMetaData metaData, DbLoaderDelegate delegate, DbAttributesLoader attributesLoader) {
-        this.catalog = catalog;
-        this.schema = schema;
-        this.metaData = metaData;
-        this.delegate = delegate;
-
-        this.attributesLoader = attributesLoader;
-    }
-
-
-    /**
-     * Returns all tables for the given combination of criteria. Tables are returned
-     * as DbEntities without any attributes or relationships.
-     *
-     * @param types  The types of table names to retrieve, null returns all types.
-     * @return
-     * @since 4.0
-     */
-    public List<DetectedDbEntity> getDbEntities(TableFilter filters, String[] types) throws SQLException {
-        if (LOGGER.isDebugEnabled()) {
-            LOGGER.debug("Read tables: catalog=" + catalog + ", schema=" + schema + ", types="
-                    + Arrays.toString(types));
-        }
-
-        ResultSet rs = metaData.getTables(catalog, schema, WILDCARD, types);
-
-        List<DetectedDbEntity> tables = new LinkedList<DetectedDbEntity>();
-        try {
-            while (rs.next()) {
-                // Oracle 9i and newer has a nifty recycle bin feature... but we don't
-                // want dropped tables to be included here; in fact they may even result
-                // in errors on reverse engineering as their names have special chars like
-                // "/", etc. So skip them altogether
-
-                String name = rs.getString("TABLE_NAME");
-                if (name == null) {
-                    continue;
-                }
-
-                DetectedDbEntity table = new DetectedDbEntity(name);
-
-                String catalog = rs.getString("TABLE_CAT");
-                table.setCatalog(catalog);
-
-                String schema = rs.getString("TABLE_SCHEM");
-                table.setSchema(schema);
-                if (!(this.catalog == null || this.catalog.equals(catalog)) ||
-                        !(this.schema == null || this.schema.equals(schema))) {
-
-                    LOGGER.error(catalog + "." + schema + "." + name + " wrongly loaded for catalog/schema : "
-                            + this.catalog + "." + this.schema);
-
-                    continue;
-                }
-
-                PatternFilter includeTable = filters.isIncludeTable(table.getName());
-                if (includeTable != null) {
-                    tables.add(table);
-                }
-            }
-        } finally {
-            rs.close();
-        }
-        return tables;
-    }
-
-    /**
-     * Loads dbEntities for the specified tables.
-     * @param config
-     * @param types
-     */
-    public List<DbEntity> loadDbEntities(DataMap map, DbLoaderConfiguration config, String[] types) throws SQLException {
-        /** List of db entities to process. */
-
-        List<DetectedDbEntity> tables
-                = getDbEntities(config.getFiltersConfig().tableFilter(catalog, schema), types);
-
-        List<DbEntity> dbEntities = new ArrayList<DbEntity>();
-        for (DbEntity dbEntity : tables) {
-            DbEntity oldEnt = map.getDbEntity(dbEntity.getName());
-            if (oldEnt != null) {
-                Collection<ObjEntity> oldObjEnt = map.getMappedEntities(oldEnt);
-                if (!oldObjEnt.isEmpty()) {
-                    for (ObjEntity objEntity : oldObjEnt) {
-                        LOGGER.debug("Delete ObjEntity: " + objEntity.getName());
-                        map.removeObjEntity(objEntity.getName(), true);
-                        delegate.objEntityRemoved(objEntity);
-                    }
-                }
-
-                LOGGER.debug("Overwrite DbEntity: " + oldEnt.getName());
-                map.removeDbEntity(oldEnt.getName(), true);
-                delegate.dbEntityRemoved(oldEnt);
-            }
-
-            map.addDbEntity(dbEntity);
-
-            delegate.dbEntityAdded(dbEntity);
-
-            // delegate might have thrown this entity out... so check if it is still
-            // around before continuing processing
-            if (map.getDbEntity(dbEntity.getName()) == dbEntity) {
-                dbEntities.add(dbEntity);
-                attributesLoader.loadDbAttributes(dbEntity);
-                if (!config.isSkipPrimaryKeyLoading()) {
-                    loadPrimaryKey(dbEntity);
-                }
-            }
-        }
-
-        return dbEntities;
-    }
-
-    private void loadPrimaryKey(DbEntity dbEntity) throws SQLException {
-        ResultSet rs = metaData.getPrimaryKeys(dbEntity.getCatalog(), dbEntity.getSchema(), dbEntity.getName());
-        try {
-            while (rs.next()) {
-                String columnName = rs.getString("COLUMN_NAME");
-                DbAttribute attribute = dbEntity.getAttribute(columnName);
-
-                if (attribute != null) {
-                    attribute.setPrimaryKey(true);
-                } else {
-                    // why an attribute might be null is not quite clear
-                    // but there is a bug report 731406 indicating that it is possible
-                    // so just print the warning, and ignore
-                    LOGGER.warn("Can't locate attribute for primary key: " + columnName);
-                }
-
-                String pkName = rs.getString("PK_NAME");
-                if (pkName != null && dbEntity instanceof DetectedDbEntity) {
-                    ((DetectedDbEntity) dbEntity).setPrimaryKeyName(pkName);
-                }
-
-            }
-        } finally {
-            rs.close();
-        }
-    }
+       private static final Log LOGGER = LogFactory.getLog(DbTableLoader.class);
+
+       private static final String WILDCARD = "%";
+
+       private final String catalog;
+       private final String schema;
+
+       private final DatabaseMetaData metaData;
+       private final DbLoaderDelegate delegate;
+
+       private final DbAttributesLoader attributesLoader;
+
+       public DbTableLoader(String catalog, String schema, DatabaseMetaData metaData, DbLoaderDelegate delegate,
+                       DbAttributesLoader attributesLoader) {
+               this.catalog = catalog;
+               this.schema = schema;
+               this.metaData = metaData;
+               this.delegate = delegate;
+
+               this.attributesLoader = attributesLoader;
+       }
+
+       /**
+        * Returns all tables for the given combination of criteria. Tables are returned
+        * as DbEntities without any attributes or relationships.
+        *
+        * @param types
+        *            The types of table names to retrieve, null returns all types.
+        * @return
+        * @since 4.0
+        */
+       public List<DetectedDbEntity> getDbEntities(TableFilter filters, String[] types) throws SQLException {
+               if (LOGGER.isDebugEnabled()) {
+                       LOGGER.debug("Read tables: catalog=" + catalog + ", schema=" + schema + ", types=" + Arrays.toString(types));
+               }
+
+               List<DetectedDbEntity> tables = new LinkedList<DetectedDbEntity>();
+               try (ResultSet rs = metaData.getTables(catalog, schema, WILDCARD, types);) {
+                       while (rs.next()) {
+                               // Oracle 9i and newer has a nifty recycle bin feature... but
+                               // we don't want dropped tables to be included here; in fact
+                               // they may even result in errors on reverse engineering as
+                               // their names have special chars like "/", etc. So skip them
+                               // altogether
+
+                               String name = rs.getString("TABLE_NAME");
+                               if (name == null) {
+                                       continue;
+                               }
+
+                               DetectedDbEntity table = new DetectedDbEntity(name);
+
+                               String catalog = rs.getString("TABLE_CAT");
+                               table.setCatalog(catalog);
+
+                               String schema = rs.getString("TABLE_SCHEM");
+                               table.setSchema(schema);
+                               if (!(this.catalog == null || this.catalog.equals(catalog))
+                                               || !(this.schema == null || this.schema.equals(schema))) {
+
+                                       LOGGER.error(catalog + "." + schema + "." + name + " wrongly loaded for catalog/schema : "
+                                                       + this.catalog + "." + this.schema);
+
+                                       continue;
+                               }
+
+                               PatternFilter includeTable = filters.isIncludeTable(table.getName());
+                               if (includeTable != null) {
+                                       tables.add(table);
+                               }
+                       }
+               }
+               return tables;
+       }
+
+       /**
+        * Loads dbEntities for the specified tables.
+        * 
+        * @param config
+        * @param types
+        */
+       public List<DbEntity> loadDbEntities(DataMap map, DbLoaderConfiguration config, String[] types) throws SQLException {
+               /** List of db entities to process. */
+
+               List<DetectedDbEntity> tables = getDbEntities(config.getFiltersConfig().tableFilter(catalog, schema), types);
+
+               List<DbEntity> dbEntities = new ArrayList<DbEntity>();
+               for (DbEntity dbEntity : tables) {
+                       DbEntity oldEnt = map.getDbEntity(dbEntity.getName());
+                       if (oldEnt != null) {
+                               Collection<ObjEntity> oldObjEnt = map.getMappedEntities(oldEnt);
+                               if (!oldObjEnt.isEmpty()) {
+                                       for (ObjEntity objEntity : oldObjEnt) {
+                                               LOGGER.debug("Delete ObjEntity: " + objEntity.getName());
+                                               map.removeObjEntity(objEntity.getName(), true);
+                                               delegate.objEntityRemoved(objEntity);
+                                       }
+                               }
+
+                               LOGGER.debug("Overwrite DbEntity: " + oldEnt.getName());
+                               map.removeDbEntity(oldEnt.getName(), true);
+                               delegate.dbEntityRemoved(oldEnt);
+                       }
+
+                       map.addDbEntity(dbEntity);
+
+                       delegate.dbEntityAdded(dbEntity);
+
+                       // delegate might have thrown this entity out... so check if it is
+                       // still around before continuing processing
+                       if (map.getDbEntity(dbEntity.getName()) == dbEntity) {
+                               dbEntities.add(dbEntity);
+                               attributesLoader.loadDbAttributes(dbEntity);
+                               if (!config.isSkipPrimaryKeyLoading()) {
+                                       loadPrimaryKey(dbEntity);
+                               }
+                       }
+               }
+
+               return dbEntities;
+       }
+
+       private void loadPrimaryKey(DbEntity dbEntity) throws SQLException {
+
+               try (ResultSet rs = metaData.getPrimaryKeys(dbEntity.getCatalog(), dbEntity.getSchema(), dbEntity.getName());) {
+                       while (rs.next()) {
+                               String columnName = rs.getString("COLUMN_NAME");
+                               DbAttribute attribute = dbEntity.getAttribute(columnName);
+
+                               if (attribute != null) {
+                                       attribute.setPrimaryKey(true);
+                               } else {
+                                       // why an attribute might be null is not quite clear
+                                       // but there is a bug report 731406 indicating that it is
+                                       // possible
+                                       // so just print the warning, and ignore
+                                       LOGGER.warn("Can't locate attribute for primary key: " + columnName);
+                               }
+
+                               String pkName = rs.getString("PK_NAME");
+                               if (pkName != null && dbEntity instanceof DetectedDbEntity) {
+                                       ((DetectedDbEntity) dbEntity).setPrimaryKeyName(pkName);
+                               }
+
+                       }
+               }
+       }
 }
-
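
The loadPrimaryKey() method above follows the standard DatabaseMetaData.getPrimaryKeys()
contract: one row per primary key column. A minimal sketch, with illustrative names:

    import java.sql.Connection;
    import java.sql.ResultSet;
    import java.sql.SQLException;
    import java.util.LinkedList;
    import java.util.List;

    public class PrimaryKeySketch {

        static List<String> primaryKeyColumns(Connection c, String catalog, String schema, String table)
                throws SQLException {

            List<String> pkColumns = new LinkedList<String>();
            try (ResultSet rs = c.getMetaData().getPrimaryKeys(catalog, schema, table)) {
                while (rs.next()) {
                    // one row per PK column; PK_NAME (the constraint name) may be null
                    pkColumns.add(rs.getString("COLUMN_NAME"));
                }
            }
            return pkColumns;
        }
    }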

http://git-wip-us.apache.org/repos/asf/cayenne/blob/26d8434d/cayenne-server/src/main/java/org/apache/cayenne/access/types/ByteArrayType.java
----------------------------------------------------------------------
diff --git a/cayenne-server/src/main/java/org/apache/cayenne/access/types/ByteArrayType.java b/cayenne-server/src/main/java/org/apache/cayenne/access/types/ByteArrayType.java
index e8198f3..8b42a65 100644
--- a/cayenne-server/src/main/java/org/apache/cayenne/access/types/ByteArrayType.java
+++ b/cayenne-server/src/main/java/org/apache/cayenne/access/types/ByteArrayType.java
@@ -33,190 +33,171 @@ import org.apache.cayenne.CayenneException;
 import org.apache.cayenne.util.MemoryBlob;
 
 /**
- * Handles <code>byte[]</code>, mapping it as either of JDBC types - BLOB or (VAR)BINARY.
- * Can be configured to trim trailing zero bytes.
+ * Handles <code>byte[]</code>, mapping it as either of JDBC types - BLOB or
+ * (VAR)BINARY. Can be configured to trim trailing zero bytes.
  */
 public class ByteArrayType implements ExtendedType {
 
-    private static final int BUF_SIZE = 8 * 1024;
-
-    protected boolean trimmingBytes;
-    protected boolean usingBlobs;
-
-    /**
-     * Strips null bytes from the byte array, returning a potentially smaller array that
-     * contains no trailing zero bytes.
-     */
-    public static byte[] trimBytes(byte[] bytes) {
-        int bytesToTrim = 0;
-        for (int i = bytes.length - 1; i >= 0; i--) {
-            if (bytes[i] != 0) {
-                bytesToTrim = bytes.length - 1 - i;
-                break;
-            }
-        }
-
-        if (bytesToTrim == 0) {
-            return bytes;
-        }
-
-        byte[] dest = new byte[bytes.length - bytesToTrim];
-        System.arraycopy(bytes, 0, dest, 0, dest.length);
-        return dest;
-    }
-
-    public ByteArrayType(boolean trimmingBytes, boolean usingBlobs) {
-        this.usingBlobs = usingBlobs;
-        this.trimmingBytes = trimmingBytes;
-    }
-
-    @Override
-    public String getClassName() {
-        return "byte[]";
-    }
-
-    @Override
-    public Object materializeObject(ResultSet rs, int index, int type) throws Exception {
-
-        byte[] bytes = null;
-
-        if (type == Types.BLOB) {
-            bytes = (isUsingBlobs()) ? readBlob(rs.getBlob(index)) : readBinaryStream(
-                    rs,
-                    index);
-        }
-        else {
-            bytes = rs.getBytes(index);
-
-            // trim BINARY type
-            if (bytes != null && type == Types.BINARY && isTrimmingBytes()) {
-                bytes = trimBytes(bytes);
-            }
-        }
-
-        return bytes;
-    }
-
-    @Override
-    public Object materializeObject(CallableStatement cs, int index, int type)
-            throws Exception {
-
-        byte[] bytes = null;
-
-        if (type == Types.BLOB) {
-            if (!isUsingBlobs()) {
-                throw new CayenneException(
-                        "Binary streams are not supported in stored procedure parameters.");
-            }
-            bytes = readBlob(cs.getBlob(index));
-        }
-        else {
-
-            bytes = cs.getBytes(index);
-
-            // trim BINARY type
-            if (bytes != null && type == Types.BINARY && isTrimmingBytes()) {
-                bytes = trimBytes(bytes);
-            }
-        }
-
-        return bytes;
-    }
-
-    @Override
-    public void setJdbcObject(
-            PreparedStatement st,
-            Object val,
-            int pos,
-            int type,
-            int scale) throws Exception {
-
-        // if this is a BLOB column, set the value as "bytes"
-        // instead. This should work with most drivers
-        if (type == Types.BLOB) {
-            if (isUsingBlobs()) {
-                st.setBlob(pos, writeBlob((byte[]) val));
-            }
-            else {
-                st.setBytes(pos, (byte[]) val);
-            }
-        }
-        else {
-            if (scale != -1) {
-                st.setObject(pos, val, type, scale);
-            }
-            else {
-                st.setObject(pos, val, type);
-            }
-        }
-    }
-
-    protected Blob writeBlob(byte[] bytes) {
-        // TODO: should we use Connection.createBlob() instead? (Like Oracle
-        // ByteArrayType does)
-        return bytes != null ? new MemoryBlob(bytes) : null;
-    }
-
-    protected byte[] readBlob(Blob blob) throws IOException, SQLException {
-        if (blob == null) {
-            return null;
-        }
-
-        // sanity check on size
-        if (blob.length() > Integer.MAX_VALUE) {
-            throw new IllegalArgumentException(
-                    "BLOB is too big to be read as byte[] in memory: " + blob.length());
-        }
-
-        int size = (int) blob.length();
-        if (size == 0) {
-            return new byte[0];
-        }
-        
-        return blob.getBytes(1, size);
-    }
-
-    protected byte[] readBinaryStream(ResultSet rs, int index) throws IOException,
-            SQLException {
-        InputStream in = rs.getBinaryStream(index);
-        return (in != null) ? readValueStream(in, -1, BUF_SIZE) : null;
-    }
-
-    protected byte[] readValueStream(InputStream in, int streamSize, int bufSize)
-            throws IOException {
-
-        byte[] buf = new byte[bufSize];
-        int read;
-        ByteArrayOutputStream out = (streamSize > 0) ? new ByteArrayOutputStream(
-                streamSize) : new ByteArrayOutputStream();
-
-        try {
-            while ((read = in.read(buf, 0, bufSize)) >= 0) {
-                out.write(buf, 0, read);
-            }
-            return out.toByteArray();
-        }
-        finally {
-            in.close();
-        }
-    }
-
-    /**
-     * Returns <code>true</code> if byte columns are handled as BLOBs internally.
-     */
-    public boolean isUsingBlobs() {
-        return usingBlobs;
-    }
-
-    public void setUsingBlobs(boolean usingBlobs) {
-        this.usingBlobs = usingBlobs;
-    }
-
-    public boolean isTrimmingBytes() {
-        return trimmingBytes;
-    }
-
-    public void setTrimmingBytes(boolean trimmingBytes) {
-        this.trimmingBytes = trimmingBytes;
-    }
+       private static final int BUF_SIZE = 8 * 1024;
+
+       protected boolean trimmingBytes;
+       protected boolean usingBlobs;
+
+       /**
+        * Strips null bytes from the byte array, returning a potentially smaller
+        * array that contains no trailing zero bytes.
+        */
+       public static byte[] trimBytes(byte[] bytes) {
+               int bytesToTrim = 0;
+               for (int i = bytes.length - 1; i >= 0; i--) {
+                       if (bytes[i] != 0) {
+                               bytesToTrim = bytes.length - 1 - i;
+                               break;
+                       }
+               }
+
+               if (bytesToTrim == 0) {
+                       return bytes;
+               }
+
+               byte[] dest = new byte[bytes.length - bytesToTrim];
+               System.arraycopy(bytes, 0, dest, 0, dest.length);
+               return dest;
+       }
+
+       public ByteArrayType(boolean trimmingBytes, boolean usingBlobs) {
+               this.usingBlobs = usingBlobs;
+               this.trimmingBytes = trimmingBytes;
+       }
+
+       @Override
+       public String getClassName() {
+               return "byte[]";
+       }
+
+       @Override
+       public Object materializeObject(ResultSet rs, int index, int type) throws Exception {
+
+               byte[] bytes = null;
+
+               if (type == Types.BLOB) {
+                       bytes = (isUsingBlobs()) ? readBlob(rs.getBlob(index)) : readBinaryStream(rs, index);
+               } else {
+                       bytes = rs.getBytes(index);
+
+                       // trim BINARY type
+                       if (bytes != null && type == Types.BINARY && isTrimmingBytes()) {
+                               bytes = trimBytes(bytes);
+                       }
+               }
+
+               return bytes;
+       }
+
+       @Override
+       public Object materializeObject(CallableStatement cs, int index, int type) throws Exception {
+
+               byte[] bytes = null;
+
+               if (type == Types.BLOB) {
+                       if (!isUsingBlobs()) {
+                               throw new CayenneException("Binary streams are not supported in stored procedure parameters.");
+                       }
+                       bytes = readBlob(cs.getBlob(index));
+               } else {
+
+                       bytes = cs.getBytes(index);
+
+                       // trim BINARY type
+                       if (bytes != null && type == Types.BINARY && isTrimmingBytes()) {
+                               bytes = trimBytes(bytes);
+                       }
+               }
+
+               return bytes;
+       }
+
+       @Override
+       public void setJdbcObject(PreparedStatement st, Object val, int pos, int type, int scale) throws Exception {
+
+               // if this is a BLOB column, set the value as "bytes"
+               // instead. This should work with most drivers
+               if (type == Types.BLOB) {
+                       if (isUsingBlobs()) {
+                               st.setBlob(pos, writeBlob((byte[]) val));
+                       } else {
+                               st.setBytes(pos, (byte[]) val);
+                       }
+               } else {
+                       if (scale != -1) {
+                               st.setObject(pos, val, type, scale);
+                       } else {
+                               st.setObject(pos, val, type);
+                       }
+               }
+       }
+
+       protected Blob writeBlob(byte[] bytes) {
+               // TODO: should we use Connection.createBlob() instead? (Like Oracle
+               // ByteArrayType does)
+               return bytes != null ? new MemoryBlob(bytes) : null;
+       }
+
+       protected byte[] readBlob(Blob blob) throws IOException, SQLException {
+               if (blob == null) {
+                       return null;
+               }
+
+               // sanity check on size
+               if (blob.length() > Integer.MAX_VALUE) {
+                       throw new IllegalArgumentException("BLOB is too big to be read as byte[] in memory: " + blob.length());
+               }
+
+               int size = (int) blob.length();
+               if (size == 0) {
+                       return new byte[0];
+               }
+
+               return blob.getBytes(1, size);
+       }
+
+       protected byte[] readBinaryStream(ResultSet rs, int index) throws IOException, SQLException {
+               try (InputStream in = rs.getBinaryStream(index);) {
+                       return (in != null) ? readValueStream(in, -1, BUF_SIZE) : null;
+               }
+       }
+
+       protected byte[] readValueStream(InputStream in, int streamSize, int bufSize) throws IOException {
+
+               byte[] buf = new byte[bufSize];
+               int read;
+               ByteArrayOutputStream out = (streamSize > 0) ? new ByteArrayOutputStream(streamSize)
+                               : new ByteArrayOutputStream();
+
+               while ((read = in.read(buf, 0, bufSize)) >= 0) {
+                       out.write(buf, 0, read);
+               }
+
+               return out.toByteArray();
+       }
+
+       /**
+        * Returns <code>true</code> if byte columns are handled as BLOBs
+        * internally.
+        */
+       public boolean isUsingBlobs() {
+               return usingBlobs;
+       }
+
+       public void setUsingBlobs(boolean usingBlobs) {
+               this.usingBlobs = usingBlobs;
+       }
+
+       public boolean isTrimmingBytes() {
+               return trimmingBytes;
+       }
+
+       public void setTrimmingBytes(boolean trimmingBytes) {
+               this.trimmingBytes = trimmingBytes;
+       }
 }
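
As a usage note, trimBytes() only strips trailing zero bytes; embedded zeros survive.
A small example (not part of the patch):

    public class TrimBytesExample {

        public static void main(String[] args) {
            byte[] padded = { 1, 0, 2, 0, 0 };
            byte[] trimmed = org.apache.cayenne.access.types.ByteArrayType.trimBytes(padded);

            // prints 3: the embedded zero at index 1 survives, the trailing pair is dropped
            System.out.println(trimmed.length);
        }
    }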

http://git-wip-us.apache.org/repos/asf/cayenne/blob/26d8434d/cayenne-server/src/main/java/org/apache/cayenne/access/types/CharType.java
----------------------------------------------------------------------
diff --git a/cayenne-server/src/main/java/org/apache/cayenne/access/types/CharType.java b/cayenne-server/src/main/java/org/apache/cayenne/access/types/CharType.java
index b3aff54..9f6f990 100644
--- a/cayenne-server/src/main/java/org/apache/cayenne/access/types/CharType.java
+++ b/cayenne-server/src/main/java/org/apache/cayenne/access/types/CharType.java
@@ -32,155 +32,141 @@ import java.sql.SQLException;
 import java.sql.Types;
 
 /**
- * Handles <code>java.lang.String</code>, mapping it as either of JDBC types - CLOB or
- * (VAR)CHAR. Can be configured to trim trailing spaces.
+ * Handles <code>java.lang.String</code>, mapping it as either of JDBC types -
+ * CLOB or (VAR)CHAR. Can be configured to trim trailing spaces.
  */
 public class CharType implements ExtendedType {
 
-    private static final int BUF_SIZE = 8 * 1024;
-
-    protected boolean trimmingChars;
-    protected boolean usingClobs;
-
-    public CharType(boolean trimmingChars, boolean usingClobs) {
-        this.trimmingChars = trimmingChars;
-        this.usingClobs = usingClobs;
-    }
-
-    /**
-     * Returns "java.lang.String".
-     */
-    @Override
-    public String getClassName() {
-        return String.class.getName();
-    }
-
-    /** Return trimmed string. */
-    @Override
-    public Object materializeObject(ResultSet rs, int index, int type) throws Exception {
-
-        if (type == Types.CLOB || type == Types.NCLOB) {
-            return isUsingClobs() ? readClob(rs.getClob(index)) : readCharStream(rs, index);
-        }
-
-        return handleString(rs.getString(index), type);
-    }
-
-    @Override
-    public Object materializeObject(CallableStatement cs, int index, int type)
-            throws Exception {
-
-        if (type == Types.CLOB || type == Types.NCLOB) {
-            if (!isUsingClobs()) {
-                throw new CayenneException(
-                        "Character streams are not supported in stored procedure parameters.");
-            }
-
-            return readClob(cs.getClob(index));
-        }
-
-        return handleString(cs.getString(index), type);
-    }
-
-    private Object handleString(String val, int type) throws SQLException {
-        // trim CHAR type
-        if (val != null && (type == Types.CHAR || type == Types.NCHAR) && isTrimmingChars()) {
-            return rtrim(val);
-        }
-
-        return val;
-    }
-
-    /** Trim right spaces. */
-    protected String rtrim(String value) {
-        int end = value.length() - 1;
-        int count = end;
-        while (end >= 0 && value.charAt(end) <= ' ') {
-            end--;
-        }
-        return end == count ? value : value.substring(0, end + 1);
-    }
-
-    @Override
-    public void setJdbcObject(
-            PreparedStatement st,
-            Object value,
-            int pos,
-            int type,
-            int scale) throws Exception {
-
-        // if this is a CLOB column, set the value as "String"
-        // instead. This should work with most drivers
-        if (type == Types.CLOB || type == Types.NCLOB) {
-            st.setString(pos, (String) value);
-        } else if (scale != -1) {
-            st.setObject(pos, value, type, scale);
-        } else {
-            st.setObject(pos, value, type);
-        }
-    }
-
-    protected String readClob(Clob clob) throws IOException, SQLException {
-        if (clob == null) {
-            return null;
-        }
-
-        // sanity check on size
-        if (clob.length() > Integer.MAX_VALUE) {
-            throw new IllegalArgumentException(
-                    "CLOB is too big to be read as String in memory: " + clob.length());
-        }
-
-        int size = (int) clob.length();
-        if (size == 0) {
-            return "";
-        }
-
-        return clob.getSubString(1, size);
-    }
-
-    protected String readCharStream(ResultSet rs, int index) throws IOException,
-            SQLException {
-        Reader in = rs.getCharacterStream(index);
-
-        return in != null ? readValueStream(in, -1, BUF_SIZE) : null;
-    }
-
-    protected String readValueStream(Reader in, int streamSize, int bufSize)
-            throws IOException {
-        char[] buf = new char[bufSize];
-        StringWriter out = streamSize > 0 ? new StringWriter(streamSize) : new StringWriter();
-
-        try {
-            int read;
-            while ((read = in.read(buf, 0, bufSize)) >= 0) {
-                out.write(buf, 0, read);
-            }
-            return out.toString();
-        }
-        finally {
-            in.close();
-        }
-    }
-
-    /**
-     * Returns <code>true</code> if 'materializeObject' method should trim trailing spaces
-     * from the CHAR columns. This addresses an issue with some JDBC drivers (e.g.
-     * Oracle), that return Strings for CHAR columns padded with spaces.
-     */
-    public boolean isTrimmingChars() {
-        return trimmingChars;
-    }
-
-    public void setTrimmingChars(boolean trimmingChars) {
-        this.trimmingChars = trimmingChars;
-    }
-
-    public boolean isUsingClobs() {
-        return usingClobs;
-    }
-
-    public void setUsingClobs(boolean usingClobs) {
-        this.usingClobs = usingClobs;
-    }
+       private static final int BUF_SIZE = 8 * 1024;
+
+       protected boolean trimmingChars;
+       protected boolean usingClobs;
+
+       public CharType(boolean trimmingChars, boolean usingClobs) {
+               this.trimmingChars = trimmingChars;
+               this.usingClobs = usingClobs;
+       }
+
+       /**
+        * Returns "java.lang.String".
+        */
+       @Override
+       public String getClassName() {
+               return String.class.getName();
+       }
+
+       /** Return trimmed string. */
+       @Override
+       public Object materializeObject(ResultSet rs, int index, int type) throws Exception {
+
+               if (type == Types.CLOB || type == Types.NCLOB) {
+                       return isUsingClobs() ? readClob(rs.getClob(index)) : readCharStream(rs, index);
+               }
+
+               return handleString(rs.getString(index), type);
+       }
+
+       @Override
+       public Object materializeObject(CallableStatement cs, int index, int type) throws Exception {
+
+               if (type == Types.CLOB || type == Types.NCLOB) {
+                       if (!isUsingClobs()) {
+                               throw new CayenneException("Character streams are not supported in stored procedure parameters.");
+                       }
+
+                       return readClob(cs.getClob(index));
+               }
+
+               return handleString(cs.getString(index), type);
+       }
+
+       private Object handleString(String val, int type) throws SQLException {
+               // trim CHAR type
+               if (val != null && (type == Types.CHAR || type == Types.NCHAR) && isTrimmingChars()) {
+                       return rtrim(val);
+               }
+
+               return val;
+       }
+
+       /** Trim right spaces. */
+       protected String rtrim(String value) {
+               int end = value.length() - 1;
+               int count = end;
+               while (end >= 0 && value.charAt(end) <= ' ') {
+                       end--;
+               }
+               return end == count ? value : value.substring(0, end + 1);
+       }
+
+       @Override
+       public void setJdbcObject(PreparedStatement st, Object value, int pos, int type, int scale) throws Exception {
+
+               // if this is a CLOB column, set the value as "String"
+               // instead. This should work with most drivers
+               if (type == Types.CLOB || type == Types.NCLOB) {
+                       st.setString(pos, (String) value);
+               } else if (scale != -1) {
+                       st.setObject(pos, value, type, scale);
+               } else {
+                       st.setObject(pos, value, type);
+               }
+       }
+
+       protected String readClob(Clob clob) throws IOException, SQLException {
+               if (clob == null) {
+                       return null;
+               }
+
+               // sanity check on size
+               if (clob.length() > Integer.MAX_VALUE) {
+                       throw new IllegalArgumentException("CLOB is too big to be read as String in memory: " + clob.length());
+               }
+
+               int size = (int) clob.length();
+               if (size == 0) {
+                       return "";
+               }
+
+               return clob.getSubString(1, size);
+       }
+
+       protected String readCharStream(ResultSet rs, int index) throws IOException, SQLException {
+               try (Reader in = rs.getCharacterStream(index)) {
+                       return in != null ? readValueStream(in, -1, BUF_SIZE) : null;
+               }
+       }
+
+       protected String readValueStream(Reader in, int streamSize, int bufSize) throws IOException {
+               char[] buf = new char[bufSize];
+               StringWriter out = streamSize > 0 ? new StringWriter(streamSize) : new StringWriter();
+
+               int read;
+               while ((read = in.read(buf, 0, bufSize)) >= 0) {
+                       out.write(buf, 0, read);
+               }
+               return out.toString();
+       }
+
+       /**
+        * Returns <code>true</code> if 'materializeObject' method should trim
+        * trailing spaces from the CHAR columns. This addresses an issue with some
+        * JDBC drivers (e.g. Oracle), that return Strings for CHAR columns padded
+        * with spaces.
+        */
+       public boolean isTrimmingChars() {
+               return trimmingChars;
+       }
+
+       public void setTrimmingChars(boolean trimmingChars) {
+               this.trimmingChars = trimmingChars;
+       }
+
+       public boolean isUsingClobs() {
+               return usingClobs;
+       }
+
+       public void setUsingClobs(boolean usingClobs) {
+               this.usingClobs = usingClobs;
+       }
 }
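
The rtrim() implementation above trims any trailing character with a code point of ' ' (space) or lower, i.e. the right-hand half of String.trim(), so it strips both padding spaces and trailing control characters. A minimal standalone sketch of the same idea (class and method names here are illustrative, not part of Cayenne):

    public class RtrimSketch {

        // trims trailing spaces/control chars, like CharType.rtrim()
        static String rtrim(String value) {
            int end = value.length();
            while (end > 0 && value.charAt(end - 1) <= ' ') {
                end--;
            }
            return value.substring(0, end);
        }

        public static void main(String[] args) {
            // a CHAR(10) value that a driver (e.g. Oracle) padded with spaces
            System.out.println("[" + rtrim("ABC       ") + "]"); // prints [ABC]
        }
    }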

http://git-wip-us.apache.org/repos/asf/cayenne/blob/26d8434d/cayenne-server/src/main/java/org/apache/cayenne/access/types/SerializableTypeFactory.java
----------------------------------------------------------------------
diff --git 
a/cayenne-server/src/main/java/org/apache/cayenne/access/types/SerializableTypeFactory.java
 
b/cayenne-server/src/main/java/org/apache/cayenne/access/types/SerializableTypeFactory.java
index e1fe567..214f84b 100644
--- 
a/cayenne-server/src/main/java/org/apache/cayenne/access/types/SerializableTypeFactory.java
+++ 
b/cayenne-server/src/main/java/org/apache/cayenne/access/types/SerializableTypeFactory.java
@@ -29,98 +29,97 @@ import java.io.ObjectOutputStream;
 import java.io.Serializable;
 
 /**
- * ExtendedTypeFactory for handling serializable objects. Returned ExtendedType is simply
- * an object serialization wrapper on top of byte[] ExtendedType.
+ * ExtendedTypeFactory for handling serializable objects. Returned ExtendedType
+ * is simply an object serialization wrapper on top of byte[] ExtendedType.
  * 
  * @since 3.0
  */
 class SerializableTypeFactory implements ExtendedTypeFactory {
 
-    private static final Log logger = LogFactory.getLog(SerializableTypeFactory.class);
-
-    private ExtendedTypeMap map;
-
-    SerializableTypeFactory(ExtendedTypeMap map) {
-        this.map = map;
-    }
-
-    public ExtendedType getType(Class<?> objectClass) {
-        logger.warn("Haven't found suitable ExtendedType for class '" + objectClass.getCanonicalName() + "'. Most likely you need to define a custom ExtendedType.");
-
-        if (Serializable.class.isAssignableFrom(objectClass)) {
-            logger.warn("SerializableType will be used for type conversion.");
-
-            // using a binary stream delegate instead of byte[] may actually speed up
-            // things in some dbs, but at least byte[] type works consistently across
-            // adapters...
-
-            // note - can't use "getRegisteredType" as it causes infinite recursion
-            ExtendedType bytesType = map.getExplictlyRegisteredType("byte[]");
-
-            // not sure if this type of recursion can occur, still worth checking
-            if (bytesType instanceof SerializableType) {
-                throw new IllegalStateException(
-                        "Can't create Serializable ExtendedType for "
-                                + objectClass.getCanonicalName()
-                                + ": no ExtendedType exists for byte[]");
-            }
-
-            return new SerializableType(objectClass, bytesType);
-        }
-
-        return null;
-    }
-
-    /**
-     * A serialization wrapper on top of byte[] ExtendedType
-     */
-    final class SerializableType extends ExtendedTypeDecorator {
-
-        private Class<?> javaClass;
-
-        SerializableType(Class<?> javaClass, ExtendedType bytesType) {
-            super(bytesType);
-            this.javaClass = javaClass;
-        }
-
-        @Override
-        public String getClassName() {
-            return javaClass.getCanonicalName();
-        }
-
-        @Override
-        Object fromJavaObject(Object object) {
-            ByteArrayOutputStream bytes = new ByteArrayOutputStream() {
-
-                // avoid unneeded array copy...
-                @Override
-                public synchronized byte[] toByteArray() {
-                    return buf;
-                }
-            };
-
-            try {
-                ObjectOutputStream out = new ObjectOutputStream(bytes);
-                out.writeObject(object);
-                out.close();
-            }
-            catch (Exception e) {
-                throw new CayenneRuntimeException("Error serializing object", e);
-            }
-
-            return bytes.toByteArray();
-        }
-
-        @Override
-        Object toJavaObject(Object object) {
-            byte[] bytes = (byte[]) object;
-            try {
-                return bytes != null && bytes.length > 0 ? new ObjectInputStream(
-                        new ByteArrayInputStream(bytes)).readObject() : null;
-            }
-            catch (Exception e) {
-                throw new CayenneRuntimeException("Error deserializing object", e);
-            }
-        }
-    }
+       private static final Log logger = LogFactory.getLog(SerializableTypeFactory.class);
+
+       private ExtendedTypeMap map;
+
+       SerializableTypeFactory(ExtendedTypeMap map) {
+               this.map = map;
+       }
+
+       public ExtendedType getType(Class<?> objectClass) {
+               logger.warn("Haven't found suitable ExtendedType for class '" + objectClass.getCanonicalName()
+                               + "'. Most likely you need to define a custom ExtendedType.");
+
+               if (Serializable.class.isAssignableFrom(objectClass)) {
+                       logger.warn("SerializableType will be used for type conversion.");
+
+                       // using a binary stream delegate instead of byte[] may actually
+                       // speed up things in some dbs, but at least byte[] type works
+                       // consistently across adapters...
+
+                       // note - can't use "getRegisteredType" as it causes infinite recursion
+                       ExtendedType bytesType = map.getExplictlyRegisteredType("byte[]");
+
+                       // not sure if this type of recursion can occur, still worth checking
+                       if (bytesType instanceof SerializableType) {
+                               throw new IllegalStateException("Can't create Serializable ExtendedType for "
+                                               + objectClass.getCanonicalName() + ": no ExtendedType exists for byte[]");
+                       }
+
+                       return new SerializableType(objectClass, bytesType);
+               }
+
+               return null;
+       }
+
+       /**
+        * A serialization wrapper on top of byte[] ExtendedType
+        */
+       final class SerializableType extends ExtendedTypeDecorator {
+
+               private Class<?> javaClass;
+
+               SerializableType(Class<?> javaClass, ExtendedType bytesType) {
+                       super(bytesType);
+                       this.javaClass = javaClass;
+               }
+
+               @Override
+               public String getClassName() {
+                       return javaClass.getCanonicalName();
+               }
+
+               @Override
+               Object fromJavaObject(Object object) {
+                       ByteArrayOutputStream bytes = new ByteArrayOutputStream() {
+
+                               // avoid unneeded array copy...
+                               @Override
+                               public synchronized byte[] toByteArray() {
+                                       return buf;
+                               }
+                       };
+
+                       try (ObjectOutputStream out = new ObjectOutputStream(bytes)) {
+                               out.writeObject(object);
+                       } catch (Exception e) {
+                               throw new CayenneRuntimeException("Error serializing object", e);
+                       }
+
+                       return bytes.toByteArray();
+               }
+
+               @Override
+               Object toJavaObject(Object object) {
+                       byte[] bytes = (byte[]) object;
+                       try {
+                               return bytes != null && bytes.length > 0 ? new ObjectInputStream(new ByteArrayInputStream(bytes))
+                                               .readObject() : null;
+                       } catch (Exception e) {
+                               throw new CayenneRuntimeException("Error deserializing object", e);
+                       }
+               }
+       }
 }
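
The SerializableType wrapper above simply round-trips a Serializable value through Java serialization before handing the bytes to the byte[] ExtendedType. A self-contained sketch of that round-trip, in the same try-with-resources style the commit introduces (names here are illustrative, not Cayenne API):

    import java.io.*;

    public class SerializationRoundTripSketch {

        // serialize, as fromJavaObject() does
        static byte[] toBytes(Object object) throws IOException {
            ByteArrayOutputStream bytes = new ByteArrayOutputStream();
            try (ObjectOutputStream out = new ObjectOutputStream(bytes)) {
                out.writeObject(object);
            }
            return bytes.toByteArray();
        }

        // deserialize, as toJavaObject() does
        static Object fromBytes(byte[] bytes) throws IOException, ClassNotFoundException {
            try (ObjectInputStream in = new ObjectInputStream(new ByteArrayInputStream(bytes))) {
                return in.readObject();
            }
        }

        public static void main(String[] args) throws Exception {
            System.out.println(fromBytes(toBytes("hello"))); // prints hello
        }
    }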

http://git-wip-us.apache.org/repos/asf/cayenne/blob/26d8434d/cayenne-server/src/main/java/org/apache/cayenne/configuration/XMLDataChannelDescriptorLoader.java
----------------------------------------------------------------------
diff --git 
a/cayenne-server/src/main/java/org/apache/cayenne/configuration/XMLDataChannelDescriptorLoader.java
 
b/cayenne-server/src/main/java/org/apache/cayenne/configuration/XMLDataChannelDescriptorLoader.java
index 420c43c..e763b5b 100644
--- 
a/cayenne-server/src/main/java/org/apache/cayenne/configuration/XMLDataChannelDescriptorLoader.java
+++ 
b/cayenne-server/src/main/java/org/apache/cayenne/configuration/XMLDataChannelDescriptorLoader.java
@@ -82,21 +82,14 @@ public class XMLDataChannelDescriptorLoader implements DataChannelDescriptorLoad
         */
        @Deprecated
        private static String passwordFromInputStream(InputStream inputStream) {
-               BufferedReader bufferedReader = null;
                String password = null;
 
-               try {
-                       bufferedReader = new BufferedReader(new InputStreamReader(inputStream));
+               try (BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(inputStream))) {
+
                        password = bufferedReader.readLine();
                } catch (IOException exception) {
                        logger.warn(exception);
                } finally {
-                       try {
-                               if (bufferedReader != null) {
-                                       bufferedReader.close();
-                               }
-                       } catch (Exception exception) {
-                       }
 
                        try {
                                inputStream.close();
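
One subtlety of this conversion: closing the BufferedReader declared in the try-with-resources header also closes the wrapped InputStream, so the inputStream.close() kept in the finally block becomes a harmless second close. A minimal sketch of the pattern (names are illustrative):

    import java.io.*;
    import java.nio.charset.StandardCharsets;

    public class FirstLineSketch {

        static String firstLine(InputStream in) throws IOException {
            // reader.close() propagates to 'in'; no manual finally needed
            try (BufferedReader reader = new BufferedReader(new InputStreamReader(in))) {
                return reader.readLine();
            }
        }

        public static void main(String[] args) throws IOException {
            byte[] data = "secret\nignored".getBytes(StandardCharsets.UTF_8);
            System.out.println(firstLine(new ByteArrayInputStream(data))); // prints secret
        }
    }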

http://git-wip-us.apache.org/repos/asf/cayenne/blob/26d8434d/cayenne-server/src/main/java/org/apache/cayenne/configuration/server/DefaultDbAdapterFactory.java
----------------------------------------------------------------------
diff --git 
a/cayenne-server/src/main/java/org/apache/cayenne/configuration/server/DefaultDbAdapterFactory.java
 
b/cayenne-server/src/main/java/org/apache/cayenne/configuration/server/DefaultDbAdapterFactory.java
index 5cbb40c..2287071 100644
--- 
a/cayenne-server/src/main/java/org/apache/cayenne/configuration/server/DefaultDbAdapterFactory.java
+++ 
b/cayenne-server/src/main/java/org/apache/cayenne/configuration/server/DefaultDbAdapterFactory.java
@@ -45,98 +45,88 @@ import org.apache.cayenne.log.JdbcEventLogger;
  */
 public class DefaultDbAdapterFactory implements DbAdapterFactory {
 
-    @Inject
-    protected Injector injector;
-
-    @Inject
-    protected JdbcEventLogger jdbcEventLogger;
-
-    @Inject
-    protected AdhocObjectFactory objectFactory;
-    protected List<DbAdapterDetector> detectors;
-
-    public DefaultDbAdapterFactory(@Inject(Constants.SERVER_ADAPTER_DETECTORS_LIST) List<DbAdapterDetector> detectors) {
-        if (detectors == null) {
-            throw new NullPointerException("Null detectors list");
-        }
-
-        this.detectors = detectors;
-    }
-
-    @Override
-    public DbAdapter createAdapter(DataNodeDescriptor nodeDescriptor, final DataSource dataSource) throws Exception {
-
-        String adapterType = null;
-
-        if (nodeDescriptor != null) {
-            adapterType = nodeDescriptor.getAdapterType();
-        }
-
-        // must not create AutoAdapter via objectFactory, so treat explicit
-        // AutoAdapter as null and let it fall through to the default. (explicit
-        // AutoAdapter is often passed from the cdbimport plugin).
-        if (adapterType != null && adapterType.equals(AutoAdapter.class.getName())) {
-            adapterType = null;
-        }
-
-        if (adapterType != null) {
-            return objectFactory.newInstance(DbAdapter.class, adapterType);
-        } else {
-            return new AutoAdapter(new Provider<DbAdapter>() {
-
-                public DbAdapter get() {
-                    return detectAdapter(dataSource);
-                }
-            }, jdbcEventLogger);
-        }
-    }
-
-    protected DbAdapter detectAdapter(DataSource dataSource) {
-
-        if (detectors.isEmpty()) {
-            return defaultAdapter();
-        }
-
-        try {
-            Connection c = dataSource.getConnection();
-
-            try {
-                return detectAdapter(c.getMetaData());
-            } finally {
-                try {
-                    c.close();
-                } catch (SQLException e) {
-                    // ignore...
-                }
-            }
-        } catch (SQLException e) {
-            throw new CayenneRuntimeException("Error detecting database type: " + e.getLocalizedMessage(), e);
-        }
-    }
-
-    protected DbAdapter detectAdapter(DatabaseMetaData metaData) throws SQLException {
-        // iterate in reverse order to allow custom factories to take precedence
-        // over the default ones configured in constructor
-        for (int i = detectors.size() - 1; i >= 0; i--) {
-            DbAdapterDetector detector = detectors.get(i);
-            DbAdapter adapter = detector.createAdapter(metaData);
-
-            if (adapter != null) {
-                jdbcEventLogger.log("Detected and installed adapter: " + adapter.getClass().getName());
-
-                // TODO: should detector do this??
-                injector.injectMembers(adapter);
-
-                return adapter;
-            }
-        }
-
-        return defaultAdapter();
-    }
-
-    protected DbAdapter defaultAdapter() {
-        jdbcEventLogger.log("Failed to detect database type, using generic adapter");
-        return objectFactory.newInstance(DbAdapter.class, JdbcAdapter.class.getName());
-    }
+       @Inject
+       protected Injector injector;
+
+       @Inject
+       protected JdbcEventLogger jdbcEventLogger;
+
+       @Inject
+       protected AdhocObjectFactory objectFactory;
+       protected List<DbAdapterDetector> detectors;
+
+       public DefaultDbAdapterFactory(@Inject(Constants.SERVER_ADAPTER_DETECTORS_LIST) List<DbAdapterDetector> detectors) {
+               if (detectors == null) {
+                       throw new NullPointerException("Null detectors list");
+               }
+
+               this.detectors = detectors;
+       }
+
+       @Override
+       public DbAdapter createAdapter(DataNodeDescriptor nodeDescriptor, final DataSource dataSource) throws Exception {
+
+               String adapterType = null;
+
+               if (nodeDescriptor != null) {
+                       adapterType = nodeDescriptor.getAdapterType();
+               }
+
+               // must not create AutoAdapter via objectFactory, so treat explicit
+               // AutoAdapter as null and let it fall through to the default. (explicit
+               // AutoAdapter is often passed from the cdbimport plugin).
+               if (adapterType != null && adapterType.equals(AutoAdapter.class.getName())) {
+                       adapterType = null;
+               }
+
+               if (adapterType != null) {
+                       return objectFactory.newInstance(DbAdapter.class, adapterType);
+               } else {
+                       return new AutoAdapter(new Provider<DbAdapter>() {
+
+                               public DbAdapter get() {
+                                       return detectAdapter(dataSource);
+                               }
+                       }, jdbcEventLogger);
+               }
+       }
+
+       protected DbAdapter detectAdapter(DataSource dataSource) {
+
+               if (detectors.isEmpty()) {
+                       return defaultAdapter();
+               }
+
+               try (Connection c = dataSource.getConnection()) {
+                       return detectAdapter(c.getMetaData());
+               } catch (SQLException e) {
+                       throw new CayenneRuntimeException("Error detecting database type: " + e.getLocalizedMessage(), e);
+               }
+       }
+
+       protected DbAdapter detectAdapter(DatabaseMetaData metaData) throws SQLException {
+               // iterate in reverse order to allow custom factories to take precedence
+               // over the default ones configured in constructor
+               for (int i = detectors.size() - 1; i >= 0; i--) {
+                       DbAdapterDetector detector = detectors.get(i);
+                       DbAdapter adapter = detector.createAdapter(metaData);
+
+                       if (adapter != null) {
+                               jdbcEventLogger.log("Detected and installed adapter: " + adapter.getClass().getName());
+
+                               // TODO: should detector do this??
+                               injector.injectMembers(adapter);
+
+                               return adapter;
+                       }
+               }
+
+               return defaultAdapter();
+       }
+
+       protected DbAdapter defaultAdapter() {
+               jdbcEventLogger.log("Failed to detect database type, using generic adapter");
+               return objectFactory.newInstance(DbAdapter.class, JdbcAdapter.class.getName());
+       }
 }
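
detectAdapter(DatabaseMetaData) walks the detector list backwards, so detectors registered later (typically custom ones) override the defaults wired in through the constructor. A small sketch of that reverse-precedence lookup, with plain java.util.function types standing in for DbAdapterDetector (the stand-ins are assumptions, not Cayenne API):

    import java.util.ArrayList;
    import java.util.List;
    import java.util.function.Function;

    public class ReversePrecedenceSketch {

        // last registered detector returning non-null wins, like detectAdapter()
        static String detect(List<Function<String, String>> detectors, String metaData) {
            for (int i = detectors.size() - 1; i >= 0; i--) {
                String adapter = detectors.get(i).apply(metaData);
                if (adapter != null) {
                    return adapter;
                }
            }
            return "generic"; // fallback, like defaultAdapter()
        }

        public static void main(String[] args) {
            List<Function<String, String>> detectors = new ArrayList<>();
            detectors.add(md -> "built-in");
            detectors.add(md -> "custom"); // registered later, takes precedence
            System.out.println(detect(detectors, "any-metadata")); // prints custom
        }
    }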

http://git-wip-us.apache.org/repos/asf/cayenne/blob/26d8434d/cayenne-server/src/main/java/org/apache/cayenne/datasource/PoolAwareConnection.java
----------------------------------------------------------------------
diff --git 
a/cayenne-server/src/main/java/org/apache/cayenne/datasource/PoolAwareConnection.java
 
b/cayenne-server/src/main/java/org/apache/cayenne/datasource/PoolAwareConnection.java
index 6e7e746..d293b40 100644
--- 
a/cayenne-server/src/main/java/org/apache/cayenne/datasource/PoolAwareConnection.java
+++ 
b/cayenne-server/src/main/java/org/apache/cayenne/datasource/PoolAwareConnection.java
@@ -68,20 +68,15 @@ public class PoolAwareConnection implements Connection {
                }
 
                try {
-                       Statement statement = connection.createStatement();
-                       try {
-                       ResultSet rs = statement.executeQuery(validationQuery);
-                               try {
+
+                       try (Statement statement = connection.createStatement()) {
+
+                               try (ResultSet rs = statement.executeQuery(validationQuery)) {
 
                                        if (!rs.next()) {
                                                throw new SQLException("Connection validation failed, no result for query: " + validationQuery);
                                        }
-
-                               } finally {
-                                       rs.close();
                                }
-                       } finally {
-                               statement.close();
                        }
                } catch (SQLException e) {
                        return false;
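
Since Java 7 permits several resources in one try header (closed in reverse order of declaration), the two nested blocks above could equally be written as a single try. A compile-only sketch under that assumption (class and method names are illustrative):

    import java.sql.*;

    public class ValidationQuerySketch {

        // rs closes before statement, mirroring the nested version above;
        // an empty result is reported as invalid rather than thrown
        static boolean isValid(Connection connection, String validationQuery) {
            try (Statement statement = connection.createStatement();
                    ResultSet rs = statement.executeQuery(validationQuery)) {
                return rs.next();
            } catch (SQLException e) {
                return false;
            }
        }
    }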
