http://git-wip-us.apache.org/repos/asf/cayenne/blob/26d8434d/cayenne-server/src/main/java/org/apache/cayenne/access/IncrementalFaultList.java
----------------------------------------------------------------------
diff --git a/cayenne-server/src/main/java/org/apache/cayenne/access/IncrementalFaultList.java b/cayenne-server/src/main/java/org/apache/cayenne/access/IncrementalFaultList.java
index c1eeee2..7c66056 100644
--- a/cayenne-server/src/main/java/org/apache/cayenne/access/IncrementalFaultList.java
+++ b/cayenne-server/src/main/java/org/apache/cayenne/access/IncrementalFaultList.java
@@ -58,781 +58,778 @@ import org.apache.cayenne.util.Util;
  */
 public class IncrementalFaultList<E> implements List<E>, Serializable {
 
-    protected int pageSize;
-    protected List elements;
-    protected DataContext dataContext;
-    protected ObjEntity rootEntity;
-    protected SelectQuery<?> internalQuery;
-    protected int unfetchedObjects;
-
-    /**
-     * Stores a hint allowing to distinguish data rows from unfetched ids when
-     * the query fetches data rows.
-     */
-    protected int idWidth;
-
-    private IncrementalListHelper helper;
-
-    /**
-     * Defines the upper limit on the size of fetches. This is needed to avoid
-     * where clause size limitations.
-     */
-    protected int maxFetchSize;
-
-    // Don't confuse this with the JDBC ResultSet fetch size setting - this
-    // controls
-    // the where clause generation that is necessary to fetch specific records a
-    // page
-    // at a time. Some JDBC Drivers/Databases may have limits on statement
-    // length
-    // or complexity of the where clause - e.g., PostgreSQL having a default
-    // limit of
-    // 10,000 nested expressions.
-
-    /**
-     * Creates a new IncrementalFaultList using a given DataContext and query.
-     * 
-     * @param dataContext
-     *            DataContext used by IncrementalFaultList to fill itself with
-     *            objects.
-     * @param query
-     *            Main query used to retrieve data. Must have "pageSize"
-     *            property set to a value greater than zero.
-     * @param maxFetchSize
-     *            maximum number of fetches in one query
-     */
-    public IncrementalFaultList(DataContext dataContext, Query query, int 
maxFetchSize) {
-        QueryMetadata metadata = 
query.getMetaData(dataContext.getEntityResolver());
-        if (metadata.getPageSize() <= 0) {
-            throw new CayenneRuntimeException("Not a paginated query; page 
size: " + metadata.getPageSize());
-        }
-
-        this.dataContext = dataContext;
-        this.pageSize = metadata.getPageSize();
-        this.rootEntity = metadata.getObjEntity();
-
-        if (rootEntity == null) {
-            throw new CayenneRuntimeException("Pagination is not supported for 
queries not rooted in an ObjEntity");
-        }
-
-        // create an internal query, it is a partial replica of
-        // the original query and will serve as a value holder for
-        // various parameters
-        this.internalQuery = new SelectQuery<Object>(rootEntity);
-        this.internalQuery.setFetchingDataRows(metadata.isFetchingDataRows());
-        this.internalQuery.setPrefetchTree(metadata.getPrefetchTree());
-
-        this.helper = createHelper(metadata);
-        this.idWidth = metadata.getDbEntity().getPrimaryKeys().size();
-
-        List<Object> elementsUnsynced = new ArrayList<Object>();
-        fillIn(query, elementsUnsynced);
-        this.elements = Collections.synchronizedList(elementsUnsynced);
-
-        this.maxFetchSize = maxFetchSize;
-    }
-
-    /**
-     * @since 3.0
-     */
-    IncrementalListHelper createHelper(QueryMetadata metadata) {
-        if (metadata.isFetchingDataRows()) {
-            return new DataRowListHelper();
-        } else {
-            return new PersistentListHelper();
-        }
-    }
-
-    /**
-     * @since 1.2
-     */
-    SelectQuery getInternalQuery() {
-        return internalQuery;
-    }
-
-    /**
-     * Performs initialization of the list of objects. Only the first page is
-     * fully resolved. For the rest of the list, only ObjectIds are read.
-     * 
-     * @since 3.0
-     */
-    protected void fillIn(final Query query, List elementsList) {
-
-        elementsList.clear();
-
-        ResultIterator it = dataContext.performIteratedQuery(query);
-        try {
-
-            while (it.hasNextRow()) {
-                elementsList.add(it.nextRow());
-            }
-        } finally {
-            it.close();
-        }
-
-        unfetchedObjects = elementsList.size();
-    }
-
-    /**
-     * Will resolve all unread objects.
-     */
-    public void resolveAll() {
-        resolveInterval(0, size());
-    }
-
-    /**
-     * Checks that an object is of the same type as the rest of objects
-     * (DataObject or DataRows depending on the query type).
-     */
-    private void validateListObject(Object object) throws 
IllegalArgumentException {
-
-        // I am not sure if such a check makes sense???
-
-        if (internalQuery.isFetchingDataRows()) {
-            if (!(object instanceof Map)) {
-                throw new IllegalArgumentException("Only Map objects can be 
stored in this list.");
-            }
-        } else {
-            if (!(object instanceof Persistent)) {
-                throw new IllegalArgumentException("Only DataObjects can be 
stored in this list.");
-            }
-        }
-    }
-
-    /**
-     * Resolves a sublist of objects starting at <code>fromIndex</code> up to
-     * but not including <code>toIndex</code>. Internally performs bound
-     * checking and trims indexes accordingly.
-     */
-    protected void resolveInterval(int fromIndex, int toIndex) {
-        if (fromIndex >= toIndex) {
-            return;
-        }
-
-        synchronized (elements) {
-            if (elements.size() == 0) {
-                return;
-            }
-
-            // perform bound checking
-            if (fromIndex < 0) {
-                fromIndex = 0;
-            }
-
-            if (toIndex > elements.size()) {
-                toIndex = elements.size();
-            }
-
-            List<Expression> quals = new ArrayList<Expression>(pageSize);
-            List<Object> ids = new ArrayList<Object>(pageSize);
-            for (int i = fromIndex; i < toIndex; i++) {
-                Object object = elements.get(i);
-                if (helper.unresolvedSuspect(object)) {
-                    quals.add(buildIdQualifier(object));
-                    ids.add(object);
-                }
-            }
-
-            int qualsSize = quals.size();
-            if (qualsSize == 0) {
-                return;
-            }
-
-            // fetch the range of objects in fetchSize chunks
-            boolean fetchesDataRows = internalQuery.isFetchingDataRows();
-            List<Object> objects = new ArrayList<Object>(qualsSize);
-
-            int fetchSize = maxFetchSize > 0 ? maxFetchSize : 
Integer.MAX_VALUE;
-
-            int fetchEnd = Math.min(qualsSize, fetchSize);
-            int fetchBegin = 0;
-            while (fetchBegin < qualsSize) {
-                SelectQuery<Object> query = new 
SelectQuery<Object>(rootEntity, ExpressionFactory.joinExp(
-                        Expression.OR, quals.subList(fetchBegin, fetchEnd)));
-
-                query.setFetchingDataRows(fetchesDataRows);
-
-                if (!query.isFetchingDataRows()) {
-                    query.setPrefetchTree(internalQuery.getPrefetchTree());
-                }
-
-                objects.addAll(dataContext.performQuery(query));
-                fetchBegin = fetchEnd;
-                fetchEnd += Math.min(fetchSize, qualsSize - fetchEnd);
-            }
-
-            // sanity check - database data may have changed
-            checkPageResultConsistency(objects, ids);
-
-            // replace ids in the list with objects
-            Iterator it = objects.iterator();
-            while (it.hasNext()) {
-                helper.updateWithResolvedObjectInRange(it.next(), fromIndex, 
toIndex);
-            }
-
-            unfetchedObjects -= objects.size();
-        }
-    }
-
-    /**
-     * Returns a qualifier expression for an unresolved id object.
-     * 
-     * @since 3.0
-     */
-    Expression buildIdQualifier(Object id) {
-
-        Map<String, ?> map = (Map<String, ?>) id;
-        if (map.isEmpty()) {
-            throw new CayenneRuntimeException("Empty id map");
-        }
-
-        return ExpressionFactory.matchAllDbExp(map, Expression.EQUAL_TO);
-    }
-
-    /**
-     * @since 3.0
-     */
-    void checkPageResultConsistency(List<?> objects, List<?> ids) {
-
-        if (objects.size() < ids.size()) {
-            // find missing ids
-            StringBuilder buffer = new StringBuilder();
-            buffer.append("Some ObjectIds are missing from the database. ");
-            buffer.append("Expected ").append(ids.size()).append(", fetched 
").append(objects.size());
-
-            boolean first = true;
-            for (Object id : ids) {
-                boolean found = false;
-
-                for (Object object : objects) {
-
-                    if (helper.replacesObject(object, id)) {
-                        found = true;
-                        break;
-                    }
-                }
-
-                if (!found) {
-                    if (first) {
-                        first = false;
-                    } else {
-                        buffer.append(", ");
-                    }
-
-                    buffer.append(id.toString());
-                }
-            }
-
-            throw new CayenneRuntimeException(buffer.toString());
-        } else if (objects.size() > ids.size()) {
-            throw new CayenneRuntimeException("Expected " + ids.size() + " 
objects, retrieved " + objects.size());
-        }
-    }
-
-    /**
-     * Returns zero-based index of the virtual "page" for a given array element
-     * index.
-     */
-    public int pageIndex(int elementIndex) {
-        if (elementIndex < 0 || elementIndex > size()) {
-            throw new IndexOutOfBoundsException("Index: " + elementIndex);
-        }
-
-        if (pageSize <= 0 || elementIndex < 0) {
-            return -1;
-        }
-
-        return elementIndex / pageSize;
-    }
-
-    /**
-     * Get the upper bound on the number of records to resolve in one round 
trip
-     * to the database. This setting governs the size/complexity of the where
-     * clause generated to retrieve the next page of records. If the fetch size
-     * is less than the page size, then multiple fetches will be made to 
resolve
-     * a page.
-     */
-    public int getMaxFetchSize() {
-        return maxFetchSize;
-    }
-
-    public void setMaxFetchSize(int fetchSize) {
-        this.maxFetchSize = fetchSize;
-    }
-
-    /**
-     * Returns the dataContext.
-     * 
-     * @return DataContext
-     */
-    public DataContext getDataContext() {
-        return dataContext;
-    }
-
-    /**
-     * Returns the pageSize.
-     * 
-     * @return int
-     */
-    public int getPageSize() {
-        return pageSize;
-    }
-
-    /**
-     * Returns a list iterator for this list. DataObjects are resolved a page
-     * (according to getPageSize()) at a time as necessary - when retrieved 
with
-     * next() or previous().
-     */
-    public ListIterator<E> listIterator() {
-        return new IncrementalListIterator(0);
-    }
-
-    /**
-     * Returns a list iterator of the elements in this list (in proper
-     * sequence), starting at the specified position in this list. The 
specified
-     * index indicates the first element that would be returned by an initial
-     * call to the next method. An initial call to the previous method would
-     * return the element with the specified index minus one. DataObjects are
-     * resolved a page at a time (according to getPageSize()) as necessary -
-     * when retrieved with next() or previous().
-     */
-    public ListIterator<E> listIterator(int index) {
-        if (index < 0 || index > size()) {
-            throw new IndexOutOfBoundsException("Index: " + index);
-        }
-
-        return new IncrementalListIterator(index);
-    }
-
-    /**
-     * Return an iterator for this list. DataObjects are resolved a page
-     * (according to getPageSize()) at a time as necessary - when retrieved 
with
-     * next().
-     */
-    public Iterator<E> iterator() {
-        // by virtue of get(index)'s implementation, resolution of ids into
-        // objects will occur on pageSize boundaries as necessary.
-        return new Iterator<E>() {
-
-            int listIndex = 0;
-
-            public boolean hasNext() {
-                return (listIndex < elements.size());
-            }
-
-            public E next() {
-                if (listIndex >= elements.size())
-                    throw new NoSuchElementException("no more elements");
-
-                return get(listIndex++);
-            }
-
-            public void remove() {
-                throw new UnsupportedOperationException("remove not 
supported.");
-            }
-        };
-    }
-
-    /**
-     * @see java.util.List#add(int, Object)
-     */
-    public void add(int index, Object element) {
-        validateListObject(element);
-
-        synchronized (elements) {
-            elements.add(index, element);
-        }
-    }
-
-    /**
-     * @see java.util.Collection#add(Object)
-     */
-    public boolean add(Object o) {
-        validateListObject(o);
-
-        synchronized (elements) {
-            return elements.add(o);
-        }
-    }
-
-    /**
-     * @see java.util.Collection#addAll(Collection)
-     */
-    public boolean addAll(Collection<? extends E> c) {
-        synchronized (elements) {
-            return elements.addAll(c);
-        }
-    }
-
-    /**
-     * @see java.util.List#addAll(int, Collection)
-     */
-    public boolean addAll(int index, Collection<? extends E> c) {
-        synchronized (elements) {
-            return elements.addAll(index, c);
-        }
-    }
-
-    /**
-     * @see java.util.Collection#clear()
-     */
-    public void clear() {
-        synchronized (elements) {
-            elements.clear();
-        }
-    }
-
-    /**
-     * @see java.util.Collection#contains(Object)
-     */
-    public boolean contains(Object o) {
-        synchronized (elements) {
-            return elements.contains(o);
-        }
-    }
-
-    /**
-     * @see java.util.Collection#containsAll(Collection)
-     */
-    public boolean containsAll(Collection<?> c) {
-        synchronized (elements) {
-            return elements.containsAll(c);
-        }
-    }
-
-    public E get(int index) {
-        synchronized (elements) {
-            Object o = elements.get(index);
-
-            if (helper.unresolvedSuspect(o)) {
-                // read this page
-                int pageStart = pageIndex(index) * pageSize;
-                resolveInterval(pageStart, pageStart + pageSize);
-
-                return (E) elements.get(index);
-            } else {
-                return (E) o;
-            }
-        }
-    }
-
-    /**
-     * @see java.util.List#indexOf(Object)
-     */
-    public int indexOf(Object o) {
-        return helper.indexOfObject(o);
-    }
-
-    /**
-     * @see java.util.Collection#isEmpty()
-     */
-    public boolean isEmpty() {
-        synchronized (elements) {
-            return elements.isEmpty();
-        }
-    }
-
-    public int lastIndexOf(Object o) {
-        return helper.lastIndexOfObject(o);
-    }
-
-    public E remove(int index) {
-        synchronized (elements) {
-            // have to resolve the page to return correct object
-            E object = get(index);
-            elements.remove(index);
-            return object;
-        }
-    }
-
-    public boolean remove(Object o) {
-        synchronized (elements) {
-            return elements.remove(o);
-        }
-    }
-
-    public boolean removeAll(Collection<?> c) {
-        synchronized (elements) {
-            return elements.removeAll(c);
-        }
-    }
-
-    public boolean retainAll(Collection<?> c) {
-        synchronized (elements) {
-            return elements.retainAll(c);
-        }
-    }
-
-    /**
-     * @see java.util.List#set(int, Object)
-     */
-    public E set(int index, Object element) {
-        validateListObject(element);
-
-        synchronized (elements) {
-            return (E) elements.set(index, element);
-        }
-    }
-
-    /**
-     * @see java.util.Collection#size()
-     */
-    public int size() {
-        synchronized (elements) {
-            return elements.size();
-        }
-    }
-
-    public List<E> subList(int fromIndex, int toIndex) {
-        synchronized (elements) {
-            resolveInterval(fromIndex, toIndex);
-            return elements.subList(fromIndex, toIndex);
-        }
-    }
-
-    public Object[] toArray() {
-        resolveAll();
-
-        return elements.toArray();
-    }
-
-    public <T> T[] toArray(T[] a) {
-        resolveAll();
-
-        return (T[]) elements.toArray(a);
-    }
-
-    /**
-     * Returns a total number of objects that are not resolved yet.
-     */
-    public int getUnfetchedObjects() {
-        return unfetchedObjects;
-    }
-
-    abstract class IncrementalListHelper implements Serializable {
-
-        int indexOfObject(Object object) {
-            if (unresolvedSuspect(object)) {
-                return -1;
-            }
-
-            synchronized (elements) {
-                for (int i = 0; i < elements.size(); i++) {
-                    if (objectsAreEqual(object, elements.get(i))) {
-                        return i;
-                    }
-                }
-            }
-            return -1;
-        }
-
-        int lastIndexOfObject(Object object) {
-            if (unresolvedSuspect(object)) {
-                return -1;
-            }
-
-            synchronized (elements) {
-                for (int i = elements.size() - 1; i >= 0; i--) {
-                    if (objectsAreEqual(object, elements.get(i))) {
-                        return i;
-                    }
-                }
-            }
-
-            return -1;
-        }
-
-        void updateWithResolvedObjectInRange(Object object, int from, int to) {
-            boolean found = false;
-
-            synchronized (elements) {
-
-                for (int i = from; i < to; i++) {
-                    if (replacesObject(object, elements.get(i))) {
-                        elements.set(i, object);
-                        found = true;
-                        break;
-                    }
-                }
-            }
-
-            if (!found) {
-                throw new CayenneRuntimeException("Can't find id for " + 
object);
-            }
-        }
-
-        /**
-         * Returns true if an object is not the type of object expected in the
-         * list. This method is not expected to perform thorough checking of 
the
-         * object type. What's important is the guarantee that an unresolved
-         * object representation will always return true for this method, and
-         * resolved will return false. Other types of objects that users may
-         * choose to add to the list will not be analyzed in detail.
-         */
-        abstract boolean unresolvedSuspect(Object object);
-
-        abstract boolean objectsAreEqual(Object object, Object 
objectInTheList);
-
-        abstract boolean replacesObject(Object object, Object objectInTheList);
-    }
-
-    class PersistentListHelper extends IncrementalListHelper {
-
-        @Override
-        boolean unresolvedSuspect(Object object) {
-            if (!(object instanceof Persistent)) {
-                return true;
-            }
-
-            // don't do a full check for object type matching the type of
-            // objects in the
-            // list... what's important is a quick "false" return if the object
-            // is of type
-            // representing unresolved objects.. furthermore, if inheritance is
-            // involved,
-            // we'll need an even more extensive check (see CAY-1142 on
-            // inheritance
-            // issues).
-
-            return false;
-        }
-
-        @Override
-        boolean objectsAreEqual(Object object, Object objectInTheList) {
-
-            if (objectInTheList instanceof Persistent) {
-                // due to object uniquing this should be sufficient
-                return object == objectInTheList;
-            } else {
-                return ((Persistent) 
object).getObjectId().getIdSnapshot().equals(objectInTheList);
-            }
-        }
-
-        @Override
-        boolean replacesObject(Object object, Object objectInTheList) {
-            if (objectInTheList instanceof Persistent) {
-                return false;
-            }
-
-            Persistent dataObject = (Persistent) object;
-            return 
dataObject.getObjectId().getIdSnapshot().equals(objectInTheList);
-        }
-    }
-
-    class DataRowListHelper extends IncrementalListHelper {
-
-        @Override
-        boolean unresolvedSuspect(Object object) {
-            if (!(object instanceof Map)) {
-                return true;
-            }
-
-            return false;
-        }
-
-        @Override
-        boolean objectsAreEqual(Object object, Object objectInTheList) {
-            if (object == null && objectInTheList == null) {
-                return true;
-            }
-
-            if (object != null && objectInTheList != null) {
-
-                Map<?, ?> id = (Map<?, ?>) objectInTheList;
-                Map<?, ?> map = (Map<?, ?>) object;
-
-                if (id.size() != map.size()) {
-                    return false;
-                }
-
-                // id must be a subset of this map
-                for (Map.Entry<?, ?> entry : id.entrySet()) {
-                    Object key = entry.getKey();
-                    Object value = entry.getValue();
-                    if (!Util.nullSafeEquals(value, map.get(key))) {
-                        return false;
-                    }
-                }
-
-                return true;
-            }
-
-            return false;
-        }
-
-        @Override
-        boolean replacesObject(Object object, Object objectInTheList) {
-
-            Map<?, ?> id = (Map<?, ?>) objectInTheList;
-            if (id.size() > idWidth) {
-                return false;
-            }
-
-            // id must be a subset of this map
-            Map<?, ?> map = (Map<?, ?>) object;
-            for (Map.Entry<?, ?> entry : id.entrySet()) {
-                Object key = entry.getKey();
-                Object value = entry.getValue();
-                if (!Util.nullSafeEquals(value, map.get(key))) {
-                    return false;
-                }
-            }
-
-            return true;
-        }
-    }
-
-    class IncrementalListIterator implements ListIterator<E> {
-
-        // by virtue of get(index)'s implementation, resolution of ids into
-        // objects will occur on pageSize boundaries as necessary.
-
-        int listIndex;
-
-        public IncrementalListIterator(int startIndex) {
-            this.listIndex = startIndex;
-        }
-
-        public void add(Object o) {
-            throw new UnsupportedOperationException("add operation not 
supported");
-        }
-
-        public boolean hasNext() {
-            return (listIndex < elements.size());
-        }
-
-        public boolean hasPrevious() {
-            return (listIndex > 0);
-        }
-
-        public E next() {
-            if (listIndex >= elements.size())
-                throw new NoSuchElementException("at the end of the list");
-
-            return get(listIndex++);
-        }
-
-        public int nextIndex() {
-            return listIndex;
-        }
-
-        public E previous() {
-            if (listIndex < 1)
-                throw new NoSuchElementException("at the beginning of the 
list");
-
-            return get(--listIndex);
-        }
-
-        public int previousIndex() {
-            return (listIndex - 1);
-        }
+       protected int pageSize;
+       protected List elements;
+       protected DataContext dataContext;
+       protected ObjEntity rootEntity;
+       protected SelectQuery<?> internalQuery;
+       protected int unfetchedObjects;
+
+       /**
+        * Stores a hint allowing to distinguish data rows from unfetched ids 
when
+        * the query fetches data rows.
+        */
+       protected int idWidth;
+
+       private IncrementalListHelper helper;
+
+       /**
+        * Defines the upper limit on the size of fetches. This is needed to 
avoid
+        * where clause size limitations.
+        */
+       protected int maxFetchSize;
+
+       // Don't confuse this with the JDBC ResultSet fetch size setting - this
+       // controls
+       // the where clause generation that is necessary to fetch specific records a
+       // page
+       // at a time. Some JDBC Drivers/Databases may have limits on statement
+       // length
+       // or complexity of the where clause - e.g., PostgreSQL having a default
+       // limit of
+       // 10,000 nested expressions.
+
+       /**
+        * Creates a new IncrementalFaultList using a given DataContext and 
query.
+        * 
+        * @param dataContext
+        *            DataContext used by IncrementalFaultList to fill itself 
with
+        *            objects.
+        * @param query
+        *            Main query used to retrieve data. Must have "pageSize"
+        *            property set to a value greater than zero.
+        * @param maxFetchSize
+        *            maximum number of fetches in one query
+        */
+       public IncrementalFaultList(DataContext dataContext, Query query, int 
maxFetchSize) {
+               QueryMetadata metadata = 
query.getMetaData(dataContext.getEntityResolver());
+               if (metadata.getPageSize() <= 0) {
+                       throw new CayenneRuntimeException("Not a paginated 
query; page size: " + metadata.getPageSize());
+               }
+
+               this.dataContext = dataContext;
+               this.pageSize = metadata.getPageSize();
+               this.rootEntity = metadata.getObjEntity();
+
+               if (rootEntity == null) {
+                       throw new CayenneRuntimeException("Pagination is not 
supported for queries not rooted in an ObjEntity");
+               }
+
+               // create an internal query, it is a partial replica of
+               // the original query and will serve as a value holder for
+               // various parameters
+               this.internalQuery = new SelectQuery<Object>(rootEntity);
+               
this.internalQuery.setFetchingDataRows(metadata.isFetchingDataRows());
+               this.internalQuery.setPrefetchTree(metadata.getPrefetchTree());
+
+               this.helper = createHelper(metadata);
+               this.idWidth = metadata.getDbEntity().getPrimaryKeys().size();
+
+               List<Object> elementsUnsynced = new ArrayList<Object>();
+               fillIn(query, elementsUnsynced);
+               this.elements = Collections.synchronizedList(elementsUnsynced);
+
+               this.maxFetchSize = maxFetchSize;
+       }
+
+       /**
+        * @since 3.0
+        */
+       IncrementalListHelper createHelper(QueryMetadata metadata) {
+               if (metadata.isFetchingDataRows()) {
+                       return new DataRowListHelper();
+               } else {
+                       return new PersistentListHelper();
+               }
+       }
+
+       /**
+        * @since 1.2
+        */
+       SelectQuery getInternalQuery() {
+               return internalQuery;
+       }
+
+       /**
+        * Performs initialization of the list of objects. Only the first page is
+        * fully resolved. For the rest of the list, only ObjectIds are read.
+        * 
+        * @since 3.0
+        */
+       protected void fillIn(final Query query, List elementsList) {
+
+               elementsList.clear();
+
+               try (ResultIterator it = dataContext.performIteratedQuery(query);) {
+
+                       while (it.hasNextRow()) {
+                               elementsList.add(it.nextRow());
+                       }
+               }
+
+               unfetchedObjects = elementsList.size();
+       }
+
+       /**
+        * Will resolve all unread objects.
+        */
+       public void resolveAll() {
+               resolveInterval(0, size());
+       }
+
+       /**
+        * Checks that an object is of the same type as the rest of objects
+        * (DataObject or DataRows depending on the query type).
+        */
+       private void validateListObject(Object object) throws 
IllegalArgumentException {
+
+               // I am not sure if such a check makes sense???
+
+               if (internalQuery.isFetchingDataRows()) {
+                       if (!(object instanceof Map)) {
+                               throw new IllegalArgumentException("Only Map 
objects can be stored in this list.");
+                       }
+               } else {
+                       if (!(object instanceof Persistent)) {
+                               throw new IllegalArgumentException("Only 
DataObjects can be stored in this list.");
+                       }
+               }
+       }
+
+       /**
+        * Resolves a sublist of objects starting at <code>fromIndex</code> up 
to
+        * but not including <code>toIndex</code>. Internally performs bound
+        * checking and trims indexes accordingly.
+        */
+       protected void resolveInterval(int fromIndex, int toIndex) {
+               if (fromIndex >= toIndex) {
+                       return;
+               }
+
+               synchronized (elements) {
+                       if (elements.size() == 0) {
+                               return;
+                       }
+
+                       // perform bound checking
+                       if (fromIndex < 0) {
+                               fromIndex = 0;
+                       }
+
+                       if (toIndex > elements.size()) {
+                               toIndex = elements.size();
+                       }
+
+                       List<Expression> quals = new 
ArrayList<Expression>(pageSize);
+                       List<Object> ids = new ArrayList<Object>(pageSize);
+                       for (int i = fromIndex; i < toIndex; i++) {
+                               Object object = elements.get(i);
+                               if (helper.unresolvedSuspect(object)) {
+                                       quals.add(buildIdQualifier(object));
+                                       ids.add(object);
+                               }
+                       }
+
+                       int qualsSize = quals.size();
+                       if (qualsSize == 0) {
+                               return;
+                       }
+
+                       // fetch the range of objects in fetchSize chunks
+                       boolean fetchesDataRows = 
internalQuery.isFetchingDataRows();
+                       List<Object> objects = new ArrayList<Object>(qualsSize);
+
+                       int fetchSize = maxFetchSize > 0 ? maxFetchSize : 
Integer.MAX_VALUE;
+
+                       int fetchEnd = Math.min(qualsSize, fetchSize);
+                       int fetchBegin = 0;
+                       while (fetchBegin < qualsSize) {
+                               SelectQuery<Object> query = new 
SelectQuery<Object>(rootEntity, ExpressionFactory.joinExp(
+                                               Expression.OR, 
quals.subList(fetchBegin, fetchEnd)));
+
+                               query.setFetchingDataRows(fetchesDataRows);
+
+                               if (!query.isFetchingDataRows()) {
+                                       
query.setPrefetchTree(internalQuery.getPrefetchTree());
+                               }
+
+                               objects.addAll(dataContext.performQuery(query));
+                               fetchBegin = fetchEnd;
+                               fetchEnd += Math.min(fetchSize, qualsSize - 
fetchEnd);
+                       }
+
+                       // sanity check - database data may have changed
+                       checkPageResultConsistency(objects, ids);
+
+                       // replace ids in the list with objects
+                       Iterator it = objects.iterator();
+                       while (it.hasNext()) {
+                               
helper.updateWithResolvedObjectInRange(it.next(), fromIndex, toIndex);
+                       }
+
+                       unfetchedObjects -= objects.size();
+               }
+       }
+
+       /**
+        * Returns a qualifier expression for an unresolved id object.
+        * 
+        * @since 3.0
+        */
+       Expression buildIdQualifier(Object id) {
+
+               Map<String, ?> map = (Map<String, ?>) id;
+               if (map.isEmpty()) {
+                       throw new CayenneRuntimeException("Empty id map");
+               }
+
+               return ExpressionFactory.matchAllDbExp(map, 
Expression.EQUAL_TO);
+       }
+
+       /**
+        * @since 3.0
+        */
+       void checkPageResultConsistency(List<?> objects, List<?> ids) {
+
+               if (objects.size() < ids.size()) {
+                       // find missing ids
+                       StringBuilder buffer = new StringBuilder();
+                       buffer.append("Some ObjectIds are missing from the 
database. ");
+                       buffer.append("Expected ").append(ids.size()).append(", 
fetched ").append(objects.size());
+
+                       boolean first = true;
+                       for (Object id : ids) {
+                               boolean found = false;
+
+                               for (Object object : objects) {
+
+                                       if (helper.replacesObject(object, id)) {
+                                               found = true;
+                                               break;
+                                       }
+                               }
+
+                               if (!found) {
+                                       if (first) {
+                                               first = false;
+                                       } else {
+                                               buffer.append(", ");
+                                       }
+
+                                       buffer.append(id.toString());
+                               }
+                       }
+
+                       throw new CayenneRuntimeException(buffer.toString());
+               } else if (objects.size() > ids.size()) {
+                       throw new CayenneRuntimeException("Expected " + 
ids.size() + " objects, retrieved " + objects.size());
+               }
+       }
+
+       /**
+        * Returns zero-based index of the virtual "page" for a given array 
element
+        * index.
+        */
+       public int pageIndex(int elementIndex) {
+               if (elementIndex < 0 || elementIndex > size()) {
+                       throw new IndexOutOfBoundsException("Index: " + 
elementIndex);
+               }
+
+               if (pageSize <= 0 || elementIndex < 0) {
+                       return -1;
+               }
+
+               return elementIndex / pageSize;
+       }
+
+       /**
+        * Get the upper bound on the number of records to resolve in one round 
trip
+        * to the database. This setting governs the size/complexity of the 
where
+        * clause generated to retrieve the next page of records. If the fetch 
size
+        * is less than the page size, then multiple fetches will be made to 
resolve
+        * a page.
+        */
+       public int getMaxFetchSize() {
+               return maxFetchSize;
+       }
+
+       public void setMaxFetchSize(int fetchSize) {
+               this.maxFetchSize = fetchSize;
+       }
+
+       /**
+        * Returns the dataContext.
+        * 
+        * @return DataContext
+        */
+       public DataContext getDataContext() {
+               return dataContext;
+       }
+
+       /**
+        * Returns the pageSize.
+        * 
+        * @return int
+        */
+       public int getPageSize() {
+               return pageSize;
+       }
+
+       /**
+        * Returns a list iterator for this list. DataObjects are resolved a 
page
+        * (according to getPageSize()) at a time as necessary - when retrieved 
with
+        * next() or previous().
+        */
+       public ListIterator<E> listIterator() {
+               return new IncrementalListIterator(0);
+       }
+
+       /**
+        * Returns a list iterator of the elements in this list (in proper
+        * sequence), starting at the specified position in this list. The 
specified
+        * index indicates the first element that would be returned by an 
initial
+        * call to the next method. An initial call to the previous method would
+        * return the element with the specified index minus one. DataObjects 
are
+        * resolved a page at a time (according to getPageSize()) as necessary -
+        * when retrieved with next() or previous().
+        */
+       public ListIterator<E> listIterator(int index) {
+               if (index < 0 || index > size()) {
+                       throw new IndexOutOfBoundsException("Index: " + index);
+               }
+
+               return new IncrementalListIterator(index);
+       }
+
+       /**
+        * Return an iterator for this list. DataObjects are resolved a page
+        * (according to getPageSize()) at a time as necessary - when retrieved 
with
+        * next().
+        */
+       public Iterator<E> iterator() {
+               // by virtue of get(index)'s implementation, resolution of ids 
into
+               // objects will occur on pageSize boundaries as necessary.
+               return new Iterator<E>() {
+
+                       int listIndex = 0;
+
+                       public boolean hasNext() {
+                               return (listIndex < elements.size());
+                       }
+
+                       public E next() {
+                               if (listIndex >= elements.size())
+                                       throw new NoSuchElementException("no 
more elements");
+
+                               return get(listIndex++);
+                       }
+
+                       public void remove() {
+                               throw new UnsupportedOperationException("remove 
not supported.");
+                       }
+               };
+       }
+
+       /**
+        * @see java.util.List#add(int, Object)
+        */
+       public void add(int index, Object element) {
+               validateListObject(element);
+
+               synchronized (elements) {
+                       elements.add(index, element);
+               }
+       }
+
+       /**
+        * @see java.util.Collection#add(Object)
+        */
+       public boolean add(Object o) {
+               validateListObject(o);
+
+               synchronized (elements) {
+                       return elements.add(o);
+               }
+       }
+
+       /**
+        * @see java.util.Collection#addAll(Collection)
+        */
+       public boolean addAll(Collection<? extends E> c) {
+               synchronized (elements) {
+                       return elements.addAll(c);
+               }
+       }
+
+       /**
+        * @see java.util.List#addAll(int, Collection)
+        */
+       public boolean addAll(int index, Collection<? extends E> c) {
+               synchronized (elements) {
+                       return elements.addAll(index, c);
+               }
+       }
+
+       /**
+        * @see java.util.Collection#clear()
+        */
+       public void clear() {
+               synchronized (elements) {
+                       elements.clear();
+               }
+       }
+
+       /**
+        * @see java.util.Collection#contains(Object)
+        */
+       public boolean contains(Object o) {
+               synchronized (elements) {
+                       return elements.contains(o);
+               }
+       }
+
+       /**
+        * @see java.util.Collection#containsAll(Collection)
+        */
+       public boolean containsAll(Collection<?> c) {
+               synchronized (elements) {
+                       return elements.containsAll(c);
+               }
+       }
+
+       public E get(int index) {
+               synchronized (elements) {
+                       Object o = elements.get(index);
+
+                       if (helper.unresolvedSuspect(o)) {
+                               // read this page
+                               int pageStart = pageIndex(index) * pageSize;
+                               resolveInterval(pageStart, pageStart + 
pageSize);
+
+                               return (E) elements.get(index);
+                       } else {
+                               return (E) o;
+                       }
+               }
+       }
+
+       /**
+        * @see java.util.List#indexOf(Object)
+        */
+       public int indexOf(Object o) {
+               return helper.indexOfObject(o);
+       }
+
+       /**
+        * @see java.util.Collection#isEmpty()
+        */
+       public boolean isEmpty() {
+               synchronized (elements) {
+                       return elements.isEmpty();
+               }
+       }
+
+       public int lastIndexOf(Object o) {
+               return helper.lastIndexOfObject(o);
+       }
+
+       public E remove(int index) {
+               synchronized (elements) {
+                       // have to resolve the page to return correct object
+                       E object = get(index);
+                       elements.remove(index);
+                       return object;
+               }
+       }
+
+       public boolean remove(Object o) {
+               synchronized (elements) {
+                       return elements.remove(o);
+               }
+       }
+
+       public boolean removeAll(Collection<?> c) {
+               synchronized (elements) {
+                       return elements.removeAll(c);
+               }
+       }
+
+       public boolean retainAll(Collection<?> c) {
+               synchronized (elements) {
+                       return elements.retainAll(c);
+               }
+       }
+
+       /**
+        * @see java.util.List#set(int, Object)
+        */
+       public E set(int index, Object element) {
+               validateListObject(element);
+
+               synchronized (elements) {
+                       return (E) elements.set(index, element);
+               }
+       }
+
+       /**
+        * @see java.util.Collection#size()
+        */
+       public int size() {
+               synchronized (elements) {
+                       return elements.size();
+               }
+       }
+
+       public List<E> subList(int fromIndex, int toIndex) {
+               synchronized (elements) {
+                       resolveInterval(fromIndex, toIndex);
+                       return elements.subList(fromIndex, toIndex);
+               }
+       }
+
+       public Object[] toArray() {
+               resolveAll();
+
+               return elements.toArray();
+       }
+
+       public <T> T[] toArray(T[] a) {
+               resolveAll();
+
+               return (T[]) elements.toArray(a);
+       }
+
+       /**
+        * Returns a total number of objects that are not resolved yet.
+        */
+       public int getUnfetchedObjects() {
+               return unfetchedObjects;
+       }
+
+       abstract class IncrementalListHelper implements Serializable {
+
+               int indexOfObject(Object object) {
+                       if (unresolvedSuspect(object)) {
+                               return -1;
+                       }
+
+                       synchronized (elements) {
+                               for (int i = 0; i < elements.size(); i++) {
+                                       if (objectsAreEqual(object, 
elements.get(i))) {
+                                               return i;
+                                       }
+                               }
+                       }
+                       return -1;
+               }
+
+               int lastIndexOfObject(Object object) {
+                       if (unresolvedSuspect(object)) {
+                               return -1;
+                       }
+
+                       synchronized (elements) {
+                               for (int i = elements.size() - 1; i >= 0; i--) {
+                                       if (objectsAreEqual(object, 
elements.get(i))) {
+                                               return i;
+                                       }
+                               }
+                       }
+
+                       return -1;
+               }
+
+               void updateWithResolvedObjectInRange(Object object, int from, 
int to) {
+                       boolean found = false;
+
+                       synchronized (elements) {
+
+                               for (int i = from; i < to; i++) {
+                                       if (replacesObject(object, 
elements.get(i))) {
+                                               elements.set(i, object);
+                                               found = true;
+                                               break;
+                                       }
+                               }
+                       }
+
+                       if (!found) {
+                               throw new CayenneRuntimeException("Can't find 
id for " + object);
+                       }
+               }
+
+               /**
+                * Returns true if an object is not the type of object expected 
in the
+                * list. This method is not expected to perform thorough 
checking of the
+                * object type. What's important is the guarantee that an 
unresolved
+                * object representation will always return true for this 
method, and
+                * resolved will return false. Other types of objects that 
users may
+                * choose to add to the list will not be analyzed in detail.
+                */
+               abstract boolean unresolvedSuspect(Object object);
+
+               abstract boolean objectsAreEqual(Object object, Object 
objectInTheList);
+
+               abstract boolean replacesObject(Object object, Object 
objectInTheList);
+       }
+
+       class PersistentListHelper extends IncrementalListHelper {
+
+               @Override
+               boolean unresolvedSuspect(Object object) {
+                       if (!(object instanceof Persistent)) {
+                               return true;
+                       }
+
+                       // don't do a full check for object type matching the 
type of
+                       // objects in the
+                       // list... what's important is a quick "false" return 
if the object
+                       // is of type
+                       // representing unresolved objects.. furthermore, if 
inheritance is
+                       // involved,
+                       // we'll need an even more extensive check (see 
CAY-1142 on
+                       // inheritance
+                       // issues).
+
+                       return false;
+               }
+
+               @Override
+               boolean objectsAreEqual(Object object, Object objectInTheList) {
+
+                       if (objectInTheList instanceof Persistent) {
+                               // due to object uniquing this should be 
sufficient
+                               return object == objectInTheList;
+                       } else {
+                               return ((Persistent) 
object).getObjectId().getIdSnapshot().equals(objectInTheList);
+                       }
+               }
+
+               @Override
+               boolean replacesObject(Object object, Object objectInTheList) {
+                       if (objectInTheList instanceof Persistent) {
+                               return false;
+                       }
+
+                       Persistent dataObject = (Persistent) object;
+                       return 
dataObject.getObjectId().getIdSnapshot().equals(objectInTheList);
+               }
+       }
+
+       class DataRowListHelper extends IncrementalListHelper {
+
+               @Override
+               boolean unresolvedSuspect(Object object) {
+                       if (!(object instanceof Map)) {
+                               return true;
+                       }
+
+                       return false;
+               }
+
+               @Override
+               boolean objectsAreEqual(Object object, Object objectInTheList) {
+                       if (object == null && objectInTheList == null) {
+                               return true;
+                       }
+
+                       if (object != null && objectInTheList != null) {
+
+                               Map<?, ?> id = (Map<?, ?>) objectInTheList;
+                               Map<?, ?> map = (Map<?, ?>) object;
+
+                               if (id.size() != map.size()) {
+                                       return false;
+                               }
+
+                               // id must be a subset of this map
+                               for (Map.Entry<?, ?> entry : id.entrySet()) {
+                                       Object key = entry.getKey();
+                                       Object value = entry.getValue();
+                                       if (!Util.nullSafeEquals(value, 
map.get(key))) {
+                                               return false;
+                                       }
+                               }
+
+                               return true;
+                       }
+
+                       return false;
+               }
+
+               @Override
+               boolean replacesObject(Object object, Object objectInTheList) {
+
+                       Map<?, ?> id = (Map<?, ?>) objectInTheList;
+                       if (id.size() > idWidth) {
+                               return false;
+                       }
+
+                       // id must be a subset of this map
+                       Map<?, ?> map = (Map<?, ?>) object;
+                       for (Map.Entry<?, ?> entry : id.entrySet()) {
+                               Object key = entry.getKey();
+                               Object value = entry.getValue();
+                               if (!Util.nullSafeEquals(value, map.get(key))) {
+                                       return false;
+                               }
+                       }
+
+                       return true;
+               }
+       }
+
+       class IncrementalListIterator implements ListIterator<E> {
+
+               // by virtue of get(index)'s implementation, resolution of ids 
into
+               // objects will occur on pageSize boundaries as necessary.
+
+               int listIndex;
+
+               public IncrementalListIterator(int startIndex) {
+                       this.listIndex = startIndex;
+               }
+
+               public void add(Object o) {
+                       throw new UnsupportedOperationException("add operation 
not supported");
+               }
+
+               public boolean hasNext() {
+                       return (listIndex < elements.size());
+               }
+
+               public boolean hasPrevious() {
+                       return (listIndex > 0);
+               }
+
+               public E next() {
+                       if (listIndex >= elements.size())
+                               throw new NoSuchElementException("at the end of 
the list");
+
+                       return get(listIndex++);
+               }
+
+               public int nextIndex() {
+                       return listIndex;
+               }
+
+               public E previous() {
+                       if (listIndex < 1)
+                               throw new NoSuchElementException("at the 
beginning of the list");
+
+                       return get(--listIndex);
+               }
+
+               public int previousIndex() {
+                       return (listIndex - 1);
+               }
 
-        public void remove() {
-            throw new UnsupportedOperationException("remove operation not 
supported");
-        }
-
-        public void set(Object o) {
-            IncrementalFaultList.this.set(listIndex - 1, o);
-        }
-    }
+               public void remove() {
+                       throw new UnsupportedOperationException("remove 
operation not supported");
+               }
+
+               public void set(Object o) {
+                       IncrementalFaultList.this.set(listIndex - 1, o);
+               }
+       }
 }
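
For context, a minimal usage sketch of the class changed above (not part of this commit): a SelectQuery with a positive page size is what makes DataContext hand back an IncrementalFaultList, so only the first page is resolved eagerly and later pages are faulted in through resolveInterval(). The "Artist" entity name and the pre-configured DataContext are illustrative assumptions.

import java.util.List;

import org.apache.cayenne.access.DataContext;
import org.apache.cayenne.query.SelectQuery;

public class PaginationSketch {

    // "Artist" is a hypothetical mapped ObjEntity name; "context" is assumed
    // to be an already configured DataContext.
    public static void browse(DataContext context) {
        SelectQuery<Object> query = new SelectQuery<Object>("Artist");

        // A positive page size turns the query result into an
        // IncrementalFaultList: the first page holds resolved objects,
        // the rest of the list holds unresolved ids until touched.
        query.setPageSize(50);

        List<?> results = context.performQuery(query);

        // Touching an element outside the first page triggers
        // resolveInterval() for that page (one extra query per page).
        if (results.size() > 50) {
            System.out.println(results.get(50));
        }
    }
}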

http://git-wip-us.apache.org/repos/asf/cayenne/blob/26d8434d/cayenne-server/src/main/java/org/apache/cayenne/access/dbsync/CreateIfNoSchemaStrategy.java
----------------------------------------------------------------------
diff --git a/cayenne-server/src/main/java/org/apache/cayenne/access/dbsync/CreateIfNoSchemaStrategy.java b/cayenne-server/src/main/java/org/apache/cayenne/access/dbsync/CreateIfNoSchemaStrategy.java
index 15f389f..3a7675f 100644
--- a/cayenne-server/src/main/java/org/apache/cayenne/access/dbsync/CreateIfNoSchemaStrategy.java
+++ b/cayenne-server/src/main/java/org/apache/cayenne/access/dbsync/CreateIfNoSchemaStrategy.java
@@ -39,84 +39,68 @@ import org.apache.commons.logging.LogFactory;
  */
 public class CreateIfNoSchemaStrategy extends BaseSchemaUpdateStrategy {
 
-    final static Log logger = LogFactory.getLog(CreateIfNoSchemaStrategy.class);
+       final static Log logger = LogFactory.getLog(CreateIfNoSchemaStrategy.class);
 
-    @Override
-    protected void processSchemaUpdate(DataNode dataNode) throws SQLException {
+       @Override
+       protected void processSchemaUpdate(DataNode dataNode) throws 
SQLException {
 
-        Map<String, Boolean> nameTables = getNameTablesInDB(dataNode);
-        Collection<DbEntity> entities = 
dataNode.getEntityResolver().getDbEntities();
-        boolean generate = true;
-        Iterator<DbEntity> it = entities.iterator();
-        while (it.hasNext()) {
-            if (nameTables.get(it.next().getName()) != null) {
-                generate = false;
-                break;
-            }
-        }
+               Map<String, Boolean> nameTables = getNameTablesInDB(dataNode);
+               Collection<DbEntity> entities = 
dataNode.getEntityResolver().getDbEntities();
+               boolean generate = true;
+               Iterator<DbEntity> it = entities.iterator();
+               while (it.hasNext()) {
+                       if (nameTables.get(it.next().getName()) != null) {
+                               generate = false;
+                               break;
+                       }
+               }
 
-        if (generate) {
-            logger.info("No schema detected, will create mapped tables");
-            generate(dataNode);
-        }
-        else {
-            logger.info("Full or partial schema detected, skipping tables 
creation");
-        }
-    }
+               if (generate) {
+                       logger.info("No schema detected, will create mapped 
tables");
+                       generate(dataNode);
+               } else {
+                       logger.info("Full or partial schema detected, skipping 
tables creation");
+               }
+       }
 
-    private void generate(DataNode dataNode) {
-        Collection<DataMap> map = dataNode.getDataMaps();
-        Iterator<DataMap> iterator = map.iterator();
-        while (iterator.hasNext()) {
-            DbGenerator gen = new DbGenerator(dataNode.getAdapter(), 
iterator.next(), 
-                    dataNode.getJdbcEventLogger());
-            gen.setShouldCreateTables(true);
-            gen.setShouldDropTables(false);
-            gen.setShouldCreateFKConstraints(true);
-            gen.setShouldCreatePKSupport(true);
-            gen.setShouldDropPKSupport(false);
-            try {
-                gen.runGenerator(dataNode.getDataSource());
-            }
-            catch (Exception e) {
-                throw new CayenneRuntimeException(e);
-            }
-        }
-    }
+       private void generate(DataNode dataNode) {
+               Collection<DataMap> map = dataNode.getDataMaps();
+               Iterator<DataMap> iterator = map.iterator();
+               while (iterator.hasNext()) {
+                       DbGenerator gen = new DbGenerator(dataNode.getAdapter(), iterator.next(), dataNode.getJdbcEventLogger());
+                       gen.setShouldCreateTables(true);
+                       gen.setShouldDropTables(false);
+                       gen.setShouldCreateFKConstraints(true);
+                       gen.setShouldCreatePKSupport(true);
+                       gen.setShouldDropPKSupport(false);
+                       try {
+                               gen.runGenerator(dataNode.getDataSource());
+                       } catch (Exception e) {
+                               throw new CayenneRuntimeException(e);
+                       }
+               }
+       }
 
-    /**
-     * Returns all the table names in database.
-     * 
-     * @throws SQLException
-     */
-    protected Map<String, Boolean> getNameTablesInDB(DataNode dataNode)
-            throws SQLException {
-        String tableLabel = dataNode.getAdapter().tableTypeForTable();
-        Connection con = null;
-        Map<String, Boolean> nameTables = new HashMap<String, Boolean>();
-        con = dataNode.getDataSource().getConnection();
+       /**
+        * Returns all the table names in database.
+        * 
+        * @throws SQLException
+        */
+       protected Map<String, Boolean> getNameTablesInDB(DataNode dataNode) throws SQLException {
+               String tableLabel = dataNode.getAdapter().tableTypeForTable();
+               Map<String, Boolean> nameTables = new HashMap<String, Boolean>();
 
-        try {
-            ResultSet rs = con.getMetaData().getTables(null, null, "%", new String[] {
-                tableLabel
-            });
+               try (Connection con = dataNode.getDataSource().getConnection();) {
 
-            try {
+                       try (ResultSet rs = con.getMetaData().getTables(null, null, "%", new String[] { tableLabel });) {
 
-                while (rs.next()) {
-                    String name = rs.getString("TABLE_NAME");
-                    nameTables.put(name, false);
-                }
-            }
-            finally {
-                rs.close();
-            }
+                               while (rs.next()) {
+                                       String name = rs.getString("TABLE_NAME");
+                                       nameTables.put(name, false);
+                               }
+                       }
+               }
 
-        }
-        finally {
-
-            con.close();
-        }
-        return nameTables;
-    }
+               return nameTables;
+       }
 }

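For context on the try-with-resources form adopted in getNameTablesInDB() above, here is a minimal standalone sketch of the same kind of JDBC metadata read. The DataSource parameter, the class name, and the "TABLE" type label are assumptions made for the illustration, not taken from the patch.

import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.Map;

import javax.sql.DataSource;

class TableNameSketch {

    // Collects table names reported by DatabaseMetaData.getTables(); both the
    // Connection and the ResultSet are closed automatically, in reverse order,
    // when the try block exits, even if getTables() or next() throws.
    static Map<String, Boolean> readTableNames(DataSource dataSource) throws SQLException {
        Map<String, Boolean> names = new HashMap<String, Boolean>();
        try (Connection con = dataSource.getConnection();
                ResultSet rs = con.getMetaData().getTables(null, null, "%", new String[] { "TABLE" })) {
            while (rs.next()) {
                names.put(rs.getString("TABLE_NAME"), false);
            }
        }
        return names;
    }
}
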
http://git-wip-us.apache.org/repos/asf/cayenne/blob/26d8434d/cayenne-server/src/main/java/org/apache/cayenne/access/dbsync/SchemaAnalyzer.java
----------------------------------------------------------------------
diff --git a/cayenne-server/src/main/java/org/apache/cayenne/access/dbsync/SchemaAnalyzer.java b/cayenne-server/src/main/java/org/apache/cayenne/access/dbsync/SchemaAnalyzer.java
index 86e9110..86f29c8 100644
--- a/cayenne-server/src/main/java/org/apache/cayenne/access/dbsync/SchemaAnalyzer.java
+++ b/cayenne-server/src/main/java/org/apache/cayenne/access/dbsync/SchemaAnalyzer.java
@@ -36,189 +36,171 @@ import org.apache.cayenne.map.DbEntity;
  */
 class SchemaAnalyzer {
 
-    private Map<String, String> mapTableInDB;
-    private List<String> tableNoInDB;
-    private Map<String, Collection<String>> nameSchemaMap;
-    private Map<String, Collection<String>> schemaNameMap;
-    private Map<Map<String, String>, Collection<DbAttribute>> entityTables;
-    private String errorMessage;
-
-    SchemaAnalyzer() {
-        errorMessage = null;
-        mapTableInDB = new HashMap<String, String>();
-        tableNoInDB = new ArrayList<String>();
-        nameSchemaMap = new HashMap<String, Collection<String>>();
-        schemaNameMap = new HashMap<String, Collection<String>>();
-        entityTables = new HashMap<Map<String, String>, Collection<DbAttribute>>();
-    }
-
-    public List<String> getTableNoInDB() {
-        return tableNoInDB;
-    }
-
-    public void compareColumns(DatabaseMetaData md) throws SQLException {
-
-        for (Map.Entry<String, String> map : mapTableInDB.entrySet()) {
-
-            String schema = map.getValue();
-            String name = map.getKey();
-
-            ResultSet rs = md.getColumns(null, schema, name, null);
-            try {
-                Map<String, String> schemaName = new HashMap<String, String>();
-                schemaName.put(name, schema);
-                Collection<DbAttribute> atribute = entityTables.get(schemaName);
-                if (atribute == null) {
-                    schemaName.remove(name);
-                    schemaName.put(name, null);
-                    atribute = entityTables.get(schemaName);
-                }
-                if (atribute != null && rs.getFetchSize() != 0) {
-                    int countColumn = 0;
-                    int isInEntity = 0;
-                    while (rs.next()) {
-                        countColumn++;
-                        String columnName = rs.getString("COLUMN_NAME");
-                        for (DbAttribute attr : atribute) {
-
-                            if (attr.getName().equalsIgnoreCase(columnName)) {
-                                isInEntity++;
-                                continue;
-                            }
-                        }
-                    }
-
-                    if (countColumn != atribute.size()) {
-                        errorMessage = "different number of columns in table " + name;
-                        continue;
-                    }
-                    if (countColumn != isInEntity && errorMessage == null) {
-                        errorMessage = "no columns in table "
-                                + name
-                                + " or does not match the type of column";
-                        continue;
-                    }
-
-                }
-            }
-            finally {
-                rs.close();
-            }
-        }
-
-    }
-
-    public boolean compareTables(DatabaseMetaData md, Collection<DbEntity> entities) {
-
-        boolean isIncluded = true;
-        for (DbEntity ent : entities) {
-
-            String name = ent.getName();
-            String schema = ent.getSchema();
-            Collection<DbAttribute> atributes = ent.getAttributes();
-
-            if (schema != null) {
-                if (schemaNameMap.get(schema) != null) {
-                    Collection<String> names = schemaNameMap.get(schema);
-                    if (names.contains(name)) {
-                         mapTableInDB.put(name, schema);
-                    } else {
-                        tableNoInDB.add(name);
-                    }
-                } else {
-                    isIncluded = false;
-                    errorMessage = "no schema " + schema + " in db";
-                    break;
-                }
-            }
-            else {
-                if (nameSchemaMap.get(name) != null
-                        || !ent.getDataMap().isQuotingSQLIdentifiers()
-                        && (nameSchemaMap.get(name.toLowerCase()) != null || nameSchemaMap
-                                .get(name.toUpperCase()) != null)) {
-                    Collection<String> sc = nameSchemaMap.get(name);
-                    if (sc == null) {
-                        if (nameSchemaMap.get(name.toLowerCase()) != null) {
-                            sc = nameSchemaMap.get(name.toLowerCase());
-                        }
-                        else {
-                            sc = nameSchemaMap.get(name.toUpperCase());
-                        }
-                    }
-
-                    if (sc.size() == 1) {
-                        mapTableInDB.put(name, sc.iterator().next());
-                    }
-                    else {
-                        errorMessage = " enter the schema. Table found in the schemas: ";
-                        Iterator<String> it = sc.iterator();
-                        String names = "";
-                        while (it.hasNext()) {
-                            names += it.next() + ", ";
-                        }
-                        errorMessage = errorMessage + names;
-                    }
-                }
-                else {
-                    tableNoInDB.add(name);
-                }
-            }
-            Map<String, String> schemaName = new HashMap<String, String>();
-            schemaName.put(name, schema);
-            entityTables.put(schemaName, atributes);
-        }
-        return isIncluded;
-    }
-
-    public void analyzeSchemas(List<String> schemas, DatabaseMetaData md)
-            throws SQLException {
-
-        if (schemas.size() == 0) {
-            schemas.add("%");
-        }
-        for (String schema : schemas) {
-            ResultSet tables = md.getTables(null, schema, null, null);
-
-            Collection<String> tableInSchema = new ArrayList<String>();
-            try {
-                while (tables.next()) {
-                    String name = tables.getString("TABLE_NAME");
-                    if (name == null || name.startsWith("BIN$")) {
-                        continue;
-                    }
-
-                    tableInSchema.add(name);
-                    if (nameSchemaMap.get(name) != null) {
-                        Collection<String> sc = nameSchemaMap.get(name);
-                        Iterator<String> iSc = sc.iterator();
-                        boolean inSchema = false;
-                        while (iSc.hasNext()) {
-                            if (iSc.next().equals(schema)) {
-                                inSchema = true;
-                            }
-                        }
-                        if (!inSchema) {
-                            sc.add(schema);
-                            nameSchemaMap.remove(name);
-                            nameSchemaMap.put(name, sc);
-                        }
-
-                    }
-                    else {
-                        Collection<String> sc = new ArrayList<String>();
-                        sc.add(schema);
-                        nameSchemaMap.put(name, sc);
-                    }
-                }
-                schemaNameMap.put(schema, tableInSchema);
-            }
-            finally {
-                tables.close();
-            }
-        }
-    }
-
-    public String getErrorMessage() {
-        return errorMessage;
-    }
+       private Map<String, String> mapTableInDB;
+       private List<String> tableNoInDB;
+       private Map<String, Collection<String>> nameSchemaMap;
+       private Map<String, Collection<String>> schemaNameMap;
+       private Map<Map<String, String>, Collection<DbAttribute>> entityTables;
+       private String errorMessage;
+
+       SchemaAnalyzer() {
+               errorMessage = null;
+               mapTableInDB = new HashMap<String, String>();
+               tableNoInDB = new ArrayList<String>();
+               nameSchemaMap = new HashMap<String, Collection<String>>();
+               schemaNameMap = new HashMap<String, Collection<String>>();
+               entityTables = new HashMap<Map<String, String>, Collection<DbAttribute>>();
+       }
+
+       public List<String> getTableNoInDB() {
+               return tableNoInDB;
+       }
+
+       public void compareColumns(DatabaseMetaData md) throws SQLException {
+
+               for (Map.Entry<String, String> map : mapTableInDB.entrySet()) {
+
+                       String schema = map.getValue();
+                       String name = map.getKey();
+
+                       try (ResultSet rs = md.getColumns(null, schema, name, null);) {
+                               Map<String, String> schemaName = new HashMap<String, String>();
+                               schemaName.put(name, schema);
+                               Collection<DbAttribute> atribute = entityTables.get(schemaName);
+                               if (atribute == null) {
+                                       schemaName.remove(name);
+                                       schemaName.put(name, null);
+                                       atribute = entityTables.get(schemaName);
+                               }
+                               if (atribute != null && rs.getFetchSize() != 0) {
+                                       int countColumn = 0;
+                                       int isInEntity = 0;
+                                       while (rs.next()) {
+                                               countColumn++;
+                                               String columnName = rs.getString("COLUMN_NAME");
+                                               for (DbAttribute attr : atribute) {
+
+                                                       if (attr.getName().equalsIgnoreCase(columnName)) {
+                                                               isInEntity++;
+                                                               continue;
+                                                       }
+                                               }
+                                       }
+
+                                       if (countColumn != atribute.size()) {
+                                               errorMessage = "different number of columns in table " + name;
+                                               continue;
+                                       }
+                                       if (countColumn != isInEntity && errorMessage == null) {
+                                               errorMessage = "no columns in table " + name + " or does not match the type of column";
+                                               continue;
+                                       }
+                               }
+                       }
+               }
+
+       }
+
+       public boolean compareTables(DatabaseMetaData md, Collection<DbEntity> entities) {
+
+               boolean isIncluded = true;
+               for (DbEntity ent : entities) {
+
+                       String name = ent.getName();
+                       String schema = ent.getSchema();
+                       Collection<DbAttribute> atributes = ent.getAttributes();
+
+                       if (schema != null) {
+                               if (schemaNameMap.get(schema) != null) {
+                                       Collection<String> names = schemaNameMap.get(schema);
+                                       if (names.contains(name)) {
+                                               mapTableInDB.put(name, schema);
+                                       } else {
+                                               tableNoInDB.add(name);
+                                       }
+                               } else {
+                                       isIncluded = false;
+                                       errorMessage = "no schema " + schema + " in db";
+                                       break;
+                               }
+                       } else {
+                               if (nameSchemaMap.get(name) != null
+                                               || !ent.getDataMap().isQuotingSQLIdentifiers()
+                                               && (nameSchemaMap.get(name.toLowerCase()) != null || nameSchemaMap.get(name.toUpperCase()) != null)) {
+                                       Collection<String> sc = nameSchemaMap.get(name);
+                                       if (sc == null) {
+                                               if (nameSchemaMap.get(name.toLowerCase()) != null) {
+                                                       sc = nameSchemaMap.get(name.toLowerCase());
+                                               } else {
+                                                       sc = nameSchemaMap.get(name.toUpperCase());
+                                               }
+                                       }
+
+                                       if (sc.size() == 1) {
+                                               mapTableInDB.put(name, sc.iterator().next());
+                                       } else {
+                                               errorMessage = " enter the schema. Table found in the schemas: ";
+                                               Iterator<String> it = sc.iterator();
+                                               String names = "";
+                                               while (it.hasNext()) {
+                                                       names += it.next() + ", ";
+                                               }
+                                               errorMessage = errorMessage + names;
+                                       }
+                               } else {
+                                       tableNoInDB.add(name);
+                               }
+                       }
+                       Map<String, String> schemaName = new HashMap<String, String>();
+                       schemaName.put(name, schema);
+                       entityTables.put(schemaName, atributes);
+               }
+               return isIncluded;
+       }
+
+       public void analyzeSchemas(List<String> schemas, DatabaseMetaData md) throws SQLException {
+
+               if (schemas.size() == 0) {
+                       schemas.add("%");
+               }
+               for (String schema : schemas) {
+
+                       Collection<String> tableInSchema = new ArrayList<String>();
+                       try (ResultSet tables = md.getTables(null, schema, null, null);) {
+                               while (tables.next()) {
+                                       String name = tables.getString("TABLE_NAME");
+                                       if (name == null || name.startsWith("BIN$")) {
+                                               continue;
+                                       }
+
+                                       tableInSchema.add(name);
+                                       if (nameSchemaMap.get(name) != null) {
+                                               Collection<String> sc = nameSchemaMap.get(name);
+                                               Iterator<String> iSc = sc.iterator();
+                                               boolean inSchema = false;
+                                               while (iSc.hasNext()) {
+                                                       if (iSc.next().equals(schema)) {
+                                                               inSchema = true;
+                                                       }
+                                               }
+                                               if (!inSchema) {
+                                                       sc.add(schema);
+                                                       nameSchemaMap.remove(name);
+                                                       nameSchemaMap.put(name, sc);
+                                               }
+
+                                       } else {
+                                               Collection<String> sc = new ArrayList<String>();
+                                               sc.add(schema);
+                                               nameSchemaMap.put(name, sc);
+                                       }
+                               }
+                               schemaNameMap.put(schema, tableInSchema);
+                       }
+               }
+       }
+
+       public String getErrorMessage() {
+               return errorMessage;
+       }
 }

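To make the column check in compareColumns() above easier to follow, here is a minimal standalone sketch of reading the column names of one table through DatabaseMetaData.getColumns(). The class and method names are invented for the example; the case-insensitive comparison mirrors the equalsIgnoreCase() check above.

import java.sql.DatabaseMetaData;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.Collection;
import java.util.HashSet;
import java.util.Set;

class ColumnCheckSketch {

    // Returns true when every expected column name is reported by the driver
    // for the given table; the ResultSet is closed automatically.
    static boolean hasColumns(DatabaseMetaData md, String schema, String table,
            Collection<String> expected) throws SQLException {
        Set<String> found = new HashSet<String>();
        try (ResultSet rs = md.getColumns(null, schema, table, null)) {
            while (rs.next()) {
                found.add(rs.getString("COLUMN_NAME").toLowerCase());
            }
        }
        for (String name : expected) {
            if (!found.contains(name.toLowerCase())) {
                return false;
            }
        }
        return true;
    }
}
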
http://git-wip-us.apache.org/repos/asf/cayenne/blob/26d8434d/cayenne-server/src/main/java/org/apache/cayenne/access/dbsync/ThrowOnPartialSchemaStrategy.java
----------------------------------------------------------------------
diff --git a/cayenne-server/src/main/java/org/apache/cayenne/access/dbsync/ThrowOnPartialSchemaStrategy.java b/cayenne-server/src/main/java/org/apache/cayenne/access/dbsync/ThrowOnPartialSchemaStrategy.java
index b7cf1ea..051feab 100644
--- a/cayenne-server/src/main/java/org/apache/cayenne/access/dbsync/ThrowOnPartialSchemaStrategy.java
+++ b/cayenne-server/src/main/java/org/apache/cayenne/access/dbsync/ThrowOnPartialSchemaStrategy.java
@@ -37,91 +37,71 @@ import org.apache.commons.logging.LogFactory;
  */
 public class ThrowOnPartialSchemaStrategy extends BaseSchemaUpdateStrategy {
 
-    final static Log logger = LogFactory.getLog(ThrowOnPartialSchemaStrategy.class);
-
-    /**
-     * @since 3.0
-     */
-    @Override
-    protected void processSchemaUpdate(DataNode dataNode) throws SQLException {
-
-        SchemaAnalyzer analyzer = new SchemaAnalyzer();
-
-        List<String> schemas = new ArrayList<String>();
-        DatabaseMetaData md = null;
-        Connection connection = null;
-        try {
-            connection = dataNode.getDataSource().getConnection();
-            
-            try {
-                md = connection.getMetaData();
-                ResultSet rs = md.getSchemas();
-    
-                try {
-                    while (rs.next()) {
-                        String schemaName = rs.getString(1);
-                        schemas.add(schemaName);
-                    }
-                }
-                finally {
-                    rs.close();
-                }
-            }
-            finally {
-                connection.close();
-            }
-            analyzer.analyzeSchemas(schemas, md);
-        }
-        catch (Exception e) {
-            logger.debug("Exception analyzing schema, ignoring", e);
-        }
-
-        Collection<DbEntity> entities = dataNode.getEntityResolver().getDbEntities();
-
-        boolean isIncluded = analyzer.compareTables(md, entities);
-
-        if (isIncluded && analyzer.getErrorMessage() == null) {
-            try {
-                analyzer.compareColumns(md);
-            }
-            catch (SQLException e) {
-                logger.debug("Exception analyzing schema, ignoring", e);
-            }
-        }
-
-        processSchemaUpdate(dataNode, analyzer.getTableNoInDB(), analyzer
-                .getErrorMessage(), entities.size());
-    }
-
-    protected void processSchemaUpdate(
-            DataNode dataNode,
-            List<String> mergerOnlyTable,
-            String errorMessage,
-            int entitiesSize) throws SQLException {
-
-        if (mergerOnlyTable.size() == 0 && errorMessage == null) {
-            logger.info("Full schema is present");
-        }
-        else {
-            logger.info("Error - missing or partial schema detected");
-            StringBuilder buffer = new StringBuilder("Schema mismatch detected");
-
-            if (errorMessage != null) {
-                buffer.append(": ").append(errorMessage);
-            }
-            else if (mergerOnlyTable.size() == entitiesSize) {
-                buffer.append(": no schema found");
-            }
-            else {
-                if (mergerOnlyTable.size() > 0) {
-                    buffer
-                            .append(": missing table '")
-                            .append(mergerOnlyTable.get(0))
-                            .append('\'');
-                }
-            }
-
-            throw new CayenneRuntimeException(buffer.toString());
-        }
-    }
+       final static Log logger = LogFactory.getLog(ThrowOnPartialSchemaStrategy.class);
+
+       /**
+        * @since 3.0
+        */
+       @Override
+       protected void processSchemaUpdate(DataNode dataNode) throws SQLException {
+
+               SchemaAnalyzer analyzer = new SchemaAnalyzer();
+
+               List<String> schemas = new ArrayList<String>();
+               DatabaseMetaData md = null;
+               try {
+
+                       try (Connection connection = dataNode.getDataSource().getConnection();) {
+                               md = connection.getMetaData();
+
+                               try (ResultSet rs = md.getSchemas();) {
+                                       while (rs.next()) {
+                                               String schemaName = rs.getString(1);
+                                               schemas.add(schemaName);
+                                       }
+                               }
+                       }
+
+                       analyzer.analyzeSchemas(schemas, md);
+               } catch (Exception e) {
+                       logger.debug("Exception analyzing schema, ignoring", e);
+               }
+
+               Collection<DbEntity> entities = dataNode.getEntityResolver().getDbEntities();
+
+               boolean isIncluded = analyzer.compareTables(md, entities);
+
+               if (isIncluded && analyzer.getErrorMessage() == null) {
+                       try {
+                               analyzer.compareColumns(md);
+                       } catch (SQLException e) {
+                               logger.debug("Exception analyzing schema, ignoring", e);
+                       }
+               }
+
+               processSchemaUpdate(dataNode, analyzer.getTableNoInDB(), analyzer.getErrorMessage(), entities.size());
+       }
+
+       protected void processSchemaUpdate(DataNode dataNode, List<String> mergerOnlyTable, String errorMessage,
+                       int entitiesSize) throws SQLException {
+
+               if (mergerOnlyTable.size() == 0 && errorMessage == null) {
+                       logger.info("Full schema is present");
+               } else {
+                       logger.info("Error - missing or partial schema detected");
+                       StringBuilder buffer = new StringBuilder("Schema mismatch detected");
+
+                       if (errorMessage != null) {
+                               buffer.append(": ").append(errorMessage);
+                       } else if (mergerOnlyTable.size() == entitiesSize) {
+                               buffer.append(": no schema found");
+                       } else {
+                               if (mergerOnlyTable.size() > 0) {
+                                       buffer.append(": missing table '").append(mergerOnlyTable.get(0)).append('\'');
+                               }
+                       }
+
+                       throw new CayenneRuntimeException(buffer.toString());
+               }
+       }
 }

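The same resource-handling shape, isolated for readability: a minimal sketch of reading schema names through DatabaseMetaData.getSchemas() inside nested try-with-resources blocks. The DataSource parameter and class name are invented for the example; the sketch finishes its metadata work before the connection's try block closes.

import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;

import javax.sql.DataSource;

class SchemaNameSketch {

    // Reads the schema names reported by the driver; column 1 of the
    // getSchemas() result set is TABLE_SCHEM in the JDBC metadata contract.
    static List<String> readSchemaNames(DataSource dataSource) throws SQLException {
        List<String> schemas = new ArrayList<String>();
        try (Connection connection = dataSource.getConnection()) {
            DatabaseMetaData md = connection.getMetaData();
            try (ResultSet rs = md.getSchemas()) {
                while (rs.next()) {
                    schemas.add(rs.getString(1));
                }
            }
        }
        return schemas;
    }
}
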
http://git-wip-us.apache.org/repos/asf/cayenne/blob/26d8434d/cayenne-server/src/main/java/org/apache/cayenne/access/jdbc/BatchAction.java
----------------------------------------------------------------------
diff --git a/cayenne-server/src/main/java/org/apache/cayenne/access/jdbc/BatchAction.java b/cayenne-server/src/main/java/org/apache/cayenne/access/jdbc/BatchAction.java
index ca3d1ef..faff51c 100644
--- a/cayenne-server/src/main/java/org/apache/cayenne/access/jdbc/BatchAction.java
+++ b/cayenne-server/src/main/java/org/apache/cayenne/access/jdbc/BatchAction.java
@@ -49,226 +49,216 @@ import org.apache.cayenne.query.InsertBatchQuery;
  */
 public class BatchAction extends BaseSQLAction {
 
-    protected boolean runningAsBatch;
-    protected BatchQuery query;
-    protected RowDescriptor keyRowDescriptor;
-
-    private static void bind(DbAdapter adapter, PreparedStatement statement, ParameterBinding[] bindings)
-            throws SQLException, Exception {
-
-        for (ParameterBinding b : bindings) {
-            if (!b.isExcluded()) {
-                adapter.bindParameter(statement, b.getValue(), b.getStatementPosition(), b.getAttribute().getType(), b
-                        .getAttribute().getScale());
-            }
-        }
-    }
-
-    /**
-     * @since 4.0
-     */
-    public BatchAction(BatchQuery query, DataNode dataNode, boolean runningAsBatch) {
-        super(dataNode);
-        this.query = query;
-        this.runningAsBatch = runningAsBatch;
-    }
-
-    /**
-     * @return Query which originated this action
-     */
-    public BatchQuery getQuery() {
-        return query;
-    }
-
-    @Override
-    public void performAction(Connection connection, OperationObserver observer) throws SQLException, Exception {
-
-        BatchTranslator translator = createTranslator();
-        boolean generatesKeys = hasGeneratedKeys();
-
-        if (runningAsBatch && !generatesKeys) {
-            runAsBatch(connection, translator, observer);
-        } else {
-            runAsIndividualQueries(connection, translator, observer, generatesKeys);
-        }
-    }
-
-    protected BatchTranslator createTranslator() throws CayenneException {
-        return dataNode.batchTranslator(query, null);
-    }
-
-    protected void runAsBatch(Connection con, BatchTranslator translator, OperationObserver delegate)
-            throws SQLException, Exception {
-
-        String sql = translator.getSql();
-        JdbcEventLogger logger = dataNode.getJdbcEventLogger();
-        boolean isLoggable = logger.isLoggable();
-
-        // log batch SQL execution
-        logger.logQuery(sql, Collections.EMPTY_LIST);
-
-        // run batch
-
-        DbAdapter adapter = dataNode.getAdapter();
-        PreparedStatement statement = con.prepareStatement(sql);
-        try {
-            for (BatchQueryRow row : query.getRows()) {
-
-                ParameterBinding[] bindings = translator.updateBindings(row);
-                logger.logQueryParameters("batch bind", bindings);
-                bind(adapter, statement, bindings);
-
-                statement.addBatch();
-            }
-
-            // execute the whole batch
-            int[] results = statement.executeBatch();
-            delegate.nextBatchCount(query, results);
-
-            if (isLoggable) {
-                int totalUpdateCount = 0;
-                for (int result : results) {
-
-                    // this means Statement.SUCCESS_NO_INFO or
-                    // Statement.EXECUTE_FAILED
-                    if (result < 0) {
-                        totalUpdateCount = Statement.SUCCESS_NO_INFO;
-                        break;
-                    }
-
-                    totalUpdateCount += result;
-                }
-
-                logger.logUpdateCount(totalUpdateCount);
-            }
-        } finally {
-            try {
-                statement.close();
-            } catch (Exception e) {
-            }
-        }
-    }
-
-    /**
-     * Executes batch as individual queries over the same prepared statement.
-     */
-    protected void runAsIndividualQueries(Connection connection, BatchTranslator translator,
-            OperationObserver delegate, boolean generatesKeys) throws SQLException, Exception {
-
-        JdbcEventLogger logger = dataNode.getJdbcEventLogger();
-        boolean useOptimisticLock = query.isUsingOptimisticLocking();
-
-        String queryStr = translator.getSql();
-
-        // log batch SQL execution
-        logger.logQuery(queryStr, Collections.EMPTY_LIST);
-
-        // run batch queries one by one
-
-        DbAdapter adapter = dataNode.getAdapter();
-        PreparedStatement statement = (generatesKeys) ? connection.prepareStatement(queryStr,
-                Statement.RETURN_GENERATED_KEYS) : connection.prepareStatement(queryStr);
-        try {
-            for (BatchQueryRow row : query.getRows()) {
-
-                ParameterBinding[] bindings = translator.updateBindings(row);
-                logger.logQueryParameters("bind", bindings);
-
-                bind(adapter, statement, bindings);
-
-                int updated = statement.executeUpdate();
-                if (useOptimisticLock && updated != 1) {
-                    throw new OptimisticLockException(row.getObjectId(), query.getDbEntity(), queryStr,
-                            row.getQualifier());
-                }
-
-                delegate.nextCount(query, updated);
-
-                if (generatesKeys) {
-                    processGeneratedKeys(statement, delegate, row);
-                }
-
-                logger.logUpdateCount(updated);
-            }
-        } finally {
-            try {
-                statement.close();
-            } catch (Exception e) {
-            }
-        }
-    }
-
-    /**
-     * Returns whether BatchQuery generates any keys.
-     */
-    protected boolean hasGeneratedKeys() {
-        // see if we are configured to support generated keys
-        if (!dataNode.getAdapter().supportsGeneratedKeys()) {
-            return false;
-        }
-
-        // see if the query needs them
-        if (query instanceof InsertBatchQuery) {
-
-            // see if any of the generated attributes is PK
-            for (final DbAttribute attr : query.getDbEntity().getGeneratedAttributes()) {
-                if (attr.isPrimaryKey()) {
-                    return true;
-                }
-            }
-        }
-
-        return false;
-    }
-
-    /**
-     * Implements generated keys extraction supported in JDBC 3.0 specification.
-     * 
-     * @since 4.0
-     */
-    @SuppressWarnings({ "rawtypes", "unchecked" })
-    protected void processGeneratedKeys(Statement statement, OperationObserver observer, BatchQueryRow row)
-            throws SQLException, CayenneException {
-
-        ResultSet keysRS = statement.getGeneratedKeys();
-
-        // TODO: andrus, 7/4/2007 - (1) get the type of meaningful PK's from
-        // their
-        // ObjAttributes; (2) use a different form of Statement.execute -
-        // "execute(String,String[])" to be able to map generated column names
-        // (this way
-        // we can support multiple columns.. although need to check how well
-        // this works
-        // with most common drivers)
-
-        RowDescriptorBuilder builder = new RowDescriptorBuilder();
-
-        if (this.keyRowDescriptor == null) {
-            // attempt to figure out the right descriptor from the mapping...
-            Collection<DbAttribute> generated = query.getDbEntity().getGeneratedAttributes();
-            if (generated.size() == 1) {
-                DbAttribute key = generated.iterator().next();
-
-                ColumnDescriptor[] columns = new ColumnDescriptor[1];
-
-                // use column name from result set, but type and Java class from
-                // DB
-                // attribute
-                columns[0] = new ColumnDescriptor(keysRS.getMetaData(), 1);
-                columns[0].setJdbcType(key.getType());
-                columns[0].setJavaClass(TypesMapping.getJavaBySqlType(key.getType()));
-                builder.setColumns(columns);
-            } else {
-                builder.setResultSet(keysRS);
-            }
-
-            this.keyRowDescriptor = builder.getDescriptor(dataNode.getAdapter().getExtendedTypes());
-        }
-
-        RowReader<?> rowReader = dataNode.rowReader(keyRowDescriptor, query.getMetaData(dataNode.getEntityResolver()),
-                Collections.<ObjAttribute, ColumnDescriptor> emptyMap());
-        ResultIterator iterator = new JDBCResultIterator(null, keysRS, rowReader);
-
-        observer.nextGeneratedRows(query, iterator, row.getObjectId());
-    }
+       protected boolean runningAsBatch;
+       protected BatchQuery query;
+       protected RowDescriptor keyRowDescriptor;
+
+       private static void bind(DbAdapter adapter, PreparedStatement statement, ParameterBinding[] bindings)
+                       throws SQLException, Exception {
+
+               for (ParameterBinding b : bindings) {
+                       if (!b.isExcluded()) {
+                               adapter.bindParameter(statement, b.getValue(), b.getStatementPosition(), b.getAttribute().getType(), b
+                                               .getAttribute().getScale());
+                       }
+               }
+       }
+
+       /**
+        * @since 4.0
+        */
+       public BatchAction(BatchQuery query, DataNode dataNode, boolean runningAsBatch) {
+               super(dataNode);
+               this.query = query;
+               this.runningAsBatch = runningAsBatch;
+       }
+
+       /**
+        * @return Query which originated this action
+        */
+       public BatchQuery getQuery() {
+               return query;
+       }
+
+       @Override
+       public void performAction(Connection connection, OperationObserver observer) throws SQLException, Exception {
+
+               BatchTranslator translator = createTranslator();
+               boolean generatesKeys = hasGeneratedKeys();
+
+               if (runningAsBatch && !generatesKeys) {
+                       runAsBatch(connection, translator, observer);
+               } else {
+                       runAsIndividualQueries(connection, translator, observer, generatesKeys);
+               }
+       }
+
+       protected BatchTranslator createTranslator() throws CayenneException {
+               return dataNode.batchTranslator(query, null);
+       }
+
+       protected void runAsBatch(Connection con, BatchTranslator translator, OperationObserver delegate)
+                       throws SQLException, Exception {
+
+               String sql = translator.getSql();
+               JdbcEventLogger logger = dataNode.getJdbcEventLogger();
+               boolean isLoggable = logger.isLoggable();
+
+               // log batch SQL execution
+               logger.logQuery(sql, Collections.EMPTY_LIST);
+
+               // run batch
+
+               DbAdapter adapter = dataNode.getAdapter();
+
+               try (PreparedStatement statement = con.prepareStatement(sql);) {
+                       for (BatchQueryRow row : query.getRows()) {
+
+                               ParameterBinding[] bindings = translator.updateBindings(row);
+                               logger.logQueryParameters("batch bind", bindings);
+                               bind(adapter, statement, bindings);
+
+                               statement.addBatch();
+                       }
+
+                       // execute the whole batch
+                       int[] results = statement.executeBatch();
+                       delegate.nextBatchCount(query, results);
+
+                       if (isLoggable) {
+                               int totalUpdateCount = 0;
+                               for (int result : results) {
+
+                                       // this means Statement.SUCCESS_NO_INFO or
+                                       // Statement.EXECUTE_FAILED
+                                       if (result < 0) {
+                                               totalUpdateCount = Statement.SUCCESS_NO_INFO;
+                                               break;
+                                       }
+
+                                       totalUpdateCount += result;
+                               }
+
+                               logger.logUpdateCount(totalUpdateCount);
+                       }
+               }
+       }
+
+       /**
+        * Executes batch as individual queries over the same prepared statement.
+        */
+       protected void runAsIndividualQueries(Connection connection, BatchTranslator translator,
+                       OperationObserver delegate, boolean generatesKeys) throws SQLException, Exception {
+
+               JdbcEventLogger logger = dataNode.getJdbcEventLogger();
+               boolean useOptimisticLock = query.isUsingOptimisticLocking();
+
+               String queryStr = translator.getSql();
+
+               // log batch SQL execution
+               logger.logQuery(queryStr, Collections.EMPTY_LIST);
+
+               // run batch queries one by one
+
+               DbAdapter adapter = dataNode.getAdapter();
+
+               try (PreparedStatement statement = (generatesKeys) ? connection.prepareStatement(queryStr,
+                               Statement.RETURN_GENERATED_KEYS) : connection.prepareStatement(queryStr);) {
+                       for (BatchQueryRow row : query.getRows()) {
+
+                               ParameterBinding[] bindings = translator.updateBindings(row);
+                               logger.logQueryParameters("bind", bindings);
+
+                               bind(adapter, statement, bindings);
+
+                               int updated = statement.executeUpdate();
+                               if (useOptimisticLock && updated != 1) {
+                                       throw new OptimisticLockException(row.getObjectId(), query.getDbEntity(), queryStr,
+                                                       row.getQualifier());
+                               }
+
+                               delegate.nextCount(query, updated);
+
+                               if (generatesKeys) {
+                                       processGeneratedKeys(statement, delegate, row);
+                               }
+
+                               logger.logUpdateCount(updated);
+                       }
+               }
+       }
+
+       /**
+        * Returns whether BatchQuery generates any keys.
+        */
+       protected boolean hasGeneratedKeys() {
+               // see if we are configured to support generated keys
+               if (!dataNode.getAdapter().supportsGeneratedKeys()) {
+                       return false;
+               }
+
+               // see if the query needs them
+               if (query instanceof InsertBatchQuery) {
+
+                       // see if any of the generated attributes is PK
+                       for (final DbAttribute attr : query.getDbEntity().getGeneratedAttributes()) {
+                               if (attr.isPrimaryKey()) {
+                                       return true;
+                               }
+                       }
+               }
+
+               return false;
+       }
+
+       /**
+        * Implements generated keys extraction supported in JDBC 3.0 specification.
+        * 
+        * @since 4.0
+        */
+       @SuppressWarnings({ "rawtypes", "unchecked" })
+       protected void processGeneratedKeys(Statement statement, OperationObserver observer, BatchQueryRow row)
+                       throws SQLException, CayenneException {
+
+               ResultSet keysRS = statement.getGeneratedKeys();
+
+               // TODO: andrus, 7/4/2007 - (1) get the type of meaningful PK's from
+               // their
+               // ObjAttributes; (2) use a different form of Statement.execute -
+               // "execute(String,String[])" to be able to map generated column names
+               // (this way
+               // we can support multiple columns.. although need to check how well
+               // this works
+               // with most common drivers)
+
+               RowDescriptorBuilder builder = new RowDescriptorBuilder();
+
+               if (this.keyRowDescriptor == null) {
+                       // attempt to figure out the right descriptor from the mapping...
+                       Collection<DbAttribute> generated = query.getDbEntity().getGeneratedAttributes();
+                       if (generated.size() == 1) {
+                               DbAttribute key = generated.iterator().next();
+
+                               ColumnDescriptor[] columns = new ColumnDescriptor[1];
+
+                               // use column name from result set, but type and Java class from
+                               // DB
+                               // attribute
+                               columns[0] = new ColumnDescriptor(keysRS.getMetaData(), 1);
+                               columns[0].setJdbcType(key.getType());
+                               columns[0].setJavaClass(TypesMapping.getJavaBySqlType(key.getType()));
+                               builder.setColumns(columns);
+                       } else {
+                               builder.setResultSet(keysRS);
+                       }
+
+                       this.keyRowDescriptor = builder.getDescriptor(dataNode.getAdapter().getExtendedTypes());
+               }
+
+               RowReader<?> rowReader = dataNode.rowReader(keyRowDescriptor, query.getMetaData(dataNode.getEntityResolver()),
+                               Collections.<ObjAttribute, ColumnDescriptor> emptyMap());
+               ResultIterator iterator = new JDBCResultIterator(null, keysRS, rowReader);
+
+               observer.nextGeneratedRows(query, iterator, row.getObjectId());
+       }
 }

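As a final illustration of the prepared-statement handling in runAsBatch() above, a minimal standalone sketch of a JDBC batch update; the table, columns, and SQL string are invented for the example. A negative entry in the returned array stands for Statement.SUCCESS_NO_INFO or Statement.EXECUTE_FAILED, which is what the update-count logging above checks for.

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.List;

class BatchUpdateSketch {

    // Binds one row per name, queues it with addBatch(), and sends the whole
    // batch in a single executeBatch() round trip; the PreparedStatement is
    // closed automatically when the try block exits.
    static int[] renameAll(Connection con, List<String> names, int firstId) throws SQLException {
        String sql = "UPDATE ARTIST SET ARTIST_NAME = ? WHERE ARTIST_ID = ?";
        try (PreparedStatement statement = con.prepareStatement(sql)) {
            int id = firstId;
            for (String name : names) {
                statement.setString(1, name);
                statement.setInt(2, id++);
                statement.addBatch();
            }
            return statement.executeBatch();
        }
    }
}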