Modified: jackrabbit/oak/branches/1.0/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/BasicDocumentStoreTest.java
URL: http://svn.apache.org/viewvc/jackrabbit/oak/branches/1.0/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/BasicDocumentStoreTest.java?rev=1659616&r1=1659615&r2=1659616&view=diff
==============================================================================
--- jackrabbit/oak/branches/1.0/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/BasicDocumentStoreTest.java (original)
+++ jackrabbit/oak/branches/1.0/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/BasicDocumentStoreTest.java Fri Feb 13 17:22:26 2015
@@ -19,12 +19,23 @@ package org.apache.jackrabbit.oak.plugin
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
 
+import java.io.UnsupportedEncodingException;
+import java.sql.Connection;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Types;
 import java.util.ArrayList;
 import java.util.Collections;
+import java.util.HashSet;
 import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.UUID;
 
 import org.apache.jackrabbit.oak.plugins.document.mongo.MongoDocumentStore;
 import org.junit.Assume;
@@ -58,16 +69,30 @@ public class BasicDocumentStoreTest exte
     }
 
     @Test
-    public void testMaxId() {
+    public void testMaxIdAscii() {
+        // TODO see OAK-2395
+        Assume.assumeTrue(! super.dsname.contains("MSSql"));
+
+        int result = testMaxId(true);
+        assertTrue("needs to support keys of 512 bytes length, but only 
supports " + result, result >= 512);
+    }
+
+    @Test
+    public void testMaxIdNonAscii() {
+        testMaxId(false);
+    }
+
+    private int testMaxId(boolean ascii) {
         // TODO see OAK-1589
         Assume.assumeTrue(!(super.ds instanceof MongoDocumentStore));
         int min = 0;
         int max = 32768;
         int test = 0;
+        int last = 0;
 
         while (max - min >= 2) {
             test = (max + min) / 2;
-            String id = generateString(test);
+            String id = generateId(test, ascii);
             UpdateOp up = new UpdateOp(id, true);
             up.set("_id", id);
             boolean success = super.ds.create(Collection.NODES, Collections.singletonList(up));
@@ -75,26 +100,30 @@ public class BasicDocumentStoreTest exte
                 // check that we really can read it
                 NodeDocument findme = super.ds.find(Collection.NODES, id, 0);
                 assertNotNull("failed to retrieve previously stored document", 
findme);
+                assertEquals(id, findme.getId());
                 super.ds.remove(Collection.NODES, id);
                 min = test;
+                last = test;
             } else {
                 max = test;
             }
         }
 
-        LOG.info("max id length for " + super.dsname + " was " + test);
+        LOG.info("max " + (ascii ? "ASCII ('0')" : "non-ASCII (U+1F4A9)") + " 
id length for " + super.dsname + " was " + last);
+        return last;
     }
 
     @Test
     public void testMaxProperty() {
         int min = 0;
-        int max = 1024 * 1024 * 4; // 32M
+        int max = 1024 * 1024 * 8;
         int test = 0;
+        int last = 0;
 
         while (max - min >= 256) {
             test = (max + min) / 2;
             String id = this.getClass().getName() + ".testMaxProperty-" + test;
-            String pval = generateString(test);
+            String pval = generateString(test, true);
             UpdateOp up = new UpdateOp(id, true);
             up.set("_id", id);
             up.set("foo", pval);
@@ -105,12 +134,151 @@ public class BasicDocumentStoreTest exte
                 assertNotNull("failed to retrieve previously stored document", 
findme);
                 super.ds.remove(Collection.NODES, id);
                 min = test;
+                last = test;
             } else {
                 max = test;
             }
         }
 
-        LOG.info("max prop length for " + super.dsname + " was " + test);
+        LOG.info("max prop length for " + super.dsname + " was " + last);
+    }
+
+    @Test
+    public void testInterestingPropLengths() {
+        int lengths[] = { 1, 10, 100, 1000, 2000, 3000, 4000, 5000, 6000, 7000, 8000, 9000, 10000, 11000, 12000, 13000, 14000,
+                15000, 16000, 20000 };
+
+        for (int test : lengths) {
+            String id = this.getClass().getName() + ".testInterestingPropLengths-" + test;
+            String pval = generateString(test, true);
+            UpdateOp up = new UpdateOp(id, true);
+            up.set("_id", id);
+            up.set("foo", pval);
+            super.ds.remove(Collection.NODES, id);
+            boolean success = super.ds.create(Collection.NODES, Collections.singletonList(up));
+            assertTrue("failed to insert a document with property of length " + test + "(ASCII) in " + super.dsname, success);
+            super.ds.remove(Collection.NODES, id);
+        }
+
+        for (int test : lengths) {
+            String id = this.getClass().getName() + ".testInterestingPropLengths-" + test;
+            String pval = generateString(test, false);
+            UpdateOp up = new UpdateOp(id, true);
+            up.set("_id", id);
+            up.set("foo", pval);
+            super.ds.remove(Collection.NODES, id);
+            boolean success = super.ds.create(Collection.NODES, Collections.singletonList(up));
+            try {
+                assertTrue("failed to insert a document with property of 
length " + test
+                        + "(potentially non-ASCII, actual octet length in 
UTF-8: " + pval.getBytes("UTF-8").length + ") in "
+                        + super.dsname, success);
+            } catch (UnsupportedEncodingException e) {
+                // outch
+            }
+            super.ds.remove(Collection.NODES, id);
+        }
+    }
+
+    @Test
+    public void testModifiedMaxUpdate() {
+        String id = this.getClass().getName() + ".testModifiedMaxUpdate";
+        // create a test node
+        UpdateOp up = new UpdateOp(id, true);
+        up.set("_id", id);
+        up.set("_modified", 1000L);
+        boolean success = super.ds.create(Collection.NODES, Collections.singletonList(up));
+        assertTrue(success);
+        removeMe.add(id);
+
+        // update with smaller _modified
+        UpdateOp up2 = new UpdateOp(id, true);
+        up2.max("_modified", 100L);
+        up2.set("_id", id);
+        super.ds.findAndUpdate(Collection.NODES, up2);
+
+        super.ds.invalidateCache();
+
+        // this should find the document; will fail if the MAX operation wasn't applied to the indexed property
+        List<NodeDocument> results = super.ds.query(Collection.NODES, this.getClass().getName() + ".testModifiedMaxUpdatd", this.getClass().getName() + ".testModifiedMaxUpdatf", "_modified", 1000, 1);
+        assertEquals("document not found, maybe indexed _modified property not properly updated", 1, results.size());
+    }
+
+    @Test
+    public void testInterestingStrings() {
+        // test case  "gclef:\uD834\uDD1E" will fail on MySQL unless properly configured to use utf8mb4 charset
+        // Assume.assumeTrue(!(super.dsname.equals("RDB-MySQL")));
+
+        String[] tests = new String[] { "simple:foo", "cr:a\n\b", "dquote:a\"b", "bs:a\\b", "euro:a\u201c", "gclef:\uD834\uDD1E",
+                "tab:a\tb", "nul:a\u0000b", "brokensurrogate:\ud800" };
+
+        for (String t : tests) {
+            int pos = t.indexOf(":");
+            String testname = t.substring(0, pos);
+            String test = t.substring(pos + 1);
+            String id = this.getClass().getName() + ".testInterestingStrings-" + testname;
+            super.ds.remove(Collection.NODES, id);
+            UpdateOp up = new UpdateOp(id, true);
+            up.set("_id", id);
+            up.set("foo", test);
+            boolean success = super.ds.create(Collection.NODES, Collections.singletonList(up));
+            assertTrue("failed to insert a document with property value of " + test + " (" + testname + ") in " + super.dsname, success);
+            // re-read from persistence
+            super.ds.invalidateCache();
+            NodeDocument nd = super.ds.find(Collection.NODES, id);
+            assertEquals("failure to round-trip " + testname + " through " + 
super.dsname, test, nd.get("foo"));
+            super.ds.remove(Collection.NODES, id);
+        }
+    }
+
+    @Test
+    public void testDeleteNonExisting() {
+        String id = this.getClass().getName() + ".testDeleteNonExisting-" + UUID.randomUUID();
+        // delete is best effort
+        ds.remove(Collection.NODES, id);
+    }
+
+    @Test
+    public void testDeleteNonExistingMultiple() {
+        String id = this.getClass().getName() + ".testDeleteNonExistingMultiple-" + UUID.randomUUID();
+        // create a test node
+        UpdateOp up = new UpdateOp(id + "-2", true);
+        up.set("_id", id + "-2");
+        boolean success = super.ds.create(Collection.NODES, Collections.singletonList(up));
+        assertTrue(success);
+        List<String> todelete = new ArrayList<String>();
+        todelete.add(id + "-2");
+        todelete.add(id);
+        ds.remove(Collection.NODES, todelete);
+        // id-2 should be removed
+        Document d = ds.find(Collection.NODES, id + "-2");
+        assertTrue(d == null);
+    }
+
+    @Test
+    public void testUpdateMultiple() {
+        String id = this.getClass().getName() + ".testUpdateMultiple";
+        // create a test node
+        super.ds.remove(Collection.NODES, id);
+        UpdateOp up = new UpdateOp(id, true);
+        up.set("_id", id);
+        boolean success = super.ds.create(Collection.NODES, Collections.singletonList(up));
+        assertTrue(success);
+        removeMe.add(id);
+
+        // update a non-existing one and this one
+        List<String> toupdate = new ArrayList<String>();
+        toupdate.add(id + "-" + UUID.randomUUID());
+        toupdate.add(id);
+
+        UpdateOp up2 = new UpdateOp(id, false);
+        up2.set("foo", "bar");
+        ds.update(Collection.NODES, toupdate, up2);
+
+        // id should be updated
+        ds.invalidateCache();
+        Document d = ds.find(Collection.NODES, id);
+        assertNotNull(d);
+        assertEquals(id, d.getId());
+        assertEquals("bar", d.get("foo").toString());
     }
 
     @Test
@@ -160,8 +328,6 @@ public class BasicDocumentStoreTest exte
 
     @Test
     public void testQueryDeletedOnce() {
-        // see OAK-2450
-        Assume.assumeTrue(!(super.ds instanceof MongoDocumentStore));
         // create ten documents
         String base = this.getClass().getName() + ".testQueryDeletedOnce-";
         for (int i = 0; i < 10; i++) {
@@ -233,71 +399,560 @@ public class BasicDocumentStoreTest exte
 
     @Test
     public void testCreatePerfSmall() {
-        createPerf(16);
+        createPerf(16, 1);
+    }
+
+    @Test
+    public void testCreatePerfSmallBatch() {
+        createPerf(16, 64);
     }
 
     @Test
     public void testCreatePerfBig() {
-        createPerf(32 * 1024);
+        createPerf(32 * 1024, 1);
     }
 
-    private void createPerf(int size) {
-        String pval = generateString(size);
+    private void createPerf(int size, int amount) {
+        String pval = generateString(size, true);
         long duration = 1000;
         long end = System.currentTimeMillis() + duration;
         long cnt = 0;
+        List<String> ids = new ArrayList<String>();
+
+        while (System.currentTimeMillis() < end) {
+            List<UpdateOp> ups = new ArrayList<UpdateOp>();
+            for (int i = 0; i < amount; i++) {
+                String id = this.getClass().getName() + ".testCreatePerf-" + size + "-" + cnt + "-" + i;
+                UpdateOp up = new UpdateOp(id, true);
+                up.set("_id", id);
+                up.set("foo", pval);
+                ups.add(up);
+                ids.add(id);
+            }
+            boolean success = super.ds.create(Collection.NODES, ups);
+            removeMe.addAll(ids);
+            assertTrue("documents with " + ids + " not created", success);
+            cnt += 1;
+        }
+
+        LOG.info("document creation with property of size " + size + " and 
batch size " + amount + " for " + super.dsname + " was "
+                + cnt + " in " + duration + "ms (" + (cnt / (duration / 
1000f)) + "/s)");
+    }
+
+    @Test
+    public void testPerfCollectionPaging() {
+        testPerfCollectionPaging(this.getClass().getName() + ".testPerfCollectionPaging", false);
+    }
+
+    @Test
+    public void testPerfCollectionPagingUnCached() {
+        testPerfCollectionPaging(this.getClass().getName() + ".testPerfCollectionPagingUnCached", true);
+    }
+
+    private void testPerfCollectionPaging(String name, boolean invalidateCache) {
+        String cid = name;
+        int nodecount = 20000;
+        int initialFetchCount = 100;
+        int maxFetchCount = 1600;
+        int fetchcount = initialFetchCount;
+        long duration = 2000;
+        int cnt = 0;
+        List<UpdateOp> ups = new ArrayList<UpdateOp>();
 
+        UpdateOp container = new UpdateOp(cid, true);
+        container.set("_id", cid);
+        ups.add(container);
+        removeMe.add(cid);
+        for (int i = 0; i < nodecount; i++) {
+            String id = String.format("%s/%08d", cid, i);
+            removeMe.add(id);
+            UpdateOp u = new UpdateOp(id, true);
+            u.set("_id", id);
+            ups.add(u);
+        }
+
+        boolean success = super.ds.create(Collection.NODES, ups);
+        assertTrue(success);
+        super.ds.invalidateCache();
+
+        long end = System.currentTimeMillis() + duration;
+        String sid = cid;
+        int found = 0;
         while (System.currentTimeMillis() < end) {
-            String id = this.getClass().getName() + ".testCreatePerf-" + size + "-" + cnt;
+            long now = System.currentTimeMillis();
+            List<NodeDocument> result = super.ds.query(Collection.NODES, sid, cid + "X", fetchcount);
+            if (super.ds.getCacheStats() != null && result.size() > 0) {
+                // check freshness of returned documents
+                long created = result.get(0).getLastCheckTime();
+                assertTrue(
+                        "'getLastCheckTime' timestamp of NodeDocument too old 
(" + created + " vs " + now + ") (on " + super.dsname + ")",
+                        created >= now);
+            }
+            found += result.size();
+            if (result.size() < fetchcount) {
+                if (sid.equals(cid)) {
+                    fail("first page must not be empty");
+                }
+                sid = cid;
+                assertEquals(nodecount, found);
+                found = 0;
+                fetchcount = initialFetchCount;
+            }
+            else {
+                sid = result.get(result.size() -1).getId();
+                if (fetchcount < maxFetchCount) {
+                    fetchcount *= 2;
+                }
+            }
+            cnt += 1;
+            if (invalidateCache) {
+                super.ds.invalidateCache();
+            }
+        }
+
+        LOG.info("collection lookups " + (invalidateCache ? "(uncached) " : 
"") + super.dsname + " was " + cnt + " in " + duration
+                + "ms (" + (cnt / (duration / 1000f)) + "/s)");
+    }
+
+    @Test
+    public void testPerfLastRevBatch() {
+        String bid = this.getClass().getName() + ".testPerfLastRevBatch";
+        int nodecount = 100;
+        long duration = 5000;
+        int cnt = 0;
+        List<String> ids = new ArrayList<String>();
+        Revision cr = Revision.fromString("r0-0-1");
+
+        // create test nodes
+        for (int i = 0; i < nodecount; i++) {
+            String id = bid + "-" + i;
+            super.ds.remove(Collection.NODES, id);
+            removeMe.add(id);
             UpdateOp up = new UpdateOp(id, true);
             up.set("_id", id);
-            up.set("foo", pval);
+            up.set("testprop", generateString(100 * i, true));
+            up.setMapEntry("_lastRev", cr, "setup");
+            up.set("_modified", 
NodeDocument.getModifiedInSecs(System.currentTimeMillis()));
             boolean success = super.ds.create(Collection.NODES, 
Collections.singletonList(up));
-            assertTrue("document with " + id + " nit created", success);
-            removeMe.add(id);
+            assertTrue("creation failed for " + id + " in " + super.dsname, 
success);
+            ids.add(id);
+        }
+
+        long end = System.currentTimeMillis() + duration;
+        while (System.currentTimeMillis() < end) {
+            UpdateOp up = new UpdateOp(bid, true);
+            up.setMapEntry("_lastRev", cr, "iteration-" + cnt);
+            up.max("_modified", 
NodeDocument.getModifiedInSecs(System.currentTimeMillis()));
+            super.ds.update(Collection.NODES, ids, up);
+            cnt += 1;
+        }
+
+        // check postcondition
+        super.ds.invalidateCache();
+        for (int i = 0; i < nodecount; i++) {
+            NodeDocument d = super.ds.find(Collection.NODES, bid + "-" + i);
+            assertNotNull(d);
+            Map<Revision, String> m = (Map<Revision, String>)d.get("_lastRev");
+            assertEquals("iteration-" + (cnt - 1), m.get(cr));
+        }
+
+        LOG.info("batch update for _lastRev for " + super.dsname + " was "
+                + cnt + " in " + duration + "ms (" + (cnt / (duration / 
1000f)) + "/s)");
+    }
+
+    @Test
+    public void testPerfReadBigDoc() {
+        String id = this.getClass().getName() + ".testReadBigDoc";
+        long duration = 1000;
+        int cnt = 0;
+
+        super.ds.remove(Collection.NODES, Collections.singletonList(id));
+        UpdateOp up = new UpdateOp(id, true);
+        up.set("_id", id);
+        for (int i = 0; i < 100; i++) {
+            up.set("foo" + i, generateString(1024, true));
+        }
+        assertTrue(super.ds.create(Collection.NODES, Collections.singletonList(up)));
+        removeMe.add(id);
+
+        long end = System.currentTimeMillis() + duration;
+        while (System.currentTimeMillis() < end) {
+            NodeDocument d = super.ds.find(Collection.NODES, id, 10); // allow 10ms old entries
             cnt += 1;
         }
 
-        LOG.info("document creation with property of size " + size + " for " + 
super.dsname + " was " + cnt + " in " + duration + "ms ("
-                + (cnt / (duration / 1000f)) + "/s)");
+        LOG.info("big doc read from " + super.dsname + " was "
+                + cnt + " in " + duration + "ms (" + (cnt / (duration / 
1000f)) + "/s)");
     }
 
     @Test
     public void testUpdatePerfSmall() {
-        updatePerf(16);
+        updatePerf(16, false);
+    }
+
+    @Test
+    public void testUpdatePerfSmallGrowing() {
+        updatePerf(16, true);
     }
 
     @Test
     public void testUpdatePerfBig() {
-        updatePerf(32 * 1024);
+        updatePerf(32 * 1024, false);
     }
 
-    private void updatePerf(int size) {
-        String pval = generateString(size);
+    private void updatePerf(int size, boolean growing) {
+        String pval = generateString(size, true);
         long duration = 1000;
         long end = System.currentTimeMillis() + duration;
         long cnt = 0;
+        Set<Revision> expectedRevs = new HashSet<Revision>();
 
-        String id = this.getClass().getName() + ".testUpdatePerf-" + size;
+        String id = this.getClass().getName() + ".testUpdatePerf" + (growing ? "Growing" : "") + "-" + size;
         removeMe.add(id);
 
         while (System.currentTimeMillis() < end) {
             UpdateOp up = new UpdateOp(id, true);
             up.set("_id", id);
-            up.set("foo", pval);
-            super.ds.createOrUpdate(Collection.NODES, up);
+            if (growing) {
+                Revision r = new Revision(System.currentTimeMillis(), (int) cnt, 1);
+                up.setMapEntry("foo", r, pval);
+                up.setMapEntry("_commitRoot", r, "1");
+                up.increment("c", 1);
+                up.max("max", System.currentTimeMillis());
+                expectedRevs.add(r);
+            } else {
+                up.set("foo", pval);
+            }
+            NodeDocument old = super.ds.createOrUpdate(Collection.NODES, up);
+            if (cnt == 0) {
+                assertNull("expect null on create", old);
+            } else {
+                assertNotNull("fail on update " + cnt, old);
+            }
             cnt += 1;
         }
 
-        LOG.info("document updates with property of size " + size + " for " + 
super.dsname + " was " + cnt + " in " + duration + "ms ("
-                + (cnt / (duration / 1000f)) + "/s)");
+        if (growing) {
+            NodeDocument result = super.ds.find(Collection.NODES, id, 0);
+            Map<Revision, Object> m = (Map<Revision, Object>)result.get("foo");
+            assertEquals("number of revisions", expectedRevs.size(), m.size());
+            assertTrue(m.keySet().equals(expectedRevs));
+        }
+
+        LOG.info("document updates with property of size " + size + (growing ? 
" (growing)" : "") + " for " + super.dsname
+                + " was " + cnt + " in " + duration + "ms (" + (cnt / 
(duration / 1000f)) + "/s)");
+    }
+
+    private static String generateString(int length, boolean ascii) {
+        char[] s = new char[length];
+        for (int i = 0; i < length; i++) {
+            if (ascii) {
+                s[i] = (char) (32 + (int) (95 * Math.random()));
+            } else {
+                s[i] = (char) (32 + (int) ((0xd7ff - 32) * Math.random()));
+            }
+        }
+        return new String(s);
+    }
+
+    private static String generateId(int length, boolean ascii) {
+        StringBuffer sb = new StringBuffer();
+        for (int i = 0; i < length; i++) {
+            if (ascii) {
+                sb.append("0");
+            }
+            else {
+                sb.append(Character.toChars(0x1F4A9));
+            }
+        }
+        return sb.toString();
+    }
+
+    @Test
+    public void testPerfUpdateLimit() throws SQLException, UnsupportedEncodingException {
+        internalTestPerfUpdateLimit("testPerfUpdateLimit", "raw row update (set long)", 0);
+    }
+
+    @Test
+    public void testPerfUpdateLimitString() throws SQLException, UnsupportedEncodingException {
+        internalTestPerfUpdateLimit("testPerfUpdateLimitString", "raw row update (set long/string)", 1);
+    }
+
+    @Test
+    public void testPerfUpdateLimitStringBlob() throws SQLException, UnsupportedEncodingException {
+        internalTestPerfUpdateLimit("testPerfUpdateLimitStringBlob", "raw row update (set long/string/blob)", 2);
+    }
+
+    @Test
+    public void testPerfUpdateAppendString() throws SQLException, UnsupportedEncodingException {
+        internalTestPerfUpdateLimit("testPerfUpdateAppendString", "raw row update (append string)", 3);
+    }
+
+    @Test
+    public void testPerfUpdateGrowingDoc() throws SQLException, UnsupportedEncodingException {
+        internalTestPerfUpdateLimit("testPerfUpdateGrowingDoc", "raw row update (string + blob)", 4);
+    }
+
+    private void internalTestPerfUpdateLimit(String name, String desc, int mode) throws SQLException, UnsupportedEncodingException {
+        if (super.rdbDataSource != null) {
+            String key = name;
+            Connection connection = null;
+            String table = DocumentStoreFixture.TABLEPREFIX + "NODES";
+
+            // create test node
+            try {
+                connection = super.rdbDataSource.getConnection();
+                connection.setAutoCommit(false);
+                PreparedStatement stmt = connection.prepareStatement("insert into " + table
+                        + " (ID, MODCOUNT, DATA) values (?, ?, ?)");
+                try {
+                    stmt.setString(1, key);
+                    stmt.setLong(2, 0);
+                    stmt.setString(3, "X");
+                    stmt.executeUpdate();
+                    connection.commit();
+                } finally {
+                    stmt.close();
+                }
+            } catch (SQLException ex) {
+                // ignored
+            } finally {
+                if (connection != null) {
+                    try {
+                        connection.close();
+                    } catch (SQLException e) {
+                        // ignored
+                    }
+                }
+            }
+
+            removeMe.add(key);
+            StringBuffer expect = new StringBuffer("X");
+
+            String appendString = generateString(512, true);
+
+            long duration = 1000;
+            long end = System.currentTimeMillis() + duration;
+            long cnt = 0;
+            byte bdata[] = new byte[65536];
+            String sdata = appendString;
+            boolean needsConcat = super.dsname.contains("MySQL");
+            boolean needsSQLStringConcat = super.dsname.contains("MSSql");
+            int dataInChars = ((super.dsname.contains("Oracle") || (super.dsname.contains("MSSql"))) ? 4000 : 16384);
+            int dataInBytes = dataInChars / 3;
+
+            while (System.currentTimeMillis() < end) {
+
+                try {
+                    connection = super.rdbDataSource.getConnection();
+                    connection.setAutoCommit(false);
+
+                    if (mode == 0) {
+                        PreparedStatement stmt = connection.prepareStatement("update " + table + " set MODCOUNT = ? where ID = ?");
+                        try {
+                            stmt.setLong(1, cnt);
+                            stmt.setString(2, key);
+                            assertEquals(1, stmt.executeUpdate());
+                            connection.commit();
+                        } finally {
+                            stmt.close();
+                        }
+                    } else if (mode == 1) {
+                        PreparedStatement stmt = connection.prepareStatement("update " + table
+                                + " set MODCOUNT = ?, DATA = ? where ID = ?");
+                        try {
+                            stmt.setLong(1, cnt);
+                            stmt.setString(2, "JSON data " + UUID.randomUUID());
+                            stmt.setString(3, key);
+                            assertEquals(1, stmt.executeUpdate());
+                            connection.commit();
+                        } finally {
+                            stmt.close();
+                        }
+                    } else if (mode == 2) {
+                        PreparedStatement stmt = connection.prepareStatement("update " + table
+                                + " set MODCOUNT = ?, DATA = ?, BDATA = ? where ID = ?");
+                        try {
+                            stmt.setLong(1, cnt);
+                            stmt.setString(2, "JSON data " + UUID.randomUUID());
+                            bdata[(int) cnt % bdata.length] = (byte) (cnt & 0xff);
+                            stmt.setString(2, "JSON data " + UUID.randomUUID());
+                            stmt.setBytes(3, bdata);
+                            stmt.setString(4, key);
+                            assertEquals(1, stmt.executeUpdate());
+                            connection.commit();
+                        } finally {
+                            stmt.close();
+                        }
+                    } else if (mode == 3) {
+                        String t = "update " + table + " ";
+
+                        t += "set DATA = ";
+                        if (needsConcat) {
+                            t += "CONCAT(DATA, ?) ";
+                        } else if (needsSQLStringConcat) {
+                            t += "CASE WHEN LEN(DATA) <= " + (dataInChars - 
appendString.length()) + " THEN (DATA + CAST(? AS nvarchar(" + 4000
+                                    + "))) ELSE (DATA + CAST(DATA AS 
nvarchar(max))) END";
+                        } else {
+                            t += "DATA || CAST(? as varchar(" + dataInChars + 
"))";
+                        }
+
+                        t += " where ID = ?";
+
+                        PreparedStatement stmt = connection.prepareStatement(t);
+                        try {
+                            stmt.setString(1, appendString);
+                            stmt.setString(2, key);
+                            assertEquals(1, stmt.executeUpdate());
+                            connection.commit();
+                            expect.append(appendString);
+                        } catch (SQLException ex) {
+                            String state = ex.getSQLState();
+                            if ("22001".equals(state) /* everybody */ || 
("72000".equals(state) && 1489 == ex.getErrorCode()) /* Oracle */) {
+                                // overflow
+                                connection.rollback();
+                                stmt = connection.prepareStatement("update " + table
+                                        + " set MODCOUNT = MODCOUNT + 1, DATA = ? where ID = ?");
+                                stmt.setString(1, "X");
+                                stmt.setString(2, key);
+                                assertEquals(1, stmt.executeUpdate());
+                                connection.commit();
+                                expect = new StringBuffer("X");
+                            } else {
+                                throw (ex);
+                            }
+                        } finally {
+                            stmt.close();
+                        }
+                    } else if (mode == 4) {
+                        PreparedStatement stmt = connection.prepareStatement("update " + table
+                                + " set MODIFIED = ?, HASBINARY = ?, MODCOUNT = ?, CMODCOUNT = ?, DSIZE = ?, DATA = ?, BDATA = ? where ID = ?");
+                        try {
+                            int si = 1;
+                            stmt.setObject(si++, System.currentTimeMillis() / 5, Types.BIGINT);
+                            stmt.setObject(si++, 0, Types.SMALLINT);
+                            stmt.setObject(si++, cnt, Types.BIGINT);
+                            stmt.setObject(si++, null, Types.BIGINT);
+                            stmt.setObject(si++, sdata.length(), Types.BIGINT);
+
+                            if (sdata.length() < dataInBytes) {
+                                stmt.setString(si++, sdata);
+                                stmt.setBinaryStream(si++, null, 0);
+                            }
+                            else {
+                                stmt.setString(si++, "null");
+                                stmt.setBytes(si++, sdata.getBytes("UTF-8"));
+                            }
+                            stmt.setString(si++, key);
+                            assertEquals(1, stmt.executeUpdate());
+                            connection.commit();
+                            sdata += appendString;
+                        } finally {
+                            stmt.close();
+                        }
+
+                    }
+                } catch (SQLException ex) {
+                    LOG.error(ex.getMessage() + " " + ex.getSQLState() + " " + ex.getErrorCode(), ex);
+                } finally {
+                    if (connection != null) {
+                        try {
+                            connection.close();
+                        } catch (SQLException e) {
+                            // ignored
+                        }
+                    }
+                }
+
+                cnt += 1;
+            }
+
+            // check persisted values
+            if (mode == 3) {
+                try {
+                    connection = super.rdbDataSource.getConnection();
+                    connection.setAutoCommit(false);
+                    PreparedStatement stmt = connection.prepareStatement("select DATA, MODCOUNT from " + table + " where ID = ?");
+                    try {
+                        stmt.setString(1, key);
+                        ResultSet rs = stmt.executeQuery();
+                        assertTrue(rs.next());
+                        String got = rs.getString(1);
+                        long modc = rs.getLong(2);
+                        LOG.info("column reset " + modc + " times");
+                        assertEquals(expect.toString(), got);
+                    } finally {
+                        stmt.close();
+                    }
+                } finally {
+                    if (connection != null) {
+                        try {
+                            connection.close();
+                        } catch (SQLException e) {
+                            // ignored
+                        }
+                    }
+                }
+            }
+
+            LOG.info(desc + " for " + super.dsname + " was " + cnt + " in " + 
duration + "ms (" + (cnt / (duration / 1000f))
+                    + "/s)");
+        }
     }
 
-    private static String generateString(int length) {
-        StringBuffer buf = new StringBuffer(length);
-        while (length-- > 0) {
-            buf.append('A' + ((int) (26 * Math.random())));
+    // make sure _collisionsModCount property is maintained properly when it exists
+    @Test
+    public void testCollisionsModCount() {
+        String id = this.getClass().getName() + ".testCollisionsModCount";
+
+        // remove if present
+        NodeDocument nd = super.ds.find(Collection.NODES, id);
+        if (nd != null) {
+            super.ds.remove(Collection.NODES, id);
+        }
+
+        // add
+        Revision revision = Revision.fromString("r0-0-1");
+        UpdateOp up = new UpdateOp(id, true);
+        up.set("_id", id);
+        up.setMapEntry("_collisions", revision, "foo");
+        assertTrue(super.ds.create(Collection.NODES, Collections.singletonList(up)));
+        removeMe.add(id);
+
+        // get it
+        nd = super.ds.find(Collection.NODES, id);
+        assertNotNull(nd);
+        Number cmc = (Number)nd.get("_collisionsModCount");
+        if (cmc == null) {
+            // not supported
+        }
+        else {
+            // update 
+            Revision revision2 = Revision.fromString("r0-0-2");
+            UpdateOp up2 = new UpdateOp(id, false);
+            up2.set("_id", id);
+            up2.setMapEntry("_collisions", revision2, "foobar");
+            NodeDocument old = super.ds.findAndUpdate(Collection.NODES, up2);
+            assertNotNull(old);
+
+            nd = super.ds.find(Collection.NODES, id, 0);
+            assertNotNull(nd);
+            Number cmc2 = (Number)nd.get("_collisionsModCount");
+            assertNotNull(cmc2);
+            assertTrue(cmc2.longValue() > cmc.longValue());
+
+            // update 
+            UpdateOp up3 = new UpdateOp(id, false);
+            up3.set("_id", id);
+            up3.set("foo", "bar");
+            old = super.ds.findAndUpdate(Collection.NODES, up3);
+            assertNotNull(old);
+
+            nd = super.ds.find(Collection.NODES, id, 0);
+            assertNotNull(nd);
+            Number cmc3 = (Number)nd.get("_collisionsModCount");
+            assertNotNull(cmc3);
+            assertTrue(cmc2.longValue() == cmc3.longValue());
         }
-        return buf.toString();
     }
 }

Modified: jackrabbit/oak/branches/1.0/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/DocumentStoreFixture.java
URL: http://svn.apache.org/viewvc/jackrabbit/oak/branches/1.0/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/DocumentStoreFixture.java?rev=1659616&r1=1659615&r2=1659616&view=diff
==============================================================================
--- jackrabbit/oak/branches/1.0/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/DocumentStoreFixture.java (original)
+++ jackrabbit/oak/branches/1.0/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/DocumentStoreFixture.java Fri Feb 13 17:22:26 2015
@@ -19,8 +19,12 @@ package org.apache.jackrabbit.oak.plugin
 import javax.sql.DataSource;
 
 import com.mongodb.BasicDBObject;
+
 import org.apache.jackrabbit.oak.plugins.document.memory.MemoryDocumentStore;
 import org.apache.jackrabbit.oak.plugins.document.mongo.MongoDocumentStore;
+import org.apache.jackrabbit.oak.plugins.document.rdb.RDBDataSourceFactory;
+import org.apache.jackrabbit.oak.plugins.document.rdb.RDBDocumentStore;
+import org.apache.jackrabbit.oak.plugins.document.rdb.RDBOptions;
 import org.apache.jackrabbit.oak.plugins.document.util.MongoConnection;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -32,17 +36,40 @@ public abstract class DocumentStoreFixtu
     private static final Logger LOG = LoggerFactory.getLogger(DocumentStoreFixture.class);
 
     public static final DocumentStoreFixture MEMORY = new MemoryFixture();
+    public static final DocumentStoreFixture RDB_H2 = new RDBFixture("RDB-H2(file)", "jdbc:h2:file:./target/ds-test", "sa", "");
+    public static final DocumentStoreFixture RDB_PG = new RDBFixture("RDB-Postgres", "jdbc:postgresql:oak", "postgres", "geheim");
+    public static final DocumentStoreFixture RDB_DB2 = new RDBFixture("RDB-DB2", "jdbc:db2://localhost:50000/OAK", "oak", "geheim");
+    public static final DocumentStoreFixture RDB_MYSQL = new RDBFixture("RDB-MySQL", "jdbc:mysql://localhost:3306/oak", "root", "geheim");
+    public static final DocumentStoreFixture RDB_ORACLE = new RDBFixture("RDB-Oracle", "jdbc:oracle:thin:@localhost:1521:orcl", "system", "geheim");
+    public static final DocumentStoreFixture RDB_MSSQL = new RDBFixture("RDB-MSSql", "jdbc:sqlserver://localhost:1433;databaseName=OAK", "sa", "geheim");
     public static final DocumentStoreFixture MONGO = new MongoFixture("mongodb://localhost:27017/oak");
 
+    public static final String TABLEPREFIX = "dstest_";
+
     public abstract String getName();
 
-    public abstract DocumentStore createDocumentStore();
+    public abstract DocumentStore createDocumentStore(int clusterId);
+
+    public DocumentStore createDocumentStore() {
+        return createDocumentStore(1);
+    }
 
     public boolean isAvailable() {
         return true;
     }
 
-    public void dispose() throws Exception {}
+    // get underlying datasource if RDB persistence
+    public DataSource getRDBDataSource() {
+        return null;
+    }
+
+    // return false if the multiple instances will not share the same persistence
+    public boolean hasSinglePersistence() {
+        return true;
+    }
+
+    public void dispose() throws Exception {
+    }
 
     public static class MemoryFixture extends DocumentStoreFixture {
 
@@ -52,16 +79,83 @@ public abstract class DocumentStoreFixtu
         }
 
         @Override
-        public DocumentStore createDocumentStore() {
+        public DocumentStore createDocumentStore(int clusterId) {
             return new MemoryDocumentStore();
         }
+
+        @Override
+        public boolean hasSinglePersistence() {
+            return false;
+        }
+    }
+
+    public static class RDBFixture extends DocumentStoreFixture {
+
+        DataSource dataSource;
+        DocumentStore store1, store2;
+        String name;
+        RDBOptions options = new RDBOptions().tablePrefix(TABLEPREFIX).dropTablesOnClose(true);
+
+        public RDBFixture() {
+            // default RDB fixture
+            this("RDB-H2(file)", "jdbc:h2:file:./target/ds-test2", "sa", "");
+        }
+
+        public RDBFixture(String name, String url, String username, String passwd) {
+            this.name = name;
+            try {
+                dataSource = RDBDataSourceFactory.forJdbcUrl(url, username, passwd);
+            } catch (Exception ex) {
+                LOG.info("Database instance not available at " + url + ", skipping tests...", ex);
+            }
+        }
+
+        @Override
+        public String getName() {
+            return name;
+        }
+
+        @Override
+        public DocumentStore createDocumentStore(int clusterId) {
+            if (clusterId == 1) {
+                store1 = new RDBDocumentStore(dataSource, new DocumentMK.Builder().setClusterId(1), options);
+                return store1;
+            } else if (clusterId == 2) {
+                store2 = new RDBDocumentStore(dataSource, new DocumentMK.Builder().setClusterId(2), options);
+                return store2;
+            } else {
+                throw new RuntimeException("expect clusterId == 1 or == 2");
+            }
+        }
+
+        @Override
+        public boolean isAvailable() {
+            return dataSource != null;
+        }
+
+        @Override
+        public DataSource getRDBDataSource() {
+            return dataSource;
+        }
+
+        @Override
+        public void dispose() {
+            if (this.store1 != null) {
+                this.store1.dispose();
+                this.store1 = null;
+            }
+            if (this.store2 != null) {
+                this.store2.dispose();
+                this.store2 = null;
+            }
+        }
     }
 
     public static class MongoFixture extends DocumentStoreFixture {
         public static final String DEFAULT_URI = "mongodb://localhost:27017/oak-test";
         private String uri;
 
-        public MongoFixture(){
+        public MongoFixture() {
             this(DEFAULT_URI);
         }
 
@@ -75,12 +169,12 @@ public abstract class DocumentStoreFixtu
         }
 
         @Override
-        public DocumentStore createDocumentStore() {
+        public DocumentStore createDocumentStore(int clusterId) {
             try {
                 MongoConnection connection = new MongoConnection(uri);
                 DB db = connection.getDB();
                 MongoUtils.dropCollections(db);
-                return new MongoDocumentStore(db, new DocumentMK.Builder());
+                return new MongoDocumentStore(db, new DocumentMK.Builder().setClusterId(clusterId));
             } catch (Exception e) {
                 throw new RuntimeException(e);
             }
@@ -88,21 +182,21 @@ public abstract class DocumentStoreFixtu
 
         @Override
         public boolean isAvailable() {
-            try{
+            try {
                 MongoConnection connection = new MongoConnection(uri);
                 connection.getDB().command(new BasicDBObject("ping", 1));
                 return true;
-            }catch(Exception e){
+            } catch (Exception e) {
                 return false;
             }
         }
 
         @Override
         public void dispose() {
-            try{
+            try {
                 MongoConnection connection = new MongoConnection(uri);
                 connection.getDB().dropDatabase();
-            } catch(Exception ignore) {
+            } catch (Exception ignore) {
             }
         }
     }

Added: jackrabbit/oak/branches/1.0/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/MultiDocumentStoreTest.java
URL: http://svn.apache.org/viewvc/jackrabbit/oak/branches/1.0/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/MultiDocumentStoreTest.java?rev=1659616&view=auto
==============================================================================
--- jackrabbit/oak/branches/1.0/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/MultiDocumentStoreTest.java (added)
+++ jackrabbit/oak/branches/1.0/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/MultiDocumentStoreTest.java Fri Feb 13 17:22:26 2015
@@ -0,0 +1,121 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.jackrabbit.oak.plugins.document;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+import java.util.Collections;
+
+import org.junit.Test;
+
+public class MultiDocumentStoreTest extends AbstractMultiDocumentStoreTest {
+
+    public MultiDocumentStoreTest(DocumentStoreFixture dsf) {
+        super(dsf);
+    }
+
+    @Test
+    public void testInterleavedUpdate() {
+        String id = this.getClass().getName() + ".testInterleavedUpdate";
+
+        // remove if present
+        NodeDocument nd = super.ds1.find(Collection.NODES, id);
+        if (nd != null) {
+            super.ds1.remove(Collection.NODES, id);
+        }
+
+        UpdateOp up = new UpdateOp(id, true);
+        up.set("_id", id);
+        up.set("_foo", 0l);
+        assertTrue(super.ds1.create(Collection.NODES, Collections.singletonList(up)));
+
+        long increments = 10;
+
+        for (int i = 0; i < increments; i++) {
+            up = new UpdateOp(id, true);
+            up.set("_id", id);
+            up.increment("_foo", 1l);
+            if (i % 2 == 0) {
+                super.ds1.update(Collection.NODES, Collections.singletonList(id), up);
+            }
+            else {
+                super.ds2.update(Collection.NODES, Collections.singletonList(id), up);
+            }
+        }
+        removeMe.add(id);
+
+        // read uncached
+        nd = super.ds1.find(Collection.NODES, id, 0);
+        assertEquals("_foo should have been incremented 10 times", increments, 
nd.get("_foo"));
+    }
+
+    @Test
+    public void testInterleavedUpdate2() {
+        String id = this.getClass().getName() + ".testInterleavedUpdate2";
+
+        // remove if present
+        NodeDocument nd1 = super.ds1.find(Collection.NODES, id);
+        if (nd1 != null) {
+            super.ds1.remove(Collection.NODES, id);
+        }
+
+        UpdateOp up = new UpdateOp(id, true);
+        up.set("_id", id);
+        assertTrue(super.ds1.create(Collection.NODES, Collections.singletonList(up)));
+        nd1 = super.ds1.find(Collection.NODES, id, 0);
+        Number n = nd1.getModCount();
+        if (n != null) {
+            // Document store uses modCount
+            int n1 = n.intValue();
+
+            // get the document into ds2's cache
+            NodeDocument nd2 = super.ds2.find(Collection.NODES, id, 0);
+            int n2 = nd2.getModCount().intValue();
+            assertEquals(n1, n2);
+
+            UpdateOp upds1 = new UpdateOp(id, true);
+            upds1.set("_id", id);
+            upds1.set("foo", "bar");
+            super.ds1.update(Collection.NODES, Collections.singletonList(id), upds1);
+            nd1 = super.ds1.find(Collection.NODES, id);
+            int oldn1 = n1;
+            n1 = nd1.getModCount().intValue();
+            assertEquals(oldn1 + 1, n1);
+            assertEquals("bar", nd1.get("foo"));
+
+            // modify in DS2
+            UpdateOp upds2 = new UpdateOp(id, true);
+            upds2.set("_id", id);
+            upds2.set("foo", "qux");
+            super.ds2.update(Collection.NODES, Collections.singletonList(id), upds2);
+            nd2 = super.ds2.find(Collection.NODES, id);
+            n2 = nd2.getModCount().intValue();
+            assertEquals(oldn1 + 1, n2);
+            assertEquals("qux", nd2.get("foo"));
+
+            // both stores are now at the same modCount with different contents
+            upds1 = new UpdateOp(id, true);
+            upds1.set("_id", id);
+            upds1.set("foo", "barbar");
+            NodeDocument prev = super.ds1.findAndUpdate(Collection.NODES, upds1);
+            // prev document should contain mod from DS2
+            assertEquals("qux", prev.get("foo"));
+            assertEquals(oldn1 + 2, prev.getModCount().intValue());
+        }
+    }
+}

Propchange: jackrabbit/oak/branches/1.0/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/MultiDocumentStoreTest.java
------------------------------------------------------------------------------
    svn:eol-style = native

Added: jackrabbit/oak/branches/1.0/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBBlobStoreFriend.java
URL: http://svn.apache.org/viewvc/jackrabbit/oak/branches/1.0/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBBlobStoreFriend.java?rev=1659616&view=auto
==============================================================================
--- jackrabbit/oak/branches/1.0/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBBlobStoreFriend.java (added)
+++ jackrabbit/oak/branches/1.0/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBBlobStoreFriend.java Fri Feb 13 17:22:26 2015
@@ -0,0 +1,30 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.jackrabbit.oak.plugins.document.rdb;
+
+import java.io.IOException;
+
+public class RDBBlobStoreFriend {
+
+    public static void storeBlock(RDBBlobStore ds, byte[] digest, int level, byte[] data) throws IOException {
+        ds.storeBlock(digest, level, data);
+    }
+
+    public static byte[] readBlockFromBackend(RDBBlobStore ds, byte[] digest) throws Exception {
+        return ds.readBlockFromBackend(digest);
+    }
+}

Propchange: jackrabbit/oak/branches/1.0/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBBlobStoreFriend.java
------------------------------------------------------------------------------
    svn:eol-style = native

Added: jackrabbit/oak/branches/1.0/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBDocumentSerializerTest.java
URL: http://svn.apache.org/viewvc/jackrabbit/oak/branches/1.0/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBDocumentSerializerTest.java?rev=1659616&view=auto
==============================================================================
--- jackrabbit/oak/branches/1.0/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBDocumentSerializerTest.java (added)
+++ jackrabbit/oak/branches/1.0/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBDocumentSerializerTest.java Fri Feb 13 17:22:26 2015
@@ -0,0 +1,142 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.jackrabbit.oak.plugins.document.rdb;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
+
+import java.io.UnsupportedEncodingException;
+import java.util.Collections;
+
+import org.apache.jackrabbit.oak.plugins.document.Collection;
+import org.apache.jackrabbit.oak.plugins.document.DocumentStore;
+import org.apache.jackrabbit.oak.plugins.document.DocumentStoreException;
+import org.apache.jackrabbit.oak.plugins.document.DocumentStoreFixture;
+import org.apache.jackrabbit.oak.plugins.document.NodeDocument;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+public class RDBDocumentSerializerTest  {
+
+    private DocumentStoreFixture fixture = DocumentStoreFixture.RDB_H2;
+    private DocumentStore store;
+    private RDBDocumentSerializer ser;
+
+    @Before
+    public void setUp() throws Exception {
+        store = fixture.createDocumentStore();
+        ser = new RDBDocumentSerializer(store, Collections.singleton("_id"));
+    }
+
+    @After
+    public void tearDown() throws Exception {
+        fixture.dispose();
+    }
+
+    @Test
+    public void testSimpleString() {
+        RDBRow row = new RDBRow("_foo", true, true, 1, 2, 3, "{}", null);
+        NodeDocument doc = this.ser.fromRow(Collection.NODES, row);
+        assertEquals("_foo", doc.getId());
+        assertEquals(true, doc.hasBinary());
+        assertEquals(true, doc.get(NodeDocument.DELETED_ONCE));
+        assertEquals(2L, doc.getModCount());
+    }
+
+    @Test
+    public void testSimpleBlob() throws UnsupportedEncodingException {
+        RDBRow row = new RDBRow("_foo", false, false, 1, 2, 3, "\"blob\"", 
"{}".getBytes("UTF-8"));
+        NodeDocument doc = this.ser.fromRow(Collection.NODES, row);
+        assertEquals("_foo", doc.getId());
+        assertEquals(false, doc.hasBinary());
+        assertEquals(2L, doc.getModCount());
+    }
+
+    @Test
+    public void testSimpleBlob2() throws UnsupportedEncodingException {
+        RDBRow row = new RDBRow("_foo", false, false, 1, 2, 3, "\"blob\"", 
"{\"s\":\"string\", \"b\":true, \"i\":1}".getBytes("UTF-8"));
+        NodeDocument doc = this.ser.fromRow(Collection.NODES, row);
+        assertEquals("_foo", doc.getId());
+        assertEquals(false, doc.hasBinary());
+        assertEquals(2L, doc.getModCount());
+        assertEquals("string", doc.get("s"));
+        assertEquals(Boolean.TRUE, doc.get("b"));
+        assertEquals(1L, doc.get("i"));
+    }
+
+    @Test
+    public void testSimpleBoth() throws UnsupportedEncodingException {
+        try {
+            RDBRow row = new RDBRow("_foo", true, false, 1, 2, 3, "{}", 
"{}".getBytes("UTF-8"));
+            this.ser.fromRow(Collection.NODES, row);
+            fail("should fail");
+        }
+        catch (DocumentStoreException expected) {
+        }
+    }
+
+    @Test
+    public void testBlobAndDiff() throws UnsupportedEncodingException {
+        RDBRow row = new RDBRow("_foo", true, false, 1, 2, 3, "\"blob\", 
[[\"=\", \"foo\", \"bar\"],[\"M\", \"m1\", 1],[\"M\", \"m2\", 3]]", "{\"m1\":2, 
\"m2\":2}".getBytes("UTF-8"));
+        NodeDocument doc = this.ser.fromRow(Collection.NODES, row);
+        assertEquals("bar", doc.get("foo"));
+        assertEquals(2L, doc.get("m1"));
+        assertEquals(3L, doc.get("m2"));
+    }
+
+    @Test
+    public void testBlobAndDiffBorked() throws UnsupportedEncodingException {
+        try {
+            RDBRow row = new RDBRow("_foo", true, false, 1, 2, 3, "[[\"\", 
\"\", \"\"]]", "{}".getBytes("UTF-8"));
+            this.ser.fromRow(Collection.NODES, row);
+            fail("should fail");
+        }
+        catch (DocumentStoreException expected) {
+        }
+    }
+
+    @Test
+    public void testBrokenJSONTrailingComma() throws UnsupportedEncodingException {
+        try {
+            RDBRow row = new RDBRow("_foo", true, false, 1, 2, 3, "{ \"x\" : 
1, }", null);
+            this.ser.fromRow(Collection.NODES, row);
+            fail("should fail");
+        }
+        catch (DocumentStoreException expected) {
+        }
+    }
+
+    @Test
+    public void testBrokenJSONUnquotedIdentifier() throws UnsupportedEncodingException {
+        try {
+            RDBRow row = new RDBRow("_foo", true, false, 1, 2, 3, "{ x : 1, 
}", null);
+            this.ser.fromRow(Collection.NODES, row);
+            fail("should fail");
+        }
+        catch (DocumentStoreException expected) {
+        }
+    }
+
+    @Test
+    public void testSimpleStringNonAscii() {
+        RDBRow row = new RDBRow("_foo", true, false, 1, 2, 3, 
"{\"x\":\"\u20ac\uD834\uDD1E\"}", null);
+        NodeDocument doc = this.ser.fromRow(Collection.NODES, row);
+        assertEquals("_foo", doc.getId());
+        assertEquals("\u20ac\uD834\uDD1E", doc.get("x"));
+    }
+}

Propchange: jackrabbit/oak/branches/1.0/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBDocumentSerializerTest.java
------------------------------------------------------------------------------
    svn:eol-style = native

