This is an automated email from the ASF dual-hosted git repository.

reschke pushed a commit to branch trunk
in repository https://gitbox.apache.org/repos/asf/jackrabbit-oak.git


The following commit(s) were added to refs/heads/trunk by this push:
     new f2a67c8436 OAK-12109: MongoDocumentStore: improve diagnostics for too 
large docs in bulk request payloads (#2764)
f2a67c8436 is described below

commit f2a67c843691aef883c97e302f2ba50bacc15274
Author: Julian Reschke <[email protected]>
AuthorDate: Wed Feb 25 16:37:37 2026 +0100

    OAK-12109: MongoDocumentStore: improve diagnostics for too large docs in 
bulk request payloads (#2764)
---
 .../plugins/document/mongo/MongoDocumentStore.java | 26 +++++++++
 .../document/mongo/MongoDBExceptionTest.java       | 66 +++++++++++++++++++++-
 2 files changed, 89 insertions(+), 3 deletions(-)

diff --git 
a/oak-store-document/src/main/java/org/apache/jackrabbit/oak/plugins/document/mongo/MongoDocumentStore.java
 
b/oak-store-document/src/main/java/org/apache/jackrabbit/oak/plugins/document/mongo/MongoDocumentStore.java
index 3b5c7b2c1b..756edc8ce0 100644
--- 
a/oak-store-document/src/main/java/org/apache/jackrabbit/oak/plugins/document/mongo/MongoDocumentStore.java
+++ 
b/oak-store-document/src/main/java/org/apache/jackrabbit/oak/plugins/document/mongo/MongoDocumentStore.java
@@ -1651,7 +1651,33 @@ public class MongoDocumentStore implements DocumentStore 
{
             for (BulkWriteError err : e.getWriteErrors()) {
                 failedUpdates.add(bulkIds[err.getIndex()]);
             }
+        } catch (BSONException bsonException) {
+            LOG.error("bulkUpdate of size {} failed with: {}", 
updateOps.size(),
+                    bsonException.getMessage(), bsonException);
+
+            // add diagnostics
+            String idOfbiggestUpdate = "";
+            int estimatedSizeOfBiggestUpdate = 0;
+
+            for (UpdateOp updateOp : updateOps) {
+                String id = updateOp.getId();
+                // this could be made more precise by measuring the BSON serialization of
+                // conditions and updates
+                int estimatedSize = updateOp.toString().length();
+                LOG.debug("after bulk write: string serialization of changes 
for id={} had an approximate size of {}",
+                        id, estimatedSize);
+                if (estimatedSize > estimatedSizeOfBiggestUpdate) {
+                    idOfbiggestUpdate = id;
+                    estimatedSizeOfBiggestUpdate = estimatedSize;
+                }
+            }
+            LOG.error("bulkUpdate of size {} failed with: {}; biggest update was for id={} with approximate size of {}",
+                    updateOps.size(), bsonException.getMessage(), idOfbiggestUpdate, estimatedSizeOfBiggestUpdate,
+                    bsonException);
+            // rethrow
+            throw bsonException;
         }
+
         for (BulkWriteUpsert upsert : bulkResult.getUpserts()) {
             upserts.add(bulkIds[upsert.getIndex()]);
         }
diff --git 
a/oak-store-document/src/test/java/org/apache/jackrabbit/oak/plugins/document/mongo/MongoDBExceptionTest.java
 
b/oak-store-document/src/test/java/org/apache/jackrabbit/oak/plugins/document/mongo/MongoDBExceptionTest.java
index 5dbe01d66b..d6e7cb264e 100644
--- 
a/oak-store-document/src/test/java/org/apache/jackrabbit/oak/plugins/document/mongo/MongoDBExceptionTest.java
+++ 
b/oak-store-document/src/test/java/org/apache/jackrabbit/oak/plugins/document/mongo/MongoDBExceptionTest.java
@@ -16,6 +16,7 @@
  */
 package org.apache.jackrabbit.oak.plugins.document.mongo;
 
+import org.apache.commons.lang3.RandomStringUtils;
 import org.apache.jackrabbit.oak.plugins.document.Collection;
 import org.apache.jackrabbit.oak.plugins.document.DocumentMK;
 import org.apache.jackrabbit.oak.plugins.document.DocumentStoreException;
@@ -26,22 +27,25 @@ import org.apache.jackrabbit.oak.plugins.document.Revision;
 import org.apache.jackrabbit.oak.plugins.document.UpdateOp;
 import org.apache.jackrabbit.oak.plugins.document.util.Utils;
 import org.apache.jackrabbit.oak.commons.junit.LogCustomizer;
+import org.bson.BSONException;
 import org.junit.After;
 import org.junit.Assume;
 import org.junit.Before;
 import org.junit.BeforeClass;
 import org.junit.Test;
+import org.slf4j.event.Level;
 
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
 
 import static java.util.Collections.singletonList;
+import static org.hamcrest.MatcherAssert.assertThat;
 import static org.hamcrest.Matchers.containsString;
-import static org.junit.Assert.assertThat;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
-import static org.junit.Assert.assertFalse;
 
 public class MongoDBExceptionTest {
 
@@ -157,6 +161,61 @@ public class MongoDBExceptionTest {
         customizer.finished();
     }
 
+    @Test
+    public void createOrUpdate16MBBatchWithMultiDocs() {
+        LogCustomizer log = 
LogCustomizer.forLogger(MongoDocumentStore.class.getName()).
+                enable(Level.ERROR).
+                matchesRegex("bulkUpdate.*biggest update.*approximate.*").
+                create();
+
+        try {
+            log.starting();
+            List<String> ids = new ArrayList<>();
+            List<UpdateOp> updateOps = new ArrayList<>();
+
+            String idOfReallyBig = "foo-really-big";
+
+            {
+                String id = "/foo-1MB";
+                ids.add(id);
+                UpdateOp updateOp = new UpdateOp(id, true);
+                updateOp = create1MBProp(updateOp);
+                updateOps.add(updateOp);
+            }
+            {
+                String id = idOfReallyBig;
+                ids.add(id);
+                UpdateOp updateOp = new UpdateOp(id, true);
+                updateOp.set("big", RandomStringUtils.secure().next(20 * 1024 
* 1024));
+                updateOps.add(updateOp);
+            }
+            {
+                String id = "/foo-small";
+                ids.add(id);
+                UpdateOp updateOp = new UpdateOp(id, true);
+                updateOps.add(updateOp);
+            }
+
+            store.remove(Collection.NODES, ids);
+
+            try {
+                store.createOrUpdate(Collection.NODES, updateOps);
+                fail("createOrUpdate(many with one >16MB) should have failed");
+            } catch (BSONException expected) {
+                // currently expected but incorrect -> OAK-12113
+                List<String> messages = log.getLogs();
+                assertEquals("only 1 message expected, but got: " + 
messages.size(),
+                        1, messages.size());
+                String message = messages.get(0);
+                assertTrue("log message should contain id " + idOfReallyBig + ", got: " + message,
+                        message.contains(idOfReallyBig));
+            }
+
+        } finally {
+            log.finished();
+        }
+    }
+
     @Test
     public void update16MBDoc() {
 
@@ -263,7 +322,7 @@ public class MongoDBExceptionTest {
     private UpdateOp create16MBProp(UpdateOp op) {
         // create a 1 MB property
         String content = create1MBContent();
-        
+
 
         //create 16MB property
         for (int i = 0; i < 16; i++) {
@@ -272,6 +331,7 @@ public class MongoDBExceptionTest {
         return op;
     }
 
+    // RED ALERT: OAK-12114
     private String create1MBContent() {
         char[] chars = new char[1024 * 1024];
         Arrays.fill(chars, '0');

Reply via email to