This is an automated email from the ASF dual-hosted git repository.

reschke pushed a commit to branch OAK-12109
in repository https://gitbox.apache.org/repos/asf/jackrabbit-oak.git

commit cde031713a1f2c4be48c9ba14111dba8becf0234
Author: Julian Reschke <[email protected]>
AuthorDate: Tue Feb 24 17:22:15 2026 +0100

    OAK-12109: MongoDocumentStore: improve diagnostics for too large docs in 
bulk request payloads
---
 .../plugins/document/mongo/MongoDocumentStore.java | 25 ++++++++++++++++++++++
 1 file changed, 25 insertions(+)

diff --git 
a/oak-store-document/src/main/java/org/apache/jackrabbit/oak/plugins/document/mongo/MongoDocumentStore.java
 
b/oak-store-document/src/main/java/org/apache/jackrabbit/oak/plugins/document/mongo/MongoDocumentStore.java
index 3b5c7b2c1b..ba0bb3f300 100644
--- 
a/oak-store-document/src/main/java/org/apache/jackrabbit/oak/plugins/document/mongo/MongoDocumentStore.java
+++ 
b/oak-store-document/src/main/java/org/apache/jackrabbit/oak/plugins/document/mongo/MongoDocumentStore.java
@@ -1651,7 +1651,32 @@ public class MongoDocumentStore implements DocumentStore 
{
             for (BulkWriteError err : e.getWriteErrors()) {
                 failedUpdates.add(bulkIds[err.getIndex()]);
             }
+        } catch (BSONException bsonException) {
+            LOG.error("bulkUpdate of size {} failed with: {}", 
updateOps.size(),
+                    bsonException.getMessage(), bsonException);
+
+            // add diagnostics
+            String idOfbiggestUpdate = "";
+            int estimatedSizeOfBiggestUpdate = 0;
+
+            for (UpdateOp updateOp : updateOps) {
+                String id = updateOp.getId();
+                // this could be made more precise by measuring the BSON 
serialization of
+                // conditions and updates
+                int estimatedSize = updateOp.toString().length();
+                LOG.debug("after bulk write: string serialization of changes 
for id={} had an approximate size of {}",
+                        id, estimatedSize);
+                if (estimatedSize > estimatedSizeOfBiggestUpdate) {
+                    idOfbiggestUpdate = id;
+                    estimatedSizeOfBiggestUpdate = estimatedSize;
+                }
+            }
+            LOG.error("bulkUpdate failure: biggest update was for id={} with 
approximate size of {}",
+                    idOfbiggestUpdate, estimatedSizeOfBiggestUpdate);
+            // rethrow
+            throw bsonException;
         }
+
         for (BulkWriteUpsert upsert : bulkResult.getUpserts()) {
             upserts.add(bulkIds[upsert.getIndex()]);
         }

Reply via email to