[ https://issues.apache.org/jira/browse/OAK-8602?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]
zhouxu updated OAK-8602: ------------------------ Attachment: 5.jpg 4.jpg 3.jpg 2.jpg 1.jpg > javax.jcr.RepositoryException: OakOak0001: GC overhead limit exceeded > --------------------------------------------------------------------- > > Key: OAK-8602 > URL: https://issues.apache.org/jira/browse/OAK-8602 > Project: Jackrabbit Oak > Issue Type: Bug > Affects Versions: 1.14.0 > Environment: I am running the Oak instance on my local Windows machine > and the version of Oak is 1.14.0. The DocumentNodeStore is MongoDB and the > IndexProvider is SolrIndexProvider with async index. > Reporter: zhouxu > Priority: Major > Attachments: 1.jpg, 2.jpg, 3.jpg, 4.jpg, 5.jpg > > > This problem occurs while I am importing a large amount of data into the repository. I use > JMeter to import data in 5 threads and the max loop count of each thread is > 200000. When the sample count reaches about 20000, Oak throws the > javax.jcr.RepositoryException: OakOak0001: GC overhead limit exceeded. The > complete exception information is as follows: > {color:#FF0000}javax.jcr.RepositoryException: OakOak0001: GC overhead limit > exceeded > at > org.apache.jackrabbit.oak.api.CommitFailedException.asRepositoryException(CommitFailedException.java:250) > at > org.apache.jackrabbit.oak.api.CommitFailedException.asRepositoryException(CommitFailedException.java:213) > at > org.apache.jackrabbit.oak.jcr.delegate.SessionDelegate.newRepositoryException(SessionDelegate.java:669) > at > org.apache.jackrabbit.oak.jcr.delegate.SessionDelegate.save(SessionDelegate.java:495) > at > org.apache.jackrabbit.oak.jcr.session.SessionImpl$8.performVoid(SessionImpl.java:420) > at > org.apache.jackrabbit.oak.jcr.delegate.SessionDelegate.performVoid(SessionDelegate.java:273) > at > org.apache.jackrabbit.oak.jcr.session.SessionImpl.save(SessionImpl.java:417) > at cn.amberdata.afc.domain.permission.AfACL.grantJcrPermit(AfACL.java:215) > at cn.amberdata.afc.domain.permission.AfACL.grant(AfACL.java:182) > at 
cn.amberdata.afc.domain.permission.AfACL.grantPermit(AfACL.java:235) > at > cn.amberdata.afc.domain.object.AfPersistentObject.setACL(AfPersistentObject.java:381) > at > cn.amberdata.afc.common.util.ACLUtils.doSetAclToTargetFormSource(ACLUtils.java:62) > at > cn.amberdata.afc.common.util.ACLUtils.setAclToTargetFormSource(ACLUtils.java:40) > at > cn.amberdata.common.core.persistence.dao.impl.AsyncTaskDaoImpl.extendsParentAcl(AsyncTaskDaoImpl.java:105) > at sun.reflect.GeneratedMethodAccessor167.invoke(Unknown Source) > at sun.reflect.DelegatingMethodAccessorImpl.invoke(Unknown Source) > at java.lang.reflect.Method.invoke(Unknown Source) > at > org.springframework.aop.support.AopUtils.invokeJoinpointUsingReflection(AopUtils.java:343) > at > org.springframework.aop.framework.ReflectiveMethodInvocation.invokeJoinpoint(ReflectiveMethodInvocation.java:197) > at > org.springframework.aop.framework.ReflectiveMethodInvocation.proceed(ReflectiveMethodInvocation.java:163) > at > org.springframework.aop.interceptor.AsyncExecutionInterceptor.lambda$invoke$0(AsyncExecutionInterceptor.java:115) > at java.util.concurrent.FutureTask.run$$$capture(Unknown Source) > at java.util.concurrent.FutureTask.run(Unknown Source) > at java.lang.Thread.run(Unknown Source) > Caused by: org.apache.jackrabbit.oak.api.CommitFailedException: OakOak0001: > GC overhead limit exceeded > at > org.apache.jackrabbit.oak.plugins.document.DocumentNodeStoreBranch.mergeFailed(DocumentNodeStoreBranch.java:342) > at > org.apache.jackrabbit.oak.plugins.document.DocumentNodeStoreBranch.access$600(DocumentNodeStoreBranch.java:56) > at > org.apache.jackrabbit.oak.plugins.document.DocumentNodeStoreBranch$InMemory.merge(DocumentNodeStoreBranch.java:554) > at > org.apache.jackrabbit.oak.plugins.document.DocumentNodeStoreBranch.merge0(DocumentNodeStoreBranch.java:196) > at > org.apache.jackrabbit.oak.plugins.document.DocumentNodeStoreBranch.merge(DocumentNodeStoreBranch.java:120) > at > 
org.apache.jackrabbit.oak.plugins.document.DocumentRootBuilder.merge(DocumentRootBuilder.java:170) > at > org.apache.jackrabbit.oak.plugins.document.DocumentNodeStore.merge(DocumentNodeStore.java:1875) > at org.apache.jackrabbit.oak.core.MutableRoot.commit(MutableRoot.java:251) > at > org.apache.jackrabbit.oak.jcr.delegate.SessionDelegate.commit(SessionDelegate.java:346) > at > org.apache.jackrabbit.oak.jcr.delegate.SessionDelegate.save(SessionDelegate.java:493) > ... 20 more > Caused by: java.lang.OutOfMemoryError: GC overhead limit exceeded > at java.util.Vector.<init>(Unknown Source) > at java.util.Vector.<init>(Unknown Source) > at java.util.Vector.<init>(Unknown Source) > at java.util.Stack.<init>(Unknown Source) > at org.bson.AbstractBsonWriter.<init>(AbstractBsonWriter.java:38) > at org.bson.AbstractBsonWriter.<init>(AbstractBsonWriter.java:50) > at org.bson.BsonDocumentWriter.<init>(BsonDocumentWriter.java:44) > at org.bson.BsonDocumentWrapper.getUnwrapped(BsonDocumentWrapper.java:194) > at org.bson.BsonDocumentWrapper.isEmpty(BsonDocumentWrapper.java:115) > at > com.mongodb.operation.BulkWriteBatch$WriteRequestEncoder.encode(BulkWriteBatch.java:395) > at > com.mongodb.operation.BulkWriteBatch$WriteRequestEncoder.encode(BulkWriteBatch.java:377) > at > org.bson.codecs.BsonDocumentWrapperCodec.encode(BsonDocumentWrapperCodec.java:63) > at > org.bson.codecs.BsonDocumentWrapperCodec.encode(BsonDocumentWrapperCodec.java:29) > at > com.mongodb.internal.connection.BsonWriterHelper.writeDocument(BsonWriterHelper.java:75) > at > com.mongodb.internal.connection.BsonWriterHelper.writePayload(BsonWriterHelper.java:59) > at > com.mongodb.internal.connection.CommandMessage.encodeMessageBodyWithMetadata(CommandMessage.java:143) > at > com.mongodb.internal.connection.RequestMessage.encode(RequestMessage.java:138) > at > com.mongodb.internal.connection.CommandMessage.encode(CommandMessage.java:57) > at > 
com.mongodb.internal.connection.InternalStreamConnection.sendAndReceive(InternalStreamConnection.java:244) > at > com.mongodb.internal.connection.UsageTrackingInternalConnection.sendAndReceive(UsageTrackingInternalConnection.java:99) > at > com.mongodb.internal.connection.DefaultConnectionPool$PooledConnection.sendAndReceive(DefaultConnectionPool.java:444) > at > com.mongodb.internal.connection.CommandProtocolImpl.execute(CommandProtocolImpl.java:72) > at > com.mongodb.internal.connection.DefaultServer$DefaultServerProtocolExecutor.execute(DefaultServer.java:200) > at > com.mongodb.internal.connection.DefaultServerConnection.executeProtocol(DefaultServerConnection.java:269) > at > com.mongodb.internal.connection.DefaultServerConnection.command(DefaultServerConnection.java:131) > at > com.mongodb.operation.MixedBulkWriteOperation.executeCommand(MixedBulkWriteOperation.java:419) > at > com.mongodb.operation.MixedBulkWriteOperation.executeBulkWriteBatch(MixedBulkWriteOperation.java:257) > at > com.mongodb.operation.MixedBulkWriteOperation.access$700(MixedBulkWriteOperation.java:68) > at > com.mongodb.operation.MixedBulkWriteOperation$1.call(MixedBulkWriteOperation.java:201) > at > com.mongodb.operation.MixedBulkWriteOperation$1.call(MixedBulkWriteOperation.java:192) > at > com.mongodb.operation.OperationHelper.withReleasableConnection(OperationHelper.java:424) > at > com.mongodb.operation.MixedBulkWriteOperation.execute(MixedBulkWriteOperation.java:192){color} -- This message was sent by Atlassian Jira (v8.3.2#803003)