Hi,
I get this kind of exception on the majority of the Oak nodes when I'm trying
to write concurrently into a MongoDB cluster [0]. This seems to happen only with
some node structures and does not reproduce every time.
I created a small test that can reproduce the issue; you can find it in the
attachment. Just make sure that you have a local MongoDB instance running on
port 27017.
Is this a known issue? Or am I doing something wrong?
Thanks,
Tudor
[0]
javax.jcr.nodetype.ConstraintViolationException
at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
at
sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:39)
at
sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:27)
at java.lang.reflect.Constructor.newInstance(Constructor.java:513)
at
org.apache.jackrabbit.oak.api.CommitFailedException.throwRepositoryException(CommitFailedException.java:57)
at
org.apache.jackrabbit.oak.jcr.delegate.SessionDelegate.save(SessionDelegate.java:234)
at org.apache.jackrabbit.oak.jcr.SessionImpl.save(SessionImpl.java:320)
at
org.apache.jackrabbit.oakmongomk.RepoWrite.run(ConstraintViolationExceptionTest.java:83)
at
java.util.concurrent.ThreadPoolExecutor$Worker.runTask(ThreadPoolExecutor.java:886)
at
java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:908)
at java.lang.Thread.run(Thread.java:662)
Caused by: org.apache.jackrabbit.oak.api.CommitFailedException:
javax.jcr.nodetype.ConstraintViolationException: Incorrect node type of child
node nodeZ442730
at
org.apache.jackrabbit.oak.plugins.nodetype.TypeEditor.constraintViolation(TypeEditor.java:158)
at
org.apache.jackrabbit.oak.plugins.nodetype.TypeEditor.getDefinition(TypeEditor.java:381)
at
org.apache.jackrabbit.oak.plugins.nodetype.TypeEditor.enter(TypeEditor.java:101)
at
org.apache.jackrabbit.oak.spi.commit.VisibleEditor.enter(VisibleEditor.java:58)
at
org.apache.jackrabbit.oak.spi.commit.CompositeEditor.enter(CompositeEditor.java:66)
at
org.apache.jackrabbit.oak.spi.commit.EditorHook$EditorDiff.process(EditorHook.java:104)
at
org.apache.jackrabbit.oak.spi.commit.EditorHook.process(EditorHook.java:80)
at
org.apache.jackrabbit.oak.spi.commit.EditorHook.access$0(EditorHook.java:73)
at
org.apache.jackrabbit.oak.spi.commit.EditorHook$EditorDiff.childNodeAdded(EditorHook.java:163)
at
org.apache.jackrabbit.oak.plugins.memory.ModifiedNodeState.compareAgainstBaseState(ModifiedNodeState.java:335)
at
org.apache.jackrabbit.oak.spi.commit.EditorHook$EditorDiff.process(EditorHook.java:109)
at
org.apache.jackrabbit.oak.spi.commit.EditorHook.process(EditorHook.java:80)
at
org.apache.jackrabbit.oak.spi.commit.EditorHook.access$0(EditorHook.java:73)
at
org.apache.jackrabbit.oak.spi.commit.EditorHook$EditorDiff.childNodeAdded(EditorHook.java:163)
at
org.apache.jackrabbit.oak.plugins.memory.ModifiedNodeState.compareAgainstBaseState(ModifiedNodeState.java:335)
at
org.apache.jackrabbit.oak.spi.commit.EditorHook$EditorDiff.process(EditorHook.java:109)
at
org.apache.jackrabbit.oak.spi.commit.EditorHook.process(EditorHook.java:80)
at
org.apache.jackrabbit.oak.spi.commit.EditorHook.access$0(EditorHook.java:73)
at
org.apache.jackrabbit.oak.spi.commit.EditorHook$EditorDiff.childNodeAdded(EditorHook.java:163)
at
org.apache.jackrabbit.oak.plugins.memory.ModifiedNodeState.compareAgainstBaseState(ModifiedNodeState.java:335)
at
org.apache.jackrabbit.oak.spi.commit.EditorHook$EditorDiff.process(EditorHook.java:109)
at
org.apache.jackrabbit.oak.spi.commit.EditorHook.process(EditorHook.java:80)
at
org.apache.jackrabbit.oak.spi.commit.EditorHook.access$0(EditorHook.java:73)
at
org.apache.jackrabbit.oak.spi.commit.EditorHook$EditorDiff.childNodeChanged(EditorHook.java:176)
at
org.apache.jackrabbit.oak.plugins.memory.ModifiedNodeState.compareAgainstBaseState(ModifiedNodeState.java:337)
at
org.apache.jackrabbit.oak.spi.commit.EditorHook$EditorDiff.process(EditorHook.java:109)
at
org.apache.jackrabbit.oak.spi.commit.EditorHook.process(EditorHook.java:80)
at
org.apache.jackrabbit.oak.spi.commit.EditorHook.processCommit(EditorHook.java:54)
at
org.apache.jackrabbit.oak.spi.commit.CompositeHook.processCommit(CompositeHook.java:59)
at
org.apache.jackrabbit.oak.spi.commit.CompositeHook.processCommit(CompositeHook.java:59)
at
org.apache.jackrabbit.oak.kernel.KernelNodeStoreBranch.merge(KernelNodeStoreBranch.java:144)
at org.apache.jackrabbit.oak.core.RootImpl$2.run(RootImpl.java:278)
at org.apache.jackrabbit.oak.core.RootImpl$2.run(RootImpl.java:1)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Subject.java:337)
at org.apache.jackrabbit.oak.core.RootImpl.commit(RootImpl.java:273)
at
org.apache.jackrabbit.oak.jcr.delegate.SessionDelegate.save(SessionDelegate.java:231)
... 5 more
Caused by: javax.jcr.nodetype.ConstraintViolationException: Incorrect node type
of child node nodeZ442730
at
org.apache.jackrabbit.oak.plugins.nodetype.TypeEditor.constraintViolation(TypeEditor.java:159)
... 41 more
package org.apache.jackrabbit.oakmongomk;
import java.util.Random;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import javax.jcr.Node;
import javax.jcr.Repository;
import javax.jcr.Session;
import javax.jcr.SimpleCredentials;
import org.apache.jackrabbit.mongomk.impl.MongoConnection;
import org.apache.jackrabbit.mongomk.prototype.MongoMK;
import org.apache.jackrabbit.oak.jcr.Jcr;
import org.junit.Before;
import org.junit.Test;
/**
 * Attempts to reproduce a ConstraintViolationException observed when several
 * sessions write concurrently to the same MongoDB-backed Oak repository.
 * <p>
 * Requires a local MongoDB instance listening on port 27017.
 */
public class ConstraintViolationExceptionTest {

    // Number of repositories / writer threads racing against each other.
    int concurrentWriters = 2;
    Repository[] repos;
    ExecutorService threadExecutor;

    @Before
    public void before() throws Exception {
        threadExecutor = Executors.newFixedThreadPool(concurrentWriters);
        repos = new Repository[concurrentWriters];
        // Each writer gets its own repository instance backed by the same
        // MongoDB database ("testDataBase"), simulating separate cluster nodes.
        for (int n = 0; n < concurrentWriters; n++) {
            MongoConnection connection =
                    new MongoConnection("localhost", 27017, "testDataBase");
            repos[n] = new Jcr(new MongoMK(connection.getDB(), 1))
                    .createRepository();
        }
    }

    @Test
    public void testConcurrentWriting() throws InterruptedException {
        // Launch one writer per repository, then wait for all of them to finish.
        for (int n = 0; n < concurrentWriters; n++) {
            threadExecutor.execute(new RepoWrite(repos[n]));
        }
        threadExecutor.shutdown();
        threadExecutor.awaitTermination(Long.MAX_VALUE, TimeUnit.NANOSECONDS);
    }
}
/**
 * Writer task: builds a deep tree of {@code nt:folder} nodes under the root
 * and saves the session after each batch of 1000 leaf nodes, printing the
 * elapsed save time in milliseconds.
 */
class RepoWrite implements Runnable {

    Repository repo;
    // Pseudo-random id (0..999) used to keep the node names of concurrent
    // writers distinct. NOTE(review): two writers can still draw the same id —
    // use a unique counter if guaranteed-distinct names are required.
    String clusterNodeId;

    public RepoWrite(Repository repo) {
        Random randomGenerator = new Random();
        this.repo = repo;
        clusterNodeId = Integer.toString(randomGenerator.nextInt(1000));
    }

    @Override
    public void run() {
        Session adminSession = null;
        try {
            adminSession = repo.login(new SimpleCredentials("admin", "admin"
                    .toCharArray()));
            Node root = adminSession.getRootNode();
            for (int k = 0; k < 10; k++) {
                Node nk = root
                        .addNode("nodeR" + clusterNodeId + k, "nt:folder");
                for (int j = 0; j < 50; j++) {
                    Node nj = nk.addNode("nodeX" + clusterNodeId + j,
                            "nt:folder");
                    for (int i = 0; i < 10; i++) {
                        Node ni = nj.addNode("nodeY" + clusterNodeId + i,
                                "nt:folder");
                        for (int l = 0; l < 1000; l++) {
                            ni.addNode("nodeZ" + clusterNodeId + l, "nt:folder");
                        }
                        long startTime = System.nanoTime();
                        adminSession.save();
                        // Bug fix: the original expression
                        //   (System.nanoTime() - startTime * 1.0) / 1000000
                        // multiplied startTime by 1.0 BEFORE subtracting, so
                        // the printed duration was meaningless. Subtract first,
                        // then convert nanoseconds to milliseconds.
                        System.out
                                .println(String.format(
                                        "ClusterNode %1$s %2$f",
                                        clusterNodeId,
                                        (System.nanoTime() - startTime) / 1000000.0));
                    }
                }
            }
        } catch (Exception e) {
            // Best-effort test task: report the failure but let the executor
            // continue with the other writers.
            e.printStackTrace();
        } finally {
            // Fix: release the session so repeated runs do not leak logins.
            if (adminSession != null) {
                adminSession.logout();
            }
        }
    }
}