Hi Team,

I am facing the exception below when adding a node:

Caused by: org.apache.jackrabbit.oak.api.CommitFailedException: OakMerge0001:
OakMerge0001: failed update of 2:/OCB/odata (race?) after 10 retries (retries 5, 30012 ms)
    at org.apache.jackrabbit.oak.plugins.document.DocumentNodeStoreBranch.merge0(DocumentNodeStoreBranch.java:222)
    at org.apache.jackrabbit.oak.plugins.document.DocumentNodeStoreBranch.merge(DocumentNodeStoreBranch.java:129)
    at org.apache.jackrabbit.oak.plugins.document.DocumentRootBuilder.merge(DocumentRootBuilder.java:170)
    at org.apache.jackrabbit.oak.plugins.document.DocumentNodeStore.merge(DocumentNodeStore.java:1987)
    at org.apache.jackrabbit.oak.core.MutableRoot.commit(MutableRoot.java:252)
    at org.apache.jackrabbit.oak.core.MutableRoot.commit(MutableRoot.java:263)
    ... 246 common frames omitted
Caused by: org.apache.jackrabbit.oak.plugins.document.DocumentStoreException: failed update of 2:/OCB/odata (race?) after 10 retries
    at org.apache.jackrabbit.oak.plugins.document.rdb.RDBDocumentStore.internalUpdate(RDBDocumentStore.java:1729)
    at org.apache.jackrabbit.oak.plugins.document.rdb.RDBDocumentStore.internalCreateOrUpdate(RDBDocumentStore.java:1652)
    at org.apache.jackrabbit.oak.plugins.document.rdb.RDBDocumentStore.createOrUpdate(RDBDocumentStore.java:367)

How can I avoid such issues? I have two cluster nodes connecting to the same repository.

Please review my code and suggest a solution to avoid OakMerge0001.

//LuceneCompatModeInitializer
import java.util.Arrays;
import java.util.Set;
import org.apache.jackrabbit.oak.api.Type;
import org.apache.jackrabbit.oak.plugins.index.lucene.util.LuceneInitializerHelper;
import org.apache.jackrabbit.oak.spi.state.NodeBuilder;

class LuceneCompatModeInitializer extends LuceneInitializerHelper {
    private final String name;

    public LuceneCompatModeInitializer(String name, Set<String> propertyTypes) {
        super(name, propertyTypes);
        this.name = name;
    }

    public void initialize(NodeBuilder builder) {
        if (!builder.hasChildNode("oak:index")
                || !builder.getChildNode("oak:index").hasChildNode(this.name)) {
            NodeBuilder index = builder.child("oak:index").child(this.name);
            index
                .setProperty("jcr:primaryType", "oak:QueryIndexDefinition", Type.NAME)
                .setProperty("type", "lucene")
                .setProperty("reindex", true)
                .setProperty("compatVersion", 2L, Type.LONG)
                .setProperty("async", "fulltext-async")
                .setProperty("supersedes",
                    Arrays.asList(ExperienceManagerConstant.LUCENE_SUPERSEEDED_INDEX),
                    Type.STRINGS)
                .setProperty("excludedPaths",
                    Arrays.asList(ExperienceManagerConstant.LUCENE_INDEX_EXCLUDE_SUBTREE),
                    Type.STRINGS)
                .setProperty("evaluatePathRestrictions", true);
            index
                .child("suggestion")
                .setProperty("jcr:primaryType", "nt:unstructured", Type.NAME)
                .setProperty("suggestUpdateFrequencyMinutes", 10);
            NodeBuilder rules = index.child("indexRules");
            rules.setProperty("jcr:primaryType", "nt:unstructured", Type.NAME);
            NodeBuilder ntBase = rules.child("ocb:base");
            ntBase.setProperty("jcr:primaryType", "nt:unstructured", Type.NAME);
            ntBase.setProperty("indexNodeName", true);
            NodeBuilder props = ntBase.child("properties");
            props.setProperty("jcr:primaryType", "nt:unstructured", Type.NAME);
            functionBasedIndex(props, "upper(name())");
            functionBasedIndex(props, "lower(name())");
            functionBasedIndex(props, "upper(localname())");
            functionBasedIndex(props, "lower(localname())");
            functionBasedIndex(props, "upper([prop1])");
            functionBasedIndex(props, "lower([prop1])");
            enableFulltextIndex(props.child("allProps"));
        }
    }

    private void enableFulltextIndex(NodeBuilder propNode) {
        propNode
            .setProperty("jcr:primaryType", "nt:unstructured", Type.NAME)
            .setProperty("analyzed", true)
            .setProperty("nodeScopeIndex", true)
            .setProperty("useInExcerpt", true)
            .setProperty("propertyIndex", true)
            .setProperty("useInSpellcheck", true)
            .setProperty("useInSuggest", true)
            .setProperty("name", "^(ocb:|jcr:)[\\w]*$")
            .setProperty("isRegexp", true);
    }

    private static void functionBasedIndex(NodeBuilder props, String function) {
        props
            .child(function.replace('[', '_').replace(']', '_'))
            .setProperty("jcr:primaryType", "nt:unstructured", Type.NAME)
            .setProperty("function", function);
    }
}

//LucenePropertyIndexInitializer
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;
import org.apache.jackrabbit.oak.api.Type;
import org.apache.jackrabbit.oak.plugins.index.lucene.util.LuceneIndexHelper;
import org.apache.jackrabbit.oak.plugins.index.lucene.util.LuceneInitializerHelper;
import org.apache.jackrabbit.oak.spi.state.NodeBuilder;

public class LucenePropertyIndexInitializer extends LuceneInitializerHelper {
    private String name;

    public LucenePropertyIndexInitializer(String name, Set<String> propertyTypes) {
        super(name, propertyTypes);
        this.name = name;
    }

    public void initialize(NodeBuilder builder) {
        if (!builder.hasChildNode("oak:index")
                || !builder.getChildNode("oak:index").hasChildNode(this.name)) {
            Set<String> propSet = new HashSet<>(
                Arrays.asList(ExperienceManagerConstant.PROPERTY_INDEX_PROPERTIES));
            NodeBuilder index = LuceneIndexHelper.newLucenePropertyIndexDefinition(
                builder.getChildNode("oak:index"), this.name, propSet, "async");
            index.setProperty("evaluatePathRestrictions", true);
            index.setProperty("excludedPaths",
                Arrays.asList(ExperienceManagerConstant.LUCENE_INDEX_EXCLUDE_SUBTREE),
                Type.STRINGS);
        }
    }
}

//LuceneRepository
import static com.google.common.base.Preconditions.checkNotNull;
import static org.apache.jackrabbit.oak.plugins.index.IndexConstants.INDEX_DEFINITIONS_NAME;

import com.google.common.collect.Lists;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Set;
import org.apache.jackrabbit.oak.Oak;
import org.apache.jackrabbit.oak.api.CommitFailedException;
import org.apache.jackrabbit.oak.api.ContentRepository;
import org.apache.jackrabbit.oak.api.Type;
import org.apache.jackrabbit.oak.plugins.index.IndexEditorProvider;
import org.apache.jackrabbit.oak.plugins.index.aggregate.SimpleNodeAggregator;
import org.apache.jackrabbit.oak.plugins.index.counter.NodeCounterEditorProvider;
import org.apache.jackrabbit.oak.plugins.index.lucene.LuceneIndexEditorProvider;
import org.apache.jackrabbit.oak.plugins.index.lucene.LuceneIndexProvider;
import org.apache.jackrabbit.oak.plugins.index.lucene.util.LuceneInitializerHelper;
import org.apache.jackrabbit.oak.plugins.index.property.PropertyIndexEditorProvider;
import org.apache.jackrabbit.oak.plugins.index.property.PropertyIndexProvider;
import org.apache.jackrabbit.oak.plugins.nodetype.TypeEditorProvider;
import org.apache.jackrabbit.oak.spi.commit.EditorProvider;
import org.apache.jackrabbit.oak.spi.commit.Observer;
import org.apache.jackrabbit.oak.spi.lifecycle.RepositoryInitializer;
import org.apache.jackrabbit.oak.spi.query.QueryIndex;
import org.apache.jackrabbit.oak.spi.query.QueryIndexProvider;
import org.apache.jackrabbit.oak.spi.security.OpenSecurityProvider;
import org.apache.jackrabbit.oak.spi.security.SecurityProvider;
import org.apache.jackrabbit.oak.spi.state.NodeBuilder;
import org.apache.jackrabbit.oak.spi.state.NodeStore;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class LuceneRepository {
    private static final Logger logger = LoggerFactory.getLogger(LuceneRepository.class);

    public static ContentRepository createRepositoryWithLuceneIndex(
            NodeStore documentNodeStoreService, RepositoryInitializer indexInitializer,
            String workspace, HashMap<String, String> extra) throws CommitFailedException {
        LuceneIndexProvider provider = new LuceneIndexProvider().with(getNodeAggregator());
        String methodName = "LuceneRepository.createRepositoryWithLuceneIndex : ";
        PropertyIndexProvider propertyIndexProvider = new PropertyIndexProvider();
        Oak oak = new Oak(documentNodeStoreService)
            .with((RepositoryInitializer) new LuceneCompatModeInitializer("luceneFreetext3", (Set<String>) null))
            .with((RepositoryInitializer) new LucenePropertyIndexInitializer("luceneGlobal", null))
            .with(indexInitializer)
            .with((QueryIndexProvider) provider)
            .with((Observer) provider)
            .with((SecurityProvider) new OpenSecurityProvider())
            .with((IndexEditorProvider) new LuceneIndexEditorProvider())
            .with((QueryIndexProvider) propertyIndexProvider)
            .with((IndexEditorProvider) new PropertyIndexEditorProvider())
            .with(workspace)
            .with((EditorProvider) new TypeEditorProvider())
            .with((IndexEditorProvider) new NodeCounterEditorProvider());
        if (extra != null) {
            if (extra.containsKey("ocb.oak.indexInterval.async")
                    && !"0".equalsIgnoreCase(extra.get("ocb.oak.indexInterval.async").trim())) {
                oak.withAsyncIndexing("async",
                    Integer.parseInt(extra.get("ocb.oak.indexInterval.async")));
                logger.info(methodName + "Indexer interval is "
                    + extra.get("ocb.oak.indexInterval.async") + " for lane async");
            } else {
                logger.warn(methodName + "Indexer disabled for lane async");
            }
            if (extra.containsKey("ocb.oak.indexInterval.fulltext")
                    && !"0".equalsIgnoreCase(extra.get("ocb.oak.indexInterval.fulltext").trim())) {
                oak.withAsyncIndexing("fulltext-async",
                    Integer.parseInt(extra.get("ocb.oak.indexInterval.fulltext")));
                logger.info(methodName + "Indexer interval is "
                    + extra.get("ocb.oak.indexInterval.fulltext") + " for lane fulltext-async");
            } else {
                logger.warn(methodName + "Indexer disabled for lane fulltext-async");
            }
        }
        return oak.createContentRepository();
    }

    private static QueryIndex.NodeAggregator getNodeAggregator() {
        return (QueryIndex.NodeAggregator) new SimpleNodeAggregator()
            .newRuleWithName("nt:file", Lists.newArrayList("jcr:content", "jcr:content/*"));
    }

    /** Helper class to initialize the property index definition. */
    static class IndexInitializer implements RepositoryInitializer {
        private String indexType;
        private String name;
        private Set<String> properties;

        public IndexInitializer(final String indexType, final String name,
                final Set<String> properties) {
            this.indexType = indexType;
            this.name = checkNotNull(name);
            this.properties = checkNotNull(properties);
        }

        private boolean isAlreadyThere(final NodeBuilder root) {
            return checkNotNull(root).hasChildNode(INDEX_DEFINITIONS_NAME)
                && root.getChildNode(INDEX_DEFINITIONS_NAME).hasChildNode(name);
        }

        @Override
        public void initialize(final NodeBuilder builder) {
            if (!isAlreadyThere(builder)) {
                if (indexType.equals("luceneIndex")) {
                    addLuceneIndex(builder, name, properties);
                }
                if (indexType.equals("propertyIndex")) {
                    addPropertyIndex(builder, name, properties);
                }
            }
        }
    } // end of helper class
} // end of LuceneRepository
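
Note: the addLuceneIndex and addPropertyIndex helpers referenced above are not included in this post. Purely as an illustration, here is a minimal sketch of what addPropertyIndex could look like if it delegated to Oak's IndexUtils; the class name and the exact behaviour here are assumptions, not my actual helper:

//PropertyIndexSketch (hypothetical, for illustration only)
import static org.apache.jackrabbit.oak.plugins.index.IndexConstants.INDEX_DEFINITIONS_NAME;

import java.util.Set;
import org.apache.jackrabbit.oak.plugins.index.IndexUtils;
import org.apache.jackrabbit.oak.spi.state.NodeBuilder;

class PropertyIndexSketch {
    // Sketch only: creates a plain property index definition under oak:index
    static void addPropertyIndex(NodeBuilder builder, String name, Set<String> properties) {
        // child() creates the oak:index node if it does not exist yet
        NodeBuilder indexRoot = builder.child(INDEX_DEFINITIONS_NAME);
        // reindex = true, unique = false, no declaring node type restriction
        IndexUtils.createIndexDefinition(indexRoot, name, true, false, properties, null);
    }
}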


// Code Retrieve DocumentNodeStore Service
private DocumentNodeStore getDocumentNodeStore() {
    ServiceTracker<DocumentNodeStore, DocumentNodeStore> nodeStoreServiceTracker = null;
    try {
        BundleContext context =
            FrameworkUtil.getBundle(DocumentNodeStore.class).getBundleContext();
        nodeStoreServiceTracker =
            new ServiceTracker<>(context, DocumentNodeStore.class, null) {
                @Override
                public DocumentNodeStore addingService(
                        ServiceReference<DocumentNodeStore> reference) {
                    logger.info("DocumentNodeStore service reference received");
                    documentNodeStore = context.getService(reference);
                    return super.addingService(reference);
                }
            };
        nodeStoreServiceTracker.open();
        documentNodeStore = nodeStoreServiceTracker.waitForService(SERVICE_WAIT_TIME);
        if (documentNodeStore != null) {
            logger.info("ContentManagementServiceImpl.getDocumentNodeStore - "
                + "DocumentNodeStore service reference found");

            EditorHook hook = new EditorHook(
                new CompositeEditorProvider(
                    new NamespaceEditorProvider(), new TypeEditorProvider()));

            OakInitializer.initialize(documentNodeStore, new InitialContent(), hook);
        }
    } catch (InterruptedException e) {
        logger.error("ContentManagementServiceImpl.getDocumentNodeStore - "
            + "Failed to get documentNodeStore", e);
    } finally {
        if (nodeStoreServiceTracker != null) {
            nodeStoreServiceTracker.close();
        }
    }

    return documentNodeStore;
}

// Repository creation code
private ContentRepository createRepository(String workspace) throws CommitFailedException {
    HashMap<String, String> extraMap = new HashMap<>();
    NodeStore documentNodeStoreService = getDocumentNodeStore();
    IndexInitializer propertyIndexInitializer =
        new IndexInitializer(
            "propertyIndex",
            "schemaPropertyIndex",
            new HashSet<>(Arrays.asList(ExperienceManagerConstant.PROPERTY_INDEX_PROPERTIES)));

    String asyncIndexTime = "5";
    String freeTextAsyncIndexTime = "5";

    extraMap.put(OAK_ASYNC_INDEX_LANE_INTERVAL, asyncIndexTime);
    extraMap.put(OAK_FULLTEXT_ASYNC_INDEX_LANE_INTERVAL, freeTextAsyncIndexTime);

    return LuceneRepository.createRepositoryWithLuceneIndex(
        documentNodeStoreService, propertyIndexInitializer, workspace, extraMap);
}


// Code causing oakmerge issue
try {
    contentSession = repositoryService.getContentSession(workSpaceName);
    if (contentSession != null) {
        Root rootNode = contentSession.getLatestRoot();
        rootNode.rebase();
        Tree contentNode = FormDataUtil.addFileContentNode(
            rootNode, formDataFile, parentTreePath, extra);
        // commit node
        if (contentNode != null) {
            rootNode.commit();
            contentId = contentNode.getPath();
            formDataFile.setFilepath(contentId);
        }
    }
} catch (CommitFailedException e) {
    logger.error("Content Manager: {} Unable to create node ", methodName, e);
}


//Configuration for PID org.apache.jackrabbit.oak.plugins.document.DocumentNodeStoreService

# A path on the file system where repository data such as the cache will be stored
repository.home=
# Document store type
documentStoreType=RDB
maxReplicationLagInSecs=21600
leaseCheckMode=DISABLED
# Property indicating that this component will not register as a NodeStore,
# but as a "NodeStoreProvider" with the given role
role=
cache=512
nodeCachePercentage=35
prevDocCachePercentage=4
childrenCachePercentage=15
diffCachePercentage=30
cacheSegmentCount=16
cacheStackMoveDistance=16
blobCacheSize=16
updateLimit=100000
persistentCache=cache,size\=512,+compact
persistentCacheIncludes=/
journalCache=diff-cache

journalGCInterval=300000
journalGCMaxAge=86400000
versionGcMaxAgeInSecs=86400
versionGCExpression= 0 0 2 * * ?
versionGCTimeLimitInSecs=10800
blobGcMaxAgeInSecs=86400
blobTrackSnapshotIntervalInSecs=43200




Thanks & Regards,
Sandeep Ambule
