Modified: jackrabbit/oak/branches/1.0/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/RepositoryUpgrade.java URL: http://svn.apache.org/viewvc/jackrabbit/oak/branches/1.0/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/RepositoryUpgrade.java?rev=1792993&r1=1792992&r2=1792993&view=diff ============================================================================== --- jackrabbit/oak/branches/1.0/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/RepositoryUpgrade.java (original) +++ jackrabbit/oak/branches/1.0/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/RepositoryUpgrade.java Fri Apr 28 07:16:13 2017 @@ -16,88 +16,58 @@ */ package org.apache.jackrabbit.oak.upgrade; +import static com.google.common.base.Preconditions.checkNotNull; import static com.google.common.base.Preconditions.checkState; +import static com.google.common.collect.ImmutableSet.copyOf; +import static com.google.common.collect.ImmutableSet.of; import static com.google.common.collect.Lists.newArrayList; import static com.google.common.collect.Lists.newArrayListWithCapacity; import static com.google.common.collect.Maps.newHashMap; -import static java.util.Arrays.asList; -import static org.apache.jackrabbit.JcrConstants.JCR_AUTOCREATED; -import static org.apache.jackrabbit.JcrConstants.JCR_CHILDNODEDEFINITION; -import static org.apache.jackrabbit.JcrConstants.JCR_DEFAULTPRIMARYTYPE; -import static org.apache.jackrabbit.JcrConstants.JCR_DEFAULTVALUES; -import static org.apache.jackrabbit.JcrConstants.JCR_HASORDERABLECHILDNODES; -import static org.apache.jackrabbit.JcrConstants.JCR_ISMIXIN; -import static org.apache.jackrabbit.JcrConstants.JCR_MANDATORY; -import static org.apache.jackrabbit.JcrConstants.JCR_MULTIPLE; -import static org.apache.jackrabbit.JcrConstants.JCR_NAME; -import static org.apache.jackrabbit.JcrConstants.JCR_NODETYPENAME; -import static org.apache.jackrabbit.JcrConstants.JCR_ONPARENTVERSION; -import static org.apache.jackrabbit.JcrConstants.JCR_PRIMARYITEMNAME; -import static org.apache.jackrabbit.JcrConstants.JCR_PRIMARYTYPE; -import static org.apache.jackrabbit.JcrConstants.JCR_PROPERTYDEFINITION; -import static org.apache.jackrabbit.JcrConstants.JCR_PROTECTED; -import static org.apache.jackrabbit.JcrConstants.JCR_REQUIREDPRIMARYTYPES; -import static org.apache.jackrabbit.JcrConstants.JCR_REQUIREDTYPE; -import static org.apache.jackrabbit.JcrConstants.JCR_SAMENAMESIBLINGS; -import static org.apache.jackrabbit.JcrConstants.JCR_SUPERTYPES; +import static com.google.common.collect.Sets.newHashSet; +import static com.google.common.collect.Sets.union; import static org.apache.jackrabbit.JcrConstants.JCR_SYSTEM; -import static org.apache.jackrabbit.JcrConstants.JCR_VALUECONSTRAINTS; -import static org.apache.jackrabbit.JcrConstants.JCR_VERSIONSTORAGE; -import static org.apache.jackrabbit.JcrConstants.NT_CHILDNODEDEFINITION; -import static org.apache.jackrabbit.JcrConstants.NT_NODETYPE; -import static org.apache.jackrabbit.JcrConstants.NT_PROPERTYDEFINITION; -import static org.apache.jackrabbit.core.RepositoryImpl.ACTIVITIES_NODE_ID; -import static org.apache.jackrabbit.core.RepositoryImpl.ROOT_NODE_ID; -import static org.apache.jackrabbit.core.RepositoryImpl.VERSION_STORAGE_NODE_ID; -import static org.apache.jackrabbit.oak.api.Type.BOOLEANS; -import static org.apache.jackrabbit.oak.api.Type.DECIMALS; -import static org.apache.jackrabbit.oak.api.Type.DOUBLES; -import static org.apache.jackrabbit.oak.api.Type.LONGS; -import static 
org.apache.jackrabbit.oak.api.Type.NAME; -import static org.apache.jackrabbit.oak.api.Type.NAMES; -import static org.apache.jackrabbit.oak.api.Type.PATHS; -import static org.apache.jackrabbit.oak.api.Type.STRINGS; -import static org.apache.jackrabbit.oak.plugins.memory.PropertyStates.createProperty; import static org.apache.jackrabbit.oak.plugins.name.Namespaces.addCustomMapping; -import static org.apache.jackrabbit.oak.plugins.nodetype.NodeTypeConstants.JCR_AVAILABLE_QUERY_OPERATORS; -import static org.apache.jackrabbit.oak.plugins.nodetype.NodeTypeConstants.JCR_IS_ABSTRACT; -import static org.apache.jackrabbit.oak.plugins.nodetype.NodeTypeConstants.JCR_IS_FULLTEXT_SEARCHABLE; -import static org.apache.jackrabbit.oak.plugins.nodetype.NodeTypeConstants.JCR_IS_QUERYABLE; -import static org.apache.jackrabbit.oak.plugins.nodetype.NodeTypeConstants.JCR_IS_QUERY_ORDERABLE; -import static org.apache.jackrabbit.oak.plugins.nodetype.NodeTypeConstants.JCR_NODE_TYPES; +import static org.apache.jackrabbit.oak.plugins.nodetype.NodeTypeConstants.NODE_TYPES_PATH; import static org.apache.jackrabbit.oak.spi.security.privilege.PrivilegeConstants.JCR_ALL; -import static org.apache.jackrabbit.oak.spi.security.privilege.PrivilegeConstants.NT_REP_PRIVILEGE; -import static org.apache.jackrabbit.oak.spi.security.privilege.PrivilegeConstants.NT_REP_PRIVILEGES; -import static org.apache.jackrabbit.oak.spi.security.privilege.PrivilegeConstants.REP_AGGREGATES; -import static org.apache.jackrabbit.oak.spi.security.privilege.PrivilegeConstants.REP_BITS; -import static org.apache.jackrabbit.oak.spi.security.privilege.PrivilegeConstants.REP_IS_ABSTRACT; -import static org.apache.jackrabbit.oak.spi.security.privilege.PrivilegeConstants.REP_NEXT; -import static org.apache.jackrabbit.oak.spi.security.privilege.PrivilegeConstants.REP_PRIVILEGES; -import static org.apache.jackrabbit.spi.commons.name.NameConstants.ANY_NAME; +import static org.apache.jackrabbit.oak.upgrade.cli.parser.OptionParserFactory.SKIP_NAME_CHECK; +import static org.apache.jackrabbit.oak.upgrade.nodestate.FilteringNodeState.ALL; +import static org.apache.jackrabbit.oak.upgrade.nodestate.FilteringNodeState.NONE; +import static org.apache.jackrabbit.oak.upgrade.nodestate.NodeStateCopier.copyProperties; import java.io.File; import java.io.IOException; import java.io.InputStream; -import java.math.BigDecimal; +import java.util.Calendar; import java.util.Collection; -import java.util.Collections; +import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Properties; +import java.util.Set; +import java.util.concurrent.TimeUnit; import javax.annotation.Nonnull; +import javax.annotation.Nullable; import javax.jcr.NamespaceException; -import javax.jcr.PropertyType; +import javax.jcr.Node; import javax.jcr.RepositoryException; -import javax.jcr.UnsupportedRepositoryOperationException; +import javax.jcr.Session; +import javax.jcr.Value; +import javax.jcr.ValueFactory; +import javax.jcr.nodetype.NodeDefinitionTemplate; +import javax.jcr.nodetype.NodeTypeManager; +import javax.jcr.nodetype.NodeTypeTemplate; +import javax.jcr.nodetype.PropertyDefinitionTemplate; import javax.jcr.security.Privilege; -import javax.jcr.version.OnParentVersionAction; -import com.google.common.base.Charsets; +import com.google.common.base.Function; import com.google.common.base.Stopwatch; import com.google.common.collect.HashBiMap; +import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import 
com.google.common.collect.Lists; +import org.apache.jackrabbit.api.security.authorization.PrivilegeManager; +import org.apache.jackrabbit.core.IndexAccessor; import org.apache.jackrabbit.core.RepositoryContext; import org.apache.jackrabbit.core.config.BeanConfig; import org.apache.jackrabbit.core.config.LoginModuleConfig; @@ -106,14 +76,13 @@ import org.apache.jackrabbit.core.config import org.apache.jackrabbit.core.fs.FileSystem; import org.apache.jackrabbit.core.fs.FileSystemException; import org.apache.jackrabbit.core.nodetype.NodeTypeRegistry; -import org.apache.jackrabbit.core.persistence.PersistenceManager; +import org.apache.jackrabbit.core.query.lucene.FieldNames; import org.apache.jackrabbit.core.security.authorization.PrivilegeRegistry; import org.apache.jackrabbit.core.security.user.UserManagerImpl; import org.apache.jackrabbit.oak.api.CommitFailedException; -import org.apache.jackrabbit.oak.api.PropertyState; -import org.apache.jackrabbit.oak.api.Type; -import org.apache.jackrabbit.oak.namepath.GlobalNameMapper; -import org.apache.jackrabbit.oak.namepath.NameMapper; +import org.apache.jackrabbit.oak.api.Root; +import org.apache.jackrabbit.oak.api.Tree; +import org.apache.jackrabbit.oak.namepath.NamePathMapper; import org.apache.jackrabbit.oak.plugins.index.CompositeIndexEditorProvider; import org.apache.jackrabbit.oak.plugins.index.IndexEditorProvider; import org.apache.jackrabbit.oak.plugins.index.IndexUpdate; @@ -124,7 +93,8 @@ import org.apache.jackrabbit.oak.plugins import org.apache.jackrabbit.oak.plugins.name.Namespaces; import org.apache.jackrabbit.oak.plugins.nodetype.TypeEditorProvider; import org.apache.jackrabbit.oak.plugins.nodetype.write.InitialContent; -import org.apache.jackrabbit.oak.plugins.segment.SegmentNodeBuilder; +import org.apache.jackrabbit.oak.plugins.nodetype.write.ReadWriteNodeTypeManager; +import org.apache.jackrabbit.oak.plugins.value.ValueFactoryImpl; import org.apache.jackrabbit.oak.security.SecurityProviderImpl; import org.apache.jackrabbit.oak.spi.commit.CommitHook; import org.apache.jackrabbit.oak.spi.commit.CommitInfo; @@ -134,36 +104,57 @@ import org.apache.jackrabbit.oak.spi.com import org.apache.jackrabbit.oak.spi.commit.EditorProvider; import org.apache.jackrabbit.oak.spi.commit.ProgressNotificationEditor; import org.apache.jackrabbit.oak.spi.lifecycle.RepositoryInitializer; +import org.apache.jackrabbit.oak.spi.lifecycle.WorkspaceInitializer; import org.apache.jackrabbit.oak.spi.security.ConfigurationParameters; import org.apache.jackrabbit.oak.spi.security.SecurityConfiguration; import org.apache.jackrabbit.oak.spi.security.privilege.PrivilegeBits; +import org.apache.jackrabbit.oak.spi.security.privilege.PrivilegeConfiguration; import org.apache.jackrabbit.oak.spi.security.user.UserConfiguration; import org.apache.jackrabbit.oak.spi.security.user.UserConstants; -import org.apache.jackrabbit.oak.spi.state.ChildNodeEntry; import org.apache.jackrabbit.oak.spi.state.NodeBuilder; import org.apache.jackrabbit.oak.spi.state.NodeState; import org.apache.jackrabbit.oak.spi.state.NodeStore; +import org.apache.jackrabbit.oak.upgrade.nodestate.NameFilteringNodeState; +import org.apache.jackrabbit.oak.upgrade.nodestate.report.LoggingReporter; +import org.apache.jackrabbit.oak.upgrade.nodestate.report.ReportingNodeState; +import org.apache.jackrabbit.oak.upgrade.nodestate.NodeStateCopier; +import org.apache.jackrabbit.oak.upgrade.security.AuthorizableFolderEditor; import org.apache.jackrabbit.oak.upgrade.security.GroupEditorProvider; import 
org.apache.jackrabbit.oak.upgrade.security.RestrictionEditorProvider; +import org.apache.jackrabbit.oak.upgrade.version.VersionCopyConfiguration; +import org.apache.jackrabbit.oak.upgrade.version.VersionHistoryUtil; +import org.apache.jackrabbit.oak.upgrade.version.VersionableEditor; +import org.apache.jackrabbit.oak.upgrade.version.VersionablePropertiesEditor; import org.apache.jackrabbit.spi.Name; -import org.apache.jackrabbit.spi.Path; -import org.apache.jackrabbit.spi.Path.Element; -import org.apache.jackrabbit.spi.QItemDefinition; import org.apache.jackrabbit.spi.QNodeDefinition; import org.apache.jackrabbit.spi.QNodeTypeDefinition; import org.apache.jackrabbit.spi.QPropertyDefinition; import org.apache.jackrabbit.spi.QValue; import org.apache.jackrabbit.spi.QValueConstraint; +import org.apache.jackrabbit.spi.commons.conversion.DefaultNamePathResolver; +import org.apache.jackrabbit.spi.commons.conversion.NamePathResolver; +import org.apache.jackrabbit.spi.commons.value.ValueFormat; +import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.Term; +import org.apache.lucene.index.TermDocs; +import org.apache.lucene.index.TermEnum; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import static org.apache.jackrabbit.oak.upgrade.version.VersionCopier.copyVersionStorage; +import static org.apache.jackrabbit.oak.upgrade.version.VersionHistoryUtil.getVersionStorage; + public class RepositoryUpgrade { - /** - * Logger instance - */ - private static final Logger logger = - LoggerFactory.getLogger(RepositoryUpgrade.class); + private static final Logger logger = LoggerFactory.getLogger(RepositoryUpgrade.class); + + private static final int LOG_NODE_COPY = Integer.getInteger("oak.upgrade.logNodeCopy", 10000); + + public static final Set<String> DEFAULT_INCLUDE_PATHS = ALL; + + public static final Set<String> DEFAULT_EXCLUDE_PATHS = NONE; + + public static final Set<String> DEFAULT_MERGE_PATHS = NONE; /** * Source repository context. @@ -175,14 +166,40 @@ public class RepositoryUpgrade { */ private final NodeStore target; - private boolean copyBinariesByReference = false; + /** + * Paths to include during the copy process. Defaults to the root path "/". + */ + private Set<String> includePaths = DEFAULT_INCLUDE_PATHS; - private boolean earlyShutdown = false; + /** + * Paths to exclude during the copy process. Empty by default. + */ + private Set<String> excludePaths = DEFAULT_EXCLUDE_PATHS; + + /** + * Paths to merge during the copy process. Empty by default. + */ + private Set<String> mergePaths = DEFAULT_MERGE_PATHS; + + /** + * Whether or not to copy binaries by reference. Defaults to false. + */ + private boolean copyBinariesByReference = false; private boolean skipOnError = false; + private boolean earlyShutdown = false; + private List<CommitHook> customCommitHooks = null; + private boolean checkLongNames = false; + + private boolean filterLongNames = true; + + private boolean skipInitialization = false; + + VersionCopyConfiguration versionCopyConfiguration = new VersionCopyConfiguration(); + /** * Copies the contents of the repository in the given source directory * to the given target node store. 
@@ -235,6 +252,14 @@ public class RepositoryUpgrade { this.copyBinariesByReference = copyBinariesByReference; } + public boolean isSkipOnError() { + return skipOnError; + } + + public void setSkipOnError(boolean skipOnError) { + this.skipOnError = skipOnError; + } + public boolean isEarlyShutdown() { return earlyShutdown; } @@ -243,12 +268,28 @@ public class RepositoryUpgrade { this.earlyShutdown = earlyShutdown; } - public boolean isSkipOnError() { - return skipOnError; + public boolean isCheckLongNames() { + return checkLongNames; } - public void setSkipOnError(boolean skipOnError) { - this.skipOnError = skipOnError; + public void setCheckLongNames(boolean checkLongNames) { + this.checkLongNames = checkLongNames; + } + + public boolean isFilterLongNames() { + return filterLongNames; + } + + public void setFilterLongNames(boolean filterLongNames) { + this.filterLongNames = filterLongNames; + } + + public boolean isSkipInitialization() { + return skipInitialization; + } + + public void setSkipInitialization(boolean skipInitialization) { + this.skipInitialization = skipInitialization; } /** @@ -272,6 +313,71 @@ public class RepositoryUpgrade { } /** + * Sets the paths that should be included when the source repository + * is copied to the target repository. + * + * @param includes Paths to be included in the copy. + */ + public void setIncludes(@Nonnull String... includes) { + this.includePaths = copyOf(checkNotNull(includes)); + } + + /** + * Sets the paths that should be excluded when the source repository + * is copied to the target repository. + * + * @param excludes Paths to be excluded from the copy. + */ + public void setExcludes(@Nonnull String... excludes) { + this.excludePaths = copyOf(checkNotNull(excludes)); + } + + /** + * Sets the paths that should be merged when the source repository + * is copied to the target repository. + * + * @param merges Paths to be merged during copy. + */ + public void setMerges(@Nonnull String... merges) { + this.mergePaths = copyOf(checkNotNull(merges)); + } + + /** + * Configures the version storage copy. Be default all versions are copied. + * One may disable it completely by setting {@code null} here or limit it to + * a selected date range: {@code <minDate, now()>}. + * + * @param minDate + * minimum date of the versions to copy or {@code null} to + * disable the storage version copying completely. Default value: + * {@code 1970-01-01 00:00:00}. + */ + public void setCopyVersions(Calendar minDate) { + versionCopyConfiguration.setCopyVersions(minDate); + } + + /** + * Configures copying of the orphaned version histories (eg. ones that are + * not referenced by the existing nodes). By default all orphaned version + * histories are copied. One may disable it completely by setting + * {@code null} here or limit it to a selected date range: + * {@code <minDate, now()>}. <br> + * <br> + * Please notice, that this option is overriden by the + * {@link #setCopyVersions(Calendar)}. You can't copy orphaned versions + * older than set in {@link #setCopyVersions(Calendar)} and if you set + * {@code null} there, this option will be ignored. + * + * @param minDate + * minimum date of the orphaned versions to copy or {@code null} + * to not copy them at all. Default value: + * {@code 1970-01-01 00:00:00}. + */ + public void setCopyOrphanedVersions(Calendar minDate) { + versionCopyConfiguration.setCopyOrphanedVersions(minDate); + } + + /** * Copies the full content from the source to the target repository. 
* <p> * The source repository <strong>must not be modified</strong> while @@ -285,46 +391,116 @@ public class RepositoryUpgrade { * @throws RepositoryException if the copy operation fails */ public void copy(RepositoryInitializer initializer) throws RepositoryException { + if (checkLongNames) { + assertNoLongNames(); + } + RepositoryConfig config = source.getRepositoryConfig(); - logger.info( - "Copying repository content from {} to Oak", config.getHomeDir()); + logger.info("Copying repository content from {} to Oak", config.getHomeDir()); try { - NodeState base = target.getRoot(); - NodeBuilder builder = base.builder(); + NodeBuilder targetBuilder = target.getRoot().builder(); + if (VersionHistoryUtil.getVersionStorage(targetBuilder).exists() && !versionCopyConfiguration.skipOrphanedVersionsCopy()) { + logger.warn("The version storage on destination already exists. Orphaned version histories will be skipped."); + versionCopyConfiguration.setCopyOrphanedVersions(null); + } + final Root upgradeRoot = new UpgradeRoot(targetBuilder); String workspaceName = source.getRepositoryConfig().getDefaultWorkspaceName(); SecurityProviderImpl security = new SecurityProviderImpl( mapSecurityConfig(config.getSecurityConfig())); - // init target repository first - new InitialContent().initialize(builder); - if (initializer != null) { - initializer.initialize(builder); - } - for (SecurityConfiguration sc : security.getConfigurations()) { - sc.getRepositoryInitializer().initialize(builder); - } - for (SecurityConfiguration sc : security.getConfigurations()) { - sc.getWorkspaceInitializer().initialize(builder, workspaceName); + if (skipInitialization) { + logger.info("Skipping the repository initialization"); + } else { + // init target repository first + logger.info("Initializing initial repository content from {}", config.getHomeDir()); + new InitialContent().initialize(targetBuilder); + if (initializer != null) { + initializer.initialize(targetBuilder); + } + logger.debug("InitialContent completed from {}", config.getHomeDir()); + + for (SecurityConfiguration sc : security.getConfigurations()) { + RepositoryInitializer ri = sc.getRepositoryInitializer(); + ri.initialize(targetBuilder); + logger.debug("Repository initializer '" + ri.getClass().getName() + "' completed", config.getHomeDir()); + } + for (SecurityConfiguration sc : security.getConfigurations()) { + WorkspaceInitializer wi = sc.getWorkspaceInitializer(); + wi.initialize(targetBuilder, workspaceName); + logger.debug("Workspace initializer '" + wi.getClass().getName() + "' completed", config.getHomeDir()); + } } HashBiMap<String, String> uriToPrefix = HashBiMap.create(); - Map<Integer, String> idxToPrefix = newHashMap(); - copyNamespaces(builder, uriToPrefix, idxToPrefix); - copyNodeTypes(builder, uriToPrefix.inverse()); - copyCustomPrivileges(builder); - - // Triggers compilation of type information, which we need for - // the type predicates used by the bulk copy operations below. 
- new TypeEditorProvider(false).getRootEditor( - base, builder.getNodeState(), builder, null); - - Map<String, String> versionablePaths = newHashMap(); - NodeState root = builder.getNodeState(); - copyWorkspace(builder, root, workspaceName, uriToPrefix, idxToPrefix, versionablePaths); - copyVersionStore(builder, root, workspaceName, uriToPrefix, idxToPrefix, versionablePaths); + logger.info("Copying registered namespaces"); + copyNamespaces(targetBuilder, uriToPrefix); + logger.debug("Namespace registration completed."); + + if (skipInitialization) { + logger.info("Skipping registering node types and privileges"); + } else { + logger.info("Copying registered node types"); + NodeTypeManager ntMgr = new ReadWriteNodeTypeManager() { + @Override + protected Tree getTypes() { + return upgradeRoot.getTree(NODE_TYPES_PATH); + } + + @Nonnull + @Override + protected Root getWriteRoot() { + return upgradeRoot; + } + }; + copyNodeTypes(ntMgr, new ValueFactoryImpl(upgradeRoot, NamePathMapper.DEFAULT)); + logger.debug("Node type registration completed."); + + // migrate privileges + logger.info("Copying registered privileges"); + PrivilegeConfiguration privilegeConfiguration = security.getConfiguration(PrivilegeConfiguration.class); + copyCustomPrivileges(privilegeConfiguration.getPrivilegeManager(upgradeRoot, NamePathMapper.DEFAULT)); + logger.debug("Privilege registration completed."); + + // Triggers compilation of type information, which we need for + // the type predicates used by the bulk copy operations below. + new TypeEditorProvider(false).getRootEditor( + targetBuilder.getBaseState(), targetBuilder.getNodeState(), targetBuilder, null); + } + + final NodeState reportingSourceRoot = ReportingNodeState.wrap( + JackrabbitNodeState.createRootNodeState( + source, workspaceName, targetBuilder.getNodeState(), + uriToPrefix, copyBinariesByReference, skipOnError + ), + new LoggingReporter(logger, "Migrating", LOG_NODE_COPY, -1) + ); + final NodeState sourceRoot; + if (filterLongNames) { + sourceRoot = NameFilteringNodeState.wrap(reportingSourceRoot); + } else { + sourceRoot = reportingSourceRoot; + } + + final Stopwatch watch = Stopwatch.createStarted(); + + logger.info("Copying workspace content"); + copyWorkspace(sourceRoot, targetBuilder, workspaceName); + targetBuilder.getNodeState(); // on TarMK this does call triggers the actual copy + logger.info("Upgrading workspace content completed in {}s ({})", watch.elapsed(TimeUnit.SECONDS), watch); + + if (!versionCopyConfiguration.skipOrphanedVersionsCopy()) { + logger.info("Copying version storage"); + watch.reset().start(); + copyVersionStorage(targetBuilder, getVersionStorage(sourceRoot), getVersionStorage(targetBuilder), versionCopyConfiguration); + targetBuilder.getNodeState(); // on TarMK this does call triggers the actual copy + logger.info("Version storage copied in {}s ({})", watch.elapsed(TimeUnit.SECONDS), watch); + } else { + logger.info("Skipping the version storage as the copyOrphanedVersions is set to false"); + } + watch.reset().start(); logger.info("Applying default commit hooks"); // TODO: default hooks? 
List<CommitHook> hooks = newArrayList(); @@ -334,11 +510,23 @@ public class RepositoryUpgrade { String groupsPath = userConf.getParameters().getConfigValue( UserConstants.PARAM_GROUP_PATH, UserConstants.DEFAULT_GROUP_PATH); + String usersPath = userConf.getParameters().getConfigValue( + UserConstants.PARAM_USER_PATH, + UserConstants.DEFAULT_USER_PATH); // hooks specific to the upgrade, need to run first hooks.add(new EditorHook(new CompositeEditorProvider( new RestrictionEditorProvider(), - new GroupEditorProvider(groupsPath)))); + new GroupEditorProvider(groupsPath), + // copy referenced version histories + new VersionableEditor.Provider(sourceRoot, workspaceName, versionCopyConfiguration), + new SameNameSiblingsEditor.Provider(), + AuthorizableFolderEditor.provider(groupsPath, usersPath) + ))); + + // this editor works on the VersionableEditor output, so it can't be + // a part of the same EditorHook + hooks.add(new EditorHook(new VersionablePropertiesEditor.Provider())); // security-related hooks for (SecurityConfiguration sc : security.getConfigurations()) { @@ -355,13 +543,33 @@ public class RepositoryUpgrade { createIndexEditorProvider() ))); - target.merge(builder, new LoggingCompositeHook(hooks, source, earlyShutdown), CommitInfo.EMPTY); + target.merge(targetBuilder, new LoggingCompositeHook(hooks, source, overrideEarlyShutdown()), CommitInfo.EMPTY); + logger.info("Processing commit hooks completed in {}s ({})", watch.elapsed(TimeUnit.SECONDS), watch); + logger.debug("Repository upgrade completed."); } catch (Exception e) { throw new RepositoryException("Failed to copy content", e); } } - private static EditorProvider createTypeEditorProvider() { + private boolean overrideEarlyShutdown() { + if (earlyShutdown == false) { + return false; + } + + final VersionCopyConfiguration c = this.versionCopyConfiguration; + if (c.isCopyVersions() && c.skipOrphanedVersionsCopy()) { + logger.info("Overriding early shutdown to false because of the copy versions settings"); + return false; + } + if (c.isCopyVersions() && !c.skipOrphanedVersionsCopy() + && c.getOrphanedMinDate().after(c.getVersionsMinDate())) { + logger.info("Overriding early shutdown to false because of the copy versions settings"); + return false; + } + return true; + } + + static EditorProvider createTypeEditorProvider() { return new EditorProvider() { @Override public Editor getRootEditor(NodeState before, NodeState after, NodeBuilder builder, CommitInfo info) @@ -378,7 +586,7 @@ public class RepositoryUpgrade { }; } - private static EditorProvider createIndexEditorProvider() { + static EditorProvider createIndexEditorProvider() { final ProgressTicker ticker = new AsciiArtTicker(); return new EditorProvider() { @Override @@ -413,13 +621,18 @@ public class RepositoryUpgrade { config.getLoginModuleConfig(), LoginModuleConfig.PARAM_ADMIN_ID, UserConstants.PARAM_ADMIN_ID, LoginModuleConfig.PARAM_ANONYMOUS_ID, UserConstants.PARAM_ANONYMOUS_ID); - ConfigurationParameters userConfig = mapConfigurationParameters( - config.getSecurityManagerConfig().getUserManagerConfig(), - UserManagerImpl.PARAM_USERS_PATH, UserConstants.PARAM_USER_PATH, - UserManagerImpl.PARAM_GROUPS_PATH, UserConstants.PARAM_GROUP_PATH, - UserManagerImpl.PARAM_DEFAULT_DEPTH, UserConstants.PARAM_DEFAULT_DEPTH, - UserManagerImpl.PARAM_PASSWORD_HASH_ALGORITHM, UserConstants.PARAM_PASSWORD_HASH_ALGORITHM, - UserManagerImpl.PARAM_PASSWORD_HASH_ITERATIONS, UserConstants.PARAM_PASSWORD_HASH_ITERATIONS); + ConfigurationParameters userConfig; + if 
(config.getSecurityManagerConfig() == null) { + userConfig = ConfigurationParameters.EMPTY; + } else { + userConfig = mapConfigurationParameters( + config.getSecurityManagerConfig().getUserManagerConfig(), + UserManagerImpl.PARAM_USERS_PATH, UserConstants.PARAM_USER_PATH, + UserManagerImpl.PARAM_GROUPS_PATH, UserConstants.PARAM_GROUP_PATH, + UserManagerImpl.PARAM_DEFAULT_DEPTH, UserConstants.PARAM_DEFAULT_DEPTH, + UserManagerImpl.PARAM_PASSWORD_HASH_ALGORITHM, UserConstants.PARAM_PASSWORD_HASH_ALGORITHM, + UserManagerImpl.PARAM_PASSWORD_HASH_ITERATIONS, UserConstants.PARAM_PASSWORD_HASH_ITERATIONS); + } return ConfigurationParameters.of(ImmutableMap.of( UserConfiguration.NAME, ConfigurationParameters.of(loginConfig, userConfig))); @@ -446,7 +659,7 @@ public class RepositoryUpgrade { if (uri == null || uri.isEmpty()) { return local; } else { - return source.getNamespaceRegistry().getPrefix(uri) + ":" + local; + return source.getNamespaceRegistry().getPrefix(uri) + ':' + local; } } @@ -454,21 +667,18 @@ public class RepositoryUpgrade { * Copies the registered namespaces to the target repository, and returns * the internal namespace index mapping used in bundle serialization. * - * @param root root builder + * @param targetRoot root builder of the target store * @param uriToPrefix namespace URI to prefix mapping - * @param idxToPrefix index to prefix mapping * @throws RepositoryException */ private void copyNamespaces( - NodeBuilder root, - Map<String, String> uriToPrefix, Map<Integer, String> idxToPrefix) + NodeBuilder targetRoot, + Map<String, String> uriToPrefix) throws RepositoryException { - NodeBuilder system = root.child(JCR_SYSTEM); + NodeBuilder system = targetRoot.child(JCR_SYSTEM); NodeBuilder namespaces = system.child(NamespaceConstants.REP_NAMESPACES); Properties registry = loadProperties("/namespaces/ns_reg.properties"); - Properties indexes = loadProperties("/namespaces/ns_idx.properties"); - for (String prefixHint : registry.stringPropertyNames()) { String prefix; String uri = registry.getProperty(prefixHint); @@ -477,27 +687,7 @@ public class RepositoryUpgrade { } else { prefix = addCustomMapping(namespaces, uri, prefixHint); } - - String index = null; - if (uri.isEmpty()) { - index = indexes.getProperty(".empty.key"); - } - if (index == null) { - index = indexes.getProperty(uri); - } - - Integer idx; - if (index != null) { - idx = Integer.decode(index); - } else { - int i = 0; - do { - idx = (uri.hashCode() + i++) & 0x00ffffff; - } while (idxToPrefix.containsKey(idx)); - } - checkState(uriToPrefix.put(uri, prefix) == null); - checkState(idxToPrefix.put(idx, prefix) == null); } Namespaces.buildIndexNode(namespaces); @@ -526,394 +716,282 @@ public class RepositoryUpgrade { } @SuppressWarnings("deprecation") - private void copyCustomPrivileges(NodeBuilder root) { + private void copyCustomPrivileges(PrivilegeManager pMgr) throws RepositoryException { PrivilegeRegistry registry = source.getPrivilegeRegistry(); - NodeBuilder privileges = root.child(JCR_SYSTEM).child(REP_PRIVILEGES); - privileges.setProperty(JCR_PRIMARYTYPE, NT_REP_PRIVILEGES, NAME); - PrivilegeBits next = PrivilegeBits.NEXT_AFTER_BUILT_INS; + List<Privilege> customAggrPrivs = Lists.newArrayList(); - logger.info("Copying registered privileges"); + logger.debug("Registering custom non-aggregated privileges"); for (Privilege privilege : registry.getRegisteredPrivileges()) { - String name = privilege.getName(); - if (PrivilegeBits.BUILT_IN.containsKey(name) || JCR_ALL.equals(name)) { - // Ignore built in privileges as 
those have been installed by - // the PrivilegesInitializer already + String privilegeName = privilege.getName(); + + if (hasPrivilege(pMgr, privilegeName)) { + logger.debug("Privilege {} already exists", privilegeName); continue; } - NodeBuilder def = privileges.child(name); - def.setProperty(JCR_PRIMARYTYPE, NT_REP_PRIVILEGE, NAME); - - if (privilege.isAbstract()) { - def.setProperty(REP_IS_ABSTRACT, true); + if (PrivilegeBits.BUILT_IN.containsKey(privilegeName) || JCR_ALL.equals(privilegeName)) { + // Ignore built in privileges as those have been installed by the PrivilegesInitializer already + logger.debug("Built-in privilege -> ignore."); + } else if (privilege.isAggregate()) { + // postpone + customAggrPrivs.add(privilege); + } else { + pMgr.registerPrivilege(privilegeName, privilege.isAbstract(), new String[0]); + logger.info("- " + privilegeName); } + } - Privilege[] aggregate = privilege.getDeclaredAggregatePrivileges(); - if (aggregate.length > 0) { - List<String> names = newArrayListWithCapacity(aggregate.length); - for (Privilege p : aggregate) { - names.add(p.getName()); + logger.debug("Registering custom aggregated privileges"); + while (!customAggrPrivs.isEmpty()) { + Iterator<Privilege> it = customAggrPrivs.iterator(); + boolean progress = false; + while (it.hasNext()) { + Privilege aggrPriv = it.next(); + + List<String> aggrNames = Lists.transform( + ImmutableList.copyOf(aggrPriv.getDeclaredAggregatePrivileges()), + new Function<Privilege, String>() { + @Nullable + @Override + public String apply(@Nullable Privilege input) { + return (input == null) ? null : input.getName(); + } + }); + if (allAggregatesRegistered(pMgr, aggrNames)) { + pMgr.registerPrivilege(aggrPriv.getName(), aggrPriv.isAbstract(), aggrNames.toArray(new String[aggrNames.size()])); + it.remove(); + logger.info("- " + aggrPriv.getName()); + progress = true; } - def.setProperty(REP_AGGREGATES, names, NAMES); } - - // FIXME: the privilege bits of aggregated privileges is not just 'next' but must be properly calculated from the aggregates - PrivilegeBits bits = PrivilegeBits.BUILT_IN.get(name); - if (bits != null) { - def.setProperty(bits.asPropertyState(REP_BITS)); - } else if (aggregate.length == 0) { - bits = next; - next = next.nextBits(); - def.setProperty(bits.asPropertyState(REP_BITS)); + if (!progress) { + break; } } - privileges.setProperty(next.asPropertyState(REP_NEXT)); - - // resolve privilege bits also for all aggregates - for (String name : privileges.getChildNodeNames()) { - resolvePrivilegeBits(privileges, name); + if (customAggrPrivs.isEmpty()) { + logger.debug("Registration of custom privileges completed."); + } else { + StringBuilder invalid = new StringBuilder("|"); + for (Privilege p : customAggrPrivs) { + invalid.append(p.getName()).append('|'); + } + throw new RepositoryException("Failed to register custom privileges. 
The following privileges contained an invalid aggregation:" + invalid); } } - private static PrivilegeBits resolvePrivilegeBits( - NodeBuilder privileges, String name) { - NodeBuilder def = privileges.getChildNode(name); - - PropertyState b = def.getProperty(REP_BITS); - if (b != null) { - return PrivilegeBits.getInstance(b); + private boolean hasPrivilege(PrivilegeManager pMgr, String privilegeName) throws RepositoryException { + final Privilege[] registeredPrivileges = pMgr.getRegisteredPrivileges(); + for (Privilege registeredPrivilege : registeredPrivileges) { + if (registeredPrivilege.getName().equals(privilegeName)) { + return true; + } } + return false; + } - PrivilegeBits bits = PrivilegeBits.getInstance(); - for (String n : def.getNames(REP_AGGREGATES)) { - bits.add(resolvePrivilegeBits(privileges, n)); + private static boolean allAggregatesRegistered(PrivilegeManager privilegeManager, List<String> aggrNames) { + for (String name : aggrNames) { + try { + privilegeManager.getPrivilege(name); + } catch (RepositoryException e) { + return false; + } } - def.setProperty(bits.asPropertyState(REP_BITS)); - return bits; + return true; } - private void copyNodeTypes(NodeBuilder root, Map<String, String> prefixToUri) - throws RepositoryException { + private void copyNodeTypes(NodeTypeManager ntMgr, ValueFactory valueFactory) throws RepositoryException { NodeTypeRegistry sourceRegistry = source.getNodeTypeRegistry(); - NodeBuilder system = root.child(JCR_SYSTEM); - NodeBuilder types = system.child(JCR_NODE_TYPES); - - logger.info("Copying registered node types"); + List<NodeTypeTemplate> templates = Lists.newArrayList(); for (Name name : sourceRegistry.getRegisteredNodeTypes()) { String oakName = getOakName(name); // skip built-in nodetypes (OAK-1235) - if (!types.hasChildNode(oakName)) { + if (!ntMgr.hasNodeType(oakName)) { QNodeTypeDefinition def = sourceRegistry.getNodeTypeDef(name); - NodeBuilder type = types.child(oakName); - copyNodeType(def, type, prefixToUri); + templates.add(createNodeTypeTemplate(valueFactory, ntMgr, oakName, def)); } } + ntMgr.registerNodeTypes(templates.toArray(new NodeTypeTemplate[templates.size()]), true); } - private void copyNodeType( - QNodeTypeDefinition def, NodeBuilder builder, Map<String, String> prefixToUri) - throws RepositoryException { - builder.setProperty(JCR_PRIMARYTYPE, NT_NODETYPE, NAME); + private NodeTypeTemplate createNodeTypeTemplate(ValueFactory valueFactory, NodeTypeManager ntMgr, String oakName, QNodeTypeDefinition def) throws RepositoryException { + NodeTypeTemplate tmpl = ntMgr.createNodeTypeTemplate(); + tmpl.setName(oakName); + tmpl.setAbstract(def.isAbstract()); + tmpl.setMixin(def.isMixin()); + tmpl.setOrderableChildNodes(def.hasOrderableChildNodes()); + tmpl.setQueryable(def.isQueryable()); + + Name primaryItemName = def.getPrimaryItemName(); + if (primaryItemName != null) { + tmpl.setPrimaryItemName(getOakName(primaryItemName)); + } - // - jcr:nodeTypeName (NAME) protected mandatory - builder.setProperty(JCR_NODETYPENAME, getOakName(def.getName()), NAME); - // - jcr:supertypes (NAME) protected multiple Name[] supertypes = def.getSupertypes(); if (supertypes != null && supertypes.length > 0) { List<String> names = newArrayListWithCapacity(supertypes.length); for (Name supertype : supertypes) { names.add(getOakName(supertype)); } - builder.setProperty(JCR_SUPERTYPES, names, NAMES); - } - // - jcr:isAbstract (BOOLEAN) protected mandatory - builder.setProperty(JCR_IS_ABSTRACT, def.isAbstract()); - // - jcr:isQueryable (BOOLEAN) 
protected mandatory - builder.setProperty(JCR_IS_QUERYABLE, def.isQueryable()); - // - jcr:isMixin (BOOLEAN) protected mandatory - builder.setProperty(JCR_ISMIXIN, def.isMixin()); - // - jcr:hasOrderableChildNodes (BOOLEAN) protected mandatory - builder.setProperty( - JCR_HASORDERABLECHILDNODES, def.hasOrderableChildNodes()); - // - jcr:primaryItemName (NAME) protected - Name primary = def.getPrimaryItemName(); - if (primary != null) { - builder.setProperty( - JCR_PRIMARYITEMNAME, getOakName(primary), NAME); - } - - // + jcr:propertyDefinition (nt:propertyDefinition) = nt:propertyDefinition protected sns - QPropertyDefinition[] properties = def.getPropertyDefs(); - for (int i = 0; i < properties.length; i++) { - String name = JCR_PROPERTYDEFINITION + '[' + (i + 1) + ']'; - copyPropertyDefinition(properties[i], builder.child(name), prefixToUri); + tmpl.setDeclaredSuperTypeNames(names.toArray(new String[names.size()])); } - // + jcr:childNodeDefinition (nt:childNodeDefinition) = nt:childNodeDefinition protected sns - QNodeDefinition[] childNodes = def.getChildNodeDefs(); - for (int i = 0; i < childNodes.length; i++) { - String name = JCR_CHILDNODEDEFINITION + '[' + (i + 1) + ']'; - copyChildNodeDefinition(childNodes[i], builder.child(name)); + List<PropertyDefinitionTemplate> propertyDefinitionTemplates = tmpl.getPropertyDefinitionTemplates(); + for (QPropertyDefinition qpd : def.getPropertyDefs()) { + PropertyDefinitionTemplate pdt = createPropertyDefinitionTemplate(valueFactory, ntMgr, qpd); + propertyDefinitionTemplates.add(pdt); } - } - - private void copyPropertyDefinition( - QPropertyDefinition def, NodeBuilder builder, Map<String, String> prefixToUri) - throws RepositoryException { - builder.setProperty(JCR_PRIMARYTYPE, NT_PROPERTYDEFINITION, NAME); - - copyItemDefinition(def, builder); - // - jcr:requiredType (STRING) protected mandatory - // < 'STRING', 'URI', 'BINARY', 'LONG', 'DOUBLE', - // 'DECIMAL', 'BOOLEAN', 'DATE', 'NAME', 'PATH', - // 'REFERENCE', 'WEAKREFERENCE', 'UNDEFINED' - builder.setProperty( - JCR_REQUIREDTYPE, - Type.fromTag(def.getRequiredType(), false).toString()); - // - jcr:valueConstraints (STRING) protected multiple - QValueConstraint[] constraints = def.getValueConstraints(); - if (constraints != null && constraints.length > 0) { - List<String> strings = newArrayListWithCapacity(constraints.length); - for (QValueConstraint constraint : constraints) { - strings.add(constraint.getString()); - } - builder.setProperty(JCR_VALUECONSTRAINTS, strings, STRINGS); - } - // - jcr:defaultValues (UNDEFINED) protected multiple - QValue[] qValues = def.getDefaultValues(); - if (qValues != null) { - copyDefaultValues(qValues, builder, new GlobalNameMapper(prefixToUri)); - } - // - jcr:multiple (BOOLEAN) protected mandatory - builder.setProperty(JCR_MULTIPLE, def.isMultiple()); - // - jcr:availableQueryOperators (NAME) protected mandatory multiple - List<String> operators = asList(def.getAvailableQueryOperators()); - builder.setProperty(JCR_AVAILABLE_QUERY_OPERATORS, operators, NAMES); - // - jcr:isFullTextSearchable (BOOLEAN) protected mandatory - builder.setProperty( - JCR_IS_FULLTEXT_SEARCHABLE, def.isFullTextSearchable()); - // - jcr:isQueryOrderable (BOOLEAN) protected mandatory - builder.setProperty(JCR_IS_QUERY_ORDERABLE, def.isQueryOrderable()); - } - - private static void copyDefaultValues(QValue[] qValues, NodeBuilder builder, - NameMapper nameMapper) throws RepositoryException { - if (qValues.length == 0) { - builder.setProperty(JCR_DEFAULTVALUES, 
Collections.<String>emptyList(), STRINGS); - } else { - int type = qValues[0].getType(); - switch (type) { - case PropertyType.STRING: - List<String> strings = newArrayListWithCapacity(qValues.length); - for (QValue qValue : qValues) { - strings.add(qValue.getString()); - } - builder.setProperty(createProperty(JCR_DEFAULTVALUES, strings, STRINGS)); - return; - case PropertyType.LONG: - List<Long> longs = newArrayListWithCapacity(qValues.length); - for (QValue qValue : qValues) { - longs.add(qValue.getLong()); - } - builder.setProperty(createProperty(JCR_DEFAULTVALUES, longs, LONGS)); - return; - case PropertyType.DOUBLE: - List<Double> doubles = newArrayListWithCapacity(qValues.length); - for (QValue qValue : qValues) { - doubles.add(qValue.getDouble()); - } - builder.setProperty(createProperty(JCR_DEFAULTVALUES, doubles, DOUBLES)); - return; - case PropertyType.BOOLEAN: - List<Boolean> booleans = Lists.newArrayListWithCapacity(qValues.length); - for (QValue qValue : qValues) { - booleans.add(qValue.getBoolean()); - } - builder.setProperty(createProperty(JCR_DEFAULTVALUES, booleans, BOOLEANS)); - return; - case PropertyType.NAME: - List<String> names = Lists.newArrayListWithCapacity(qValues.length); - for (QValue qValue : qValues) { - names.add(nameMapper.getOakName(qValue.getName().toString())); - } - builder.setProperty(createProperty(JCR_DEFAULTVALUES, names, NAMES)); - return; - case PropertyType.PATH: - List<String> paths = Lists.newArrayListWithCapacity(qValues.length); - for (QValue qValue : qValues) { - paths.add(getOakPath(qValue.getPath(), nameMapper)); - } - builder.setProperty(createProperty(JCR_DEFAULTVALUES, paths, PATHS)); - return; - case PropertyType.DECIMAL: - List<BigDecimal> decimals = Lists.newArrayListWithCapacity(qValues.length); - for (QValue qValue : qValues) { - decimals.add(qValue.getDecimal()); - } - builder.setProperty(createProperty(JCR_DEFAULTVALUES, decimals, DECIMALS)); - return; - case PropertyType.DATE: - case PropertyType.URI: - List<String> values = newArrayListWithCapacity(qValues.length); - for (QValue qValue : qValues) { - values.add(qValue.getString()); - } - builder.setProperty(createProperty(JCR_DEFAULTVALUES, values, Type.fromTag(type, true))); - return; - default: - throw new UnsupportedRepositoryOperationException( - "Cannot copy default value of type " + Type.fromTag(type, true)); - } + // + jcr:childNodeDefinition (nt:childNodeDefinition) = nt:childNodeDefinition protected sns + List<NodeDefinitionTemplate> nodeDefinitionTemplates = tmpl.getNodeDefinitionTemplates(); + for (QNodeDefinition qnd : def.getChildNodeDefs()) { + NodeDefinitionTemplate ndt = createNodeDefinitionTemplate(ntMgr, qnd); + nodeDefinitionTemplates.add(ndt); } - } - private static String getOakPath(Path path, NameMapper nameMapper) - throws RepositoryException { - StringBuilder oakPath = new StringBuilder(); - String sep = ""; - for (Element element: path.getElements()) { - if (element.denotesRoot()) { - oakPath.append('/'); - continue; - } else if (element.denotesName()) { - oakPath.append(sep).append(nameMapper.getOakName(element.getString())); - } else if (element.denotesCurrent()) { - oakPath.append(sep).append('.'); - } else if (element.denotesParent()) { - oakPath.append(sep).append(".."); - } else { - throw new UnsupportedRepositoryOperationException("Cannot copy default value " + path); - } - sep = "/"; - } - return oakPath.toString(); + return tmpl; } - private void copyChildNodeDefinition( - QNodeDefinition def, NodeBuilder builder) - throws NamespaceException { 
- builder.setProperty(JCR_PRIMARYTYPE, NT_CHILDNODEDEFINITION, NAME); + private NodeDefinitionTemplate createNodeDefinitionTemplate(NodeTypeManager ntMgr, QNodeDefinition def) throws RepositoryException { + NodeDefinitionTemplate tmpl = ntMgr.createNodeDefinitionTemplate(); - copyItemDefinition(def, builder); + Name name = def.getName(); + if (name != null) { + tmpl.setName(getOakName(name)); + } + tmpl.setAutoCreated(def.isAutoCreated()); + tmpl.setMandatory(def.isMandatory()); + tmpl.setOnParentVersion(def.getOnParentVersion()); + tmpl.setProtected(def.isProtected()); + tmpl.setSameNameSiblings(def.allowsSameNameSiblings()); - // - jcr:requiredPrimaryTypes (NAME) = 'nt:base' protected mandatory multiple - Name[] types = def.getRequiredPrimaryTypes(); - List<String> names = newArrayListWithCapacity(types.length); - for (Name type : types) { + List<String> names = newArrayListWithCapacity(def.getRequiredPrimaryTypes().length); + for (Name type : def.getRequiredPrimaryTypes()) { names.add(getOakName(type)); } - builder.setProperty(JCR_REQUIREDPRIMARYTYPES, names, NAMES); - // - jcr:defaultPrimaryType (NAME) protected + tmpl.setRequiredPrimaryTypeNames(names.toArray(new String[names.size()])); + Name type = def.getDefaultPrimaryType(); if (type != null) { - builder.setProperty(JCR_DEFAULTPRIMARYTYPE, getOakName(type), NAME); + tmpl.setDefaultPrimaryTypeName(getOakName(type)); } - // - jcr:sameNameSiblings (BOOLEAN) protected mandatory - builder.setProperty(JCR_SAMENAMESIBLINGS, def.allowsSameNameSiblings()); + + return tmpl; } - private void copyItemDefinition(QItemDefinition def, NodeBuilder builder) - throws NamespaceException { - // - jcr:name (NAME) protected + private PropertyDefinitionTemplate createPropertyDefinitionTemplate(ValueFactory valueFactory, NodeTypeManager ntMgr, QPropertyDefinition def) throws RepositoryException { + PropertyDefinitionTemplate tmpl = ntMgr.createPropertyDefinitionTemplate(); + Name name = def.getName(); - if (name != null && !name.equals(ANY_NAME)) { - builder.setProperty(JCR_NAME, getOakName(name), NAME); + if (name != null) { + tmpl.setName(getOakName(name)); } - // - jcr:autoCreated (BOOLEAN) protected mandatory - builder.setProperty(JCR_AUTOCREATED, def.isAutoCreated()); - // - jcr:mandatory (BOOLEAN) protected mandatory - builder.setProperty(JCR_MANDATORY, def.isMandatory()); - // - jcr:onParentVersion (STRING) protected mandatory - // < 'COPY', 'VERSION', 'INITIALIZE', 'COMPUTE', 'IGNORE', 'ABORT' - builder.setProperty( - JCR_ONPARENTVERSION, - OnParentVersionAction.nameFromValue(def.getOnParentVersion())); - // - jcr:protected (BOOLEAN) protected mandatory - builder.setProperty(JCR_PROTECTED, def.isProtected()); - } - - private void copyVersionStore( - NodeBuilder builder, NodeState root, String workspaceName, - Map<String, String> uriToPrefix, Map<Integer, String> idxToPrefix, - Map<String, String> versionablePaths) - throws RepositoryException, IOException { - PersistenceManager pm = - source.getInternalVersionManager().getPersistenceManager(); - NodeBuilder system = builder.child(JCR_SYSTEM); - - logger.info("Copying version histories"); - copyState(system, JCR_VERSIONSTORAGE, new JackrabbitNodeState( - pm, root, uriToPrefix, VERSION_STORAGE_NODE_ID, - "/jcr:system/jcr:versionStorage", - workspaceName, versionablePaths, copyBinariesByReference, skipOnError)); - - logger.info("Copying activities"); - copyState(system, "jcr:activities", new JackrabbitNodeState( - pm, root, uriToPrefix, ACTIVITIES_NODE_ID, - "/jcr:system/jcr:activities", - 
workspaceName, versionablePaths, copyBinariesByReference, skipOnError)); - } - - private String copyWorkspace( - NodeBuilder builder, NodeState root, String workspaceName, - Map<String, String> uriToPrefix, Map<Integer, String> idxToPrefix, - Map<String, String> versionablePaths) - throws RepositoryException, IOException { - logger.info("Copying workspace {}", workspaceName); - - PersistenceManager pm = - source.getWorkspaceInfo(workspaceName).getPersistenceManager(); - - NodeState state = new JackrabbitNodeState( - pm, root, uriToPrefix, ROOT_NODE_ID, "/", - workspaceName, versionablePaths, copyBinariesByReference, skipOnError); - - for (PropertyState property : state.getProperties()) { - builder.setProperty(property); - } - for (ChildNodeEntry child : state.getChildNodeEntries()) { - String childName = child.getName(); - if (!JCR_SYSTEM.equals(childName)) { - logger.info("Copying subtree /{}", childName); - copyState(builder, childName, child.getNodeState()); + tmpl.setAutoCreated(def.isAutoCreated()); + tmpl.setMandatory(def.isMandatory()); + tmpl.setOnParentVersion(def.getOnParentVersion()); + tmpl.setProtected(def.isProtected()); + tmpl.setRequiredType(def.getRequiredType()); + tmpl.setMultiple(def.isMultiple()); + tmpl.setAvailableQueryOperators(def.getAvailableQueryOperators()); + tmpl.setFullTextSearchable(def.isFullTextSearchable()); + tmpl.setQueryOrderable(def.isQueryOrderable()); + + QValueConstraint[] qConstraints = def.getValueConstraints(); + if (qConstraints != null && qConstraints.length > 0) { + String[] constraints = new String[qConstraints.length]; + for (int i = 0; i < qConstraints.length; i++) { + constraints[i] = qConstraints[i].getString(); } + tmpl.setValueConstraints(constraints); } - return workspaceName; + QValue[] qValues = def.getDefaultValues(); + if (qValues != null) { + NamePathResolver npResolver = new DefaultNamePathResolver(source.getNamespaceRegistry()); + Value[] vs = new Value[qValues.length]; + for (int i = 0; i < qValues.length; i++) { + vs[i] = ValueFormat.getJCRValue(qValues[i], npResolver, valueFactory); + } + tmpl.setDefaultValues(vs); + } + + return tmpl; } - private void copyState(NodeBuilder parent, String name, NodeState state) { - if (parent instanceof SegmentNodeBuilder) { - parent.setChildNode(name, state); - } else { - setChildNode(parent, name, state); + private String copyWorkspace(NodeState sourceRoot, NodeBuilder targetRoot, String workspaceName) + throws RepositoryException { + final Set<String> includes = calculateEffectiveIncludePaths(includePaths, sourceRoot); + final Set<String> excludes = union(copyOf(this.excludePaths), of("/jcr:system/jcr:versionStorage")); + final Set<String> merges = union(copyOf(this.mergePaths), of("/jcr:system")); + + logger.info("Copying workspace {} [i: {}, e: {}, m: {}]", workspaceName, includes, excludes, merges); + + NodeStateCopier.builder() + .include(includes) + .exclude(excludes) + .merge(merges) + .copy(sourceRoot, targetRoot); + + if (includePaths.contains("/")) { + copyProperties(sourceRoot, targetRoot); } + + return workspaceName; } - /** - * NodeState are copied by value by recursing down the complete tree - * This is a temporary approach for OAK-1760 for 1.0 branch. - */ - private void setChildNode(NodeBuilder parent, String name, NodeState state) { - // OAK-1589: maximum supported length of name for DocumentNodeStore - // is 150 bytes. Skip the sub tree if the the name is too long - if (name.length() > 37 && name.getBytes(Charsets.UTF_8).length > 150) { - logger.warn("Node name too long. 
Skipping {}", state); - return; + static Set<String> calculateEffectiveIncludePaths(Set<String> includePaths, NodeState sourceRoot) { + if (!includePaths.contains("/")) { + return copyOf(includePaths); } - NodeBuilder builder = parent.setChildNode(name); - for (PropertyState property : state.getProperties()) { - builder.setProperty(property); + + // include child nodes from source individually to avoid deleting other initialized content + final Set<String> includes = newHashSet(); + for (String childNodeName : sourceRoot.getChildNodeNames()) { + includes.add("/" + childNodeName); + } + return includes; + } + + void assertNoLongNames() throws RepositoryException { + Session session = source.getRepository().login(null, null); + boolean longNameFound = false; + try { + IndexReader reader = IndexAccessor.getReader(source); + if (reader == null) { + return; + } + TermEnum terms = reader.terms(new Term(FieldNames.LOCAL_NAME)); + while (terms.next()) { + Term t = terms.term(); + if (!FieldNames.LOCAL_NAME.equals(t.field())) { + continue; + } + String name = t.text(); + if (NameFilteringNodeState.isNameTooLong(name)) { + TermDocs docs = reader.termDocs(t); + if (docs.next()) { + int docId = docs.doc(); + String uuid = reader.document(docId).get(FieldNames.UUID); + Node n = session.getNodeByIdentifier(uuid); + logger.warn("Name too long: {}", n.getPath()); + longNameFound = true; + } + } + } + } catch (IOException e) { + throw new RepositoryException(e); + } finally { + session.logout(); } - for (ChildNodeEntry child : state.getChildNodeEntries()) { - setChildNode(builder, child.getName(), child.getNodeState()); + if (longNameFound) { + logger.error("Node with a long name has been found. Please fix the content or rerun the migration with {} option.", SKIP_NAME_CHECK); + throw new RepositoryException("Node with a long name has been found."); } } - private static class LoggingCompositeHook implements CommitHook { + static class LoggingCompositeHook implements CommitHook { private final Collection<CommitHook> hooks; private boolean started = false; private final boolean earlyShutdown; @@ -931,7 +1009,7 @@ public class RepositoryUpgrade { public NodeState processCommit(NodeState before, NodeState after, CommitInfo info) throws CommitFailedException { NodeState newState = after; Stopwatch watch = Stopwatch.createStarted(); - if (earlyShutdown && !started) { + if (earlyShutdown && source != null && !started) { logger.info("Shutting down source repository."); source.getRepository().shutdown(); started = true;
Added: jackrabbit/oak/branches/1.0/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/SameNameSiblingsEditor.java URL: http://svn.apache.org/viewvc/jackrabbit/oak/branches/1.0/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/SameNameSiblingsEditor.java?rev=1792993&view=auto ============================================================================== --- jackrabbit/oak/branches/1.0/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/SameNameSiblingsEditor.java (added) +++ jackrabbit/oak/branches/1.0/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/SameNameSiblingsEditor.java Fri Apr 28 07:16:13 2017 @@ -0,0 +1,299 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.jackrabbit.oak.upgrade; + +import static com.google.common.collect.Iterables.filter; +import static com.google.common.collect.Iterables.transform; +import static org.apache.jackrabbit.JcrConstants.JCR_SAMENAMESIBLINGS; +import static org.apache.jackrabbit.JcrConstants.JCR_SYSTEM; +import static org.apache.jackrabbit.oak.plugins.nodetype.NodeTypeConstants.JCR_NODE_TYPES; +import static org.apache.jackrabbit.oak.plugins.nodetype.NodeTypeConstants.REP_NAMED_CHILD_NODE_DEFINITIONS; +import static org.apache.jackrabbit.oak.plugins.nodetype.NodeTypeConstants.REP_RESIDUAL_CHILD_NODE_DEFINITIONS; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +import org.apache.jackrabbit.oak.api.CommitFailedException; +import org.apache.jackrabbit.oak.plugins.nodetype.TypePredicate; +import org.apache.jackrabbit.oak.spi.commit.CommitInfo; +import org.apache.jackrabbit.oak.spi.commit.DefaultEditor; +import org.apache.jackrabbit.oak.spi.commit.Editor; +import org.apache.jackrabbit.oak.spi.commit.EditorProvider; +import org.apache.jackrabbit.oak.spi.state.ChildNodeEntry; +import org.apache.jackrabbit.oak.spi.state.NodeBuilder; +import org.apache.jackrabbit.oak.spi.state.NodeState; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.google.common.base.Function; +import com.google.common.base.Predicate; + +/** + * This editor check if same name sibling nodes are allowed under a given + * parent. If they are not, they will be renamed by replacing brackets with a + * underscore: {@code sns_name[3] -> sns_name_3_}. 
+ */ +public class SameNameSiblingsEditor extends DefaultEditor { + + private static final Logger logger = LoggerFactory.getLogger(SameNameSiblingsEditor.class); + + private static final Pattern SNS_REGEX = Pattern.compile("^(.+)\\[(\\d+)\\]$"); + + private static final Predicate<NodeState> NO_SNS_PROPERTY = new Predicate<NodeState>() { + @Override + public boolean apply(NodeState input) { + return !input.getBoolean(JCR_SAMENAMESIBLINGS); + } + }; + + /** + * List of node type definitions that doesn't allow to have SNS children. + */ + private final List<ChildTypeDef> childrenDefsWithoutSns; + + /** + * Builder of the current node. + */ + private final NodeBuilder builder; + + /** + * Path to the current node. + */ + private final String path; + + public static class Provider implements EditorProvider { + @Override + public Editor getRootEditor(NodeState before, NodeState after, NodeBuilder builder, CommitInfo info) + throws CommitFailedException { + return new SameNameSiblingsEditor(builder); + } + } + + public SameNameSiblingsEditor(NodeBuilder rootBuilder) { + this.childrenDefsWithoutSns = prepareChildDefsWithoutSns(rootBuilder.getNodeState()); + this.builder = rootBuilder; + this.path = ""; + } + + public SameNameSiblingsEditor(SameNameSiblingsEditor parent, String name, NodeBuilder builder) { + this.childrenDefsWithoutSns = parent.childrenDefsWithoutSns; + this.builder = builder; + this.path = new StringBuilder(parent.path).append('/').append(name).toString(); + } + + @Override + public Editor childNodeAdded(String name, NodeState after) throws CommitFailedException { + return new SameNameSiblingsEditor(this, name, builder.getChildNode(name)); + } + + @Override + public Editor childNodeChanged(String name, NodeState before, NodeState after) throws CommitFailedException { + return new SameNameSiblingsEditor(this, name, builder.getChildNode(name)); + } + + @Override + public void leave(NodeState before, NodeState after) throws CommitFailedException { + if (hasSameNamedChildren(after)) { + renameSameNamedChildren(builder); + } + } + + /** + * Prepare a list of node definitions that doesn't allow having SNS children. 
+ * + * @param root Repository root + * @return a list of node definitions denying SNS children + */ + private static List<ChildTypeDef> prepareChildDefsWithoutSns(NodeState root) { + List<ChildTypeDef> defs = new ArrayList<ChildTypeDef>(); + NodeState types = root.getChildNode(JCR_SYSTEM).getChildNode(JCR_NODE_TYPES); + for (ChildNodeEntry typeEntry : types.getChildNodeEntries()) { + NodeState type = typeEntry.getNodeState(); + TypePredicate typePredicate = new TypePredicate(root, typeEntry.getName()); + defs.addAll(parseResidualChildNodeDefs(root, type, typePredicate)); + defs.addAll(parseNamedChildNodeDefs(root, type, typePredicate)); + } + return defs; + } + + private static List<ChildTypeDef> parseNamedChildNodeDefs(NodeState root, NodeState parentType, + TypePredicate parentTypePredicate) { + List<ChildTypeDef> defs = new ArrayList<ChildTypeDef>(); + NodeState namedChildNodeDefinitions = parentType.getChildNode(REP_NAMED_CHILD_NODE_DEFINITIONS); + for (ChildNodeEntry childName : namedChildNodeDefinitions.getChildNodeEntries()) { + for (String childType : filterChildren(childName.getNodeState(), NO_SNS_PROPERTY)) { + TypePredicate childTypePredicate = new TypePredicate(root, childType); + defs.add(new ChildTypeDef(parentTypePredicate, childName.getName(), childTypePredicate)); + } + } + return defs; + } + + private static List<ChildTypeDef> parseResidualChildNodeDefs(NodeState root, NodeState parentType, + TypePredicate parentTypePredicate) { + List<ChildTypeDef> defs = new ArrayList<ChildTypeDef>(); + NodeState resChildNodeDefinitions = parentType.getChildNode(REP_RESIDUAL_CHILD_NODE_DEFINITIONS); + for (String childType : filterChildren(resChildNodeDefinitions, NO_SNS_PROPERTY)) { + TypePredicate childTypePredicate = new TypePredicate(root, childType); + defs.add(new ChildTypeDef(parentTypePredicate, childTypePredicate)); + } + return defs; + } + + /** + * Filter children of the given node using predicate and return the list of matching child names. + * + * @param parent + * @param predicate + * @return a list of names of children accepting the predicate + */ + private static Iterable<String> filterChildren(NodeState parent, final Predicate<NodeState> predicate) { + return transform(filter(parent.getChildNodeEntries(), new Predicate<ChildNodeEntry>() { + @Override + public boolean apply(ChildNodeEntry input) { + return predicate.apply(input.getNodeState()); + } + }), new Function<ChildNodeEntry, String>() { + @Override + public String apply(ChildNodeEntry input) { + return input.getName(); + } + }); + } + + /** + * Check if there are SNS nodes under the given parent. + * + * @param parent + * @return {@code true} if there are SNS children + */ + private boolean hasSameNamedChildren(NodeState parent) { + for (String name : parent.getChildNodeNames()) { + if (SNS_REGEX.matcher(name).matches()) { + return true; + } + } + return false; + } + + /** + * Rename all SNS children which are not allowed under the given parent. 
+     */
+    private void renameSameNamedChildren(NodeBuilder parent) {
+        NodeState parentNode = parent.getNodeState();
+        Map<String, String> toBeRenamed = new HashMap<String, String>();
+        for (String name : parent.getChildNodeNames()) {
+            Matcher m = SNS_REGEX.matcher(name);
+            if (!m.matches()) {
+                continue;
+            } else if (isSnsAllowedForChild(parentNode, name)) {
+                continue;
+            }
+            String prefix = m.group(1);
+            String index = m.group(2);
+            toBeRenamed.put(name, createNewName(parentNode, prefix, index));
+        }
+        for (Entry<String, String> e : toBeRenamed.entrySet()) {
+            logger.warn("Renaming SNS {}/{} to {}", path, e.getKey(), e.getValue());
+            parent.getChildNode(e.getKey()).moveTo(parent, e.getValue());
+        }
+    }
+
+    /**
+     * Check if an SNS child with the given name is allowed under the given parent using the {@link #childrenDefsWithoutSns} list.
+     */
+    private boolean isSnsAllowedForChild(NodeState parent, String name) {
+        for (ChildTypeDef snsDef : childrenDefsWithoutSns) {
+            if (snsDef.applies(parent, name)) {
+                return false;
+            }
+        }
+        return true;
+    }
+
+    /**
+     * Create a new name for the conflicting SNS node. This method makes sure that
+     * no node with this name already exists.
+     *
+     * @param parent parent of the SNS node
+     * @param prefix prefix of the new name, e.g. <b>my_name</b>[3]
+     * @param index SNS index, e.g. my_name[<b>3</b>]
+     * @return a new, unused name for the node
+     */
+    private String createNewName(NodeState parent, String prefix, String index) {
+        String newName;
+        int i = 1;
+        do {
+            if (i == 1) {
+                newName = String.format("%s_%s_", prefix, index);
+            } else {
+                newName = String.format("%s_%s_%d", prefix, index, i);
+            }
+            i++;
+        } while (parent.getChildNode(newName).exists());
+        return newName;
+    }
+
+    /**
+     * Definition of a child type. It contains the parent type, the child
+     * type and an optional child name.
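+     * A {@code null} child name acts as a residual (wildcard) definition that
+     * matches any child name.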
+ */ + private static class ChildTypeDef { + + private final TypePredicate parentType; + + private final String childNameConstraint; + + private final TypePredicate childType; + + public ChildTypeDef(TypePredicate parentType, String childName, TypePredicate childType) { + this.parentType = parentType; + this.childNameConstraint = childName; + this.childType = childType; + } + + public ChildTypeDef(TypePredicate parentType, TypePredicate childType) { + this(parentType, null, childType); + } + + public boolean applies(NodeState parent, String childName) { + boolean result = true; + result &= parentType.apply(parent); + result &= childNameConstraint == null || childName.startsWith(this.childNameConstraint + '['); + result &= childType.apply(parent.getChildNode(childName)); + return result; + } + + @Override + public String toString() { + StringBuilder result = new StringBuilder(); + result.append(parentType.toString()).append(" > "); + if (childNameConstraint == null) { + result.append("*"); + } else { + result.append(childNameConstraint); + } + result.append(childType.toString()); + return result.toString(); + } + } +} Added: jackrabbit/oak/branches/1.0/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/UpgradeRoot.java URL: http://svn.apache.org/viewvc/jackrabbit/oak/branches/1.0/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/UpgradeRoot.java?rev=1792993&view=auto ============================================================================== --- jackrabbit/oak/branches/1.0/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/UpgradeRoot.java (added) +++ jackrabbit/oak/branches/1.0/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/UpgradeRoot.java Fri Apr 28 07:16:13 2017 @@ -0,0 +1,137 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.apache.jackrabbit.oak.upgrade; + +import static org.apache.jackrabbit.oak.commons.PathUtils.elements; + +import java.io.IOException; +import java.io.InputStream; +import java.util.Map; + +import javax.annotation.Nonnull; + +import org.apache.jackrabbit.oak.api.AuthInfo; +import org.apache.jackrabbit.oak.api.Blob; +import org.apache.jackrabbit.oak.api.CommitFailedException; +import org.apache.jackrabbit.oak.api.ContentSession; +import org.apache.jackrabbit.oak.api.QueryEngine; +import org.apache.jackrabbit.oak.api.Root; +import org.apache.jackrabbit.oak.api.Tree; +import org.apache.jackrabbit.oak.spi.state.NodeBuilder; +import org.apache.jackrabbit.oak.upgrade.tree.NodeBuilderTree; + +/** + * Simplistic 'Root' implementation around the {@code NodeBuilder} used for the + * repository upgrade in order to be able to make use of existing functionality + * like privilege and node type registration without attempting to properly + * implement the full contract of the {@link Root} interface. + */ +class UpgradeRoot implements Root { + private final NodeBuilderTree rootTree; + + UpgradeRoot(NodeBuilder nodeBuilder) { + rootTree = new NodeBuilderTree("", nodeBuilder); + } + + @Override + public boolean move(String sourcePath, String destPath) { + throw new UnsupportedOperationException(); + } + + @Nonnull + @Override + public Tree getTree(@Nonnull String path) { + Tree child = rootTree; + for (String name : elements(path)) { + child = child.getChild(name); + } + return child; + } + + @Override + public void rebase() { + throw new UnsupportedOperationException(); + } + + @Override + public void refresh() { + // ignore + } + + @Override + public void commit(@Nonnull Map<String, Object> info) throws CommitFailedException { + // ignore + } + + @Override + public void commit() throws CommitFailedException { + // ignore + } + + @Override + public boolean hasPendingChanges() { + return false; + } + + @Nonnull + @Override + public QueryEngine getQueryEngine() { + throw new UnsupportedOperationException(); + } + + @Nonnull + @Override + public Blob createBlob(@Nonnull InputStream stream) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override + public Blob getBlob(@Nonnull String reference) { + throw new UnsupportedOperationException(); + } + + @Nonnull + @Override + public ContentSession getContentSession() { + return new ContentSession() { + @Nonnull + @Override + public AuthInfo getAuthInfo() { + throw new UnsupportedOperationException(); + } + + @Override + public String getWorkspaceName() { + throw new UnsupportedOperationException(); + } + + @Nonnull + @Override + public Root getLatestRoot() { + return UpgradeRoot.this; + } + + @Override + public void close() throws IOException { + // nothing to do + } + }; + } + +} Modified: jackrabbit/oak/branches/1.0/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/blob/LengthCachingDataStore.java URL: http://svn.apache.org/viewvc/jackrabbit/oak/branches/1.0/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/blob/LengthCachingDataStore.java?rev=1792993&r1=1792992&r2=1792993&view=diff ============================================================================== --- jackrabbit/oak/branches/1.0/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/blob/LengthCachingDataStore.java (original) +++ jackrabbit/oak/branches/1.0/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/blob/LengthCachingDataStore.java Fri Apr 28 07:16:13 2017 @@ -57,27 +57,26 @@ import static com.google.common.base.Pre 
/** * A DelegatingDataStore can avoid performing expensive file system access by making * use of pre computed data related to files in DataStore. - * <p/> - * <p>During repository migration actual blob content is not accessed and instead + * <p> + * During repository migration actual blob content is not accessed and instead * only the blob length and blob references are accessed. DelegatingDataStore can be * configured with a mapping file which would be used to determine the length of given - * blob reference.</p> - * <p/> + * blob reference. + * <p> * Mapping file format - * <pre><![CDATA[ + * <pre>{@code * #< length >| < identifier > * 4432|dd10bca036f3134352c63e534d4568a3d2ac2fdc * 32167|dd10bca036f3134567c63e534d4568a3d2ac2fdc - * ]]></pre> - * <p/> + * }</pre> + * <p> * The Configuration: - * <p/> - * <pre><![CDATA[ + * <pre>{@code * <DataStore class="org.apache.jackrabbit.oak.upgrade.blob.LengthCachingDataStore"> * <param name="mappingFilePath" value="/path/to/mapping/file" /> * <param name="delegateClass" value="org.apache.jackrabbit.core.data.FileDataStore" /> * </DataStore> - * ]]></pre> + * }</pre> */ public class LengthCachingDataStore extends AbstractDataStore { private static final Logger log = LoggerFactory.getLogger(LengthCachingDataStore.class); @@ -210,6 +209,7 @@ public class LengthCachingDataStore exte this.mapping = recordSizeMapping; } + @Override public long getLength() throws DataStoreException { Long size = mapping.get(getIdentifier().toString()); if (size == null) { @@ -291,7 +291,7 @@ public class LengthCachingDataStore exte InputStream is = null; try { Properties props = new Properties(); - is = Files.newInputStreamSupplier(configFile).getInput(); + is = Files.asByteSource(configFile).openStream(); props.load(is); PropertiesUtil.populate(delegate, propsToMap(props), false); log.info("Configured the delegating DataStore via {}", configFile.getAbsolutePath()); Added: jackrabbit/oak/branches/1.0/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/checkpoint/CheckpointRetriever.java URL: http://svn.apache.org/viewvc/jackrabbit/oak/branches/1.0/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/checkpoint/CheckpointRetriever.java?rev=1792993&view=auto ============================================================================== --- jackrabbit/oak/branches/1.0/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/checkpoint/CheckpointRetriever.java (added) +++ jackrabbit/oak/branches/1.0/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/checkpoint/CheckpointRetriever.java Fri Apr 28 07:16:13 2017 @@ -0,0 +1,98 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.apache.jackrabbit.oak.upgrade.checkpoint; + +import com.google.common.base.Function; +import com.google.common.collect.Iterables; +import com.google.common.collect.Lists; +import org.apache.jackrabbit.oak.plugins.document.DocumentCheckpointRetriever; +import org.apache.jackrabbit.oak.plugins.document.DocumentNodeStore; +import org.apache.jackrabbit.oak.spi.state.ChildNodeEntry; +import org.apache.jackrabbit.oak.spi.state.NodeState; +import org.apache.jackrabbit.oak.spi.state.NodeStore; +import org.apache.jackrabbit.oak.upgrade.cli.node.TarNodeStore; + +import javax.annotation.Nullable; +import java.util.Collections; +import java.util.List; + +public final class CheckpointRetriever { + + public static class Checkpoint implements Comparable<Checkpoint> { + + private final String name; + + private final long created; + + private final long expiryTime; + + public Checkpoint(String name, long created, long expiryTime) { + this.name = name; + this.created = created; + this.expiryTime = expiryTime; + } + + public static Checkpoint createFromSegmentNode(String name, NodeState node) { + return new Checkpoint(name, node.getLong("created"), node.getLong("timestamp")); + } + + public String getName() { + return name; + } + + public long getExpiryTime() { + return expiryTime; + } + + @Override + public int compareTo(Checkpoint o) { + return compare(this.created, o.created); + } + + private static int compare(long x, long y) { + return (x < y) ? -1 : ((x == y) ? 0 : 1); + } + } + + private CheckpointRetriever() { + } + + public static List<Checkpoint> getCheckpoints(NodeStore nodeStore) { + List<Checkpoint> result; + if (nodeStore instanceof TarNodeStore) { + result = getCheckpoints((TarNodeStore) nodeStore); + } else if (nodeStore instanceof DocumentNodeStore) { + result = DocumentCheckpointRetriever.getCheckpoints((DocumentNodeStore) nodeStore); + } else { + result = Collections.emptyList(); + } + Collections.sort(result); + return result; + } + + private static List<Checkpoint> getCheckpoints(TarNodeStore nodeStore) { + return Lists.newArrayList(Iterables.transform(nodeStore.getSuperRoot().getChildNode("checkpoints").getChildNodeEntries(), new Function<ChildNodeEntry, Checkpoint>() { + @Nullable + @Override + public Checkpoint apply(@Nullable ChildNodeEntry input) { + return Checkpoint.createFromSegmentNode(input.getName(), input.getNodeState()); + } + })); + } +}
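A minimal usage sketch (not part of this commit) showing how the new CheckpointRetriever could be used to list the checkpoints of a migration source store; the enclosing class and the "sourceStore" parameter are illustrative assumptions.

    import org.apache.jackrabbit.oak.spi.state.NodeStore;
    import org.apache.jackrabbit.oak.upgrade.checkpoint.CheckpointRetriever;
    import org.apache.jackrabbit.oak.upgrade.checkpoint.CheckpointRetriever.Checkpoint;

    class CheckpointListingSketch {

        // getCheckpoints returns the checkpoints sorted by creation time (oldest
        // first); node stores other than TarNodeStore or DocumentNodeStore yield
        // an empty list.
        static void listCheckpoints(NodeStore sourceStore) {
            for (Checkpoint cp : CheckpointRetriever.getCheckpoints(sourceStore)) {
                System.out.println(cp.getName() + " (expires " + cp.getExpiryTime() + ")");
            }
        }
    }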
