Modified: 
jackrabbit/oak/branches/1.2/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/RepositoryUpgrade.java
URL: 
http://svn.apache.org/viewvc/jackrabbit/oak/branches/1.2/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/RepositoryUpgrade.java?rev=1792995&r1=1792994&r2=1792995&view=diff
==============================================================================
--- 
jackrabbit/oak/branches/1.2/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/RepositoryUpgrade.java
 (original)
+++ 
jackrabbit/oak/branches/1.2/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/RepositoryUpgrade.java
 Fri Apr 28 07:18:26 2017
@@ -16,33 +16,42 @@
  */
 package org.apache.jackrabbit.oak.upgrade;
 
+import static com.google.common.base.Preconditions.checkNotNull;
 import static com.google.common.base.Preconditions.checkState;
+import static com.google.common.collect.ImmutableSet.copyOf;
 import static com.google.common.collect.ImmutableSet.of;
 import static com.google.common.collect.Lists.newArrayList;
 import static com.google.common.collect.Lists.newArrayListWithCapacity;
 import static com.google.common.collect.Maps.newHashMap;
+import static com.google.common.collect.Sets.newHashSet;
+import static com.google.common.collect.Sets.union;
 import static org.apache.jackrabbit.JcrConstants.JCR_SYSTEM;
-import static org.apache.jackrabbit.core.RepositoryImpl.ACTIVITIES_NODE_ID;
-import static org.apache.jackrabbit.core.RepositoryImpl.ROOT_NODE_ID;
-import static 
org.apache.jackrabbit.core.RepositoryImpl.VERSION_STORAGE_NODE_ID;
 import static 
org.apache.jackrabbit.oak.plugins.name.Namespaces.addCustomMapping;
 import static 
org.apache.jackrabbit.oak.plugins.nodetype.NodeTypeConstants.NODE_TYPES_PATH;
 import static 
org.apache.jackrabbit.oak.spi.security.privilege.PrivilegeConstants.JCR_ALL;
+import static 
org.apache.jackrabbit.oak.upgrade.cli.parser.OptionParserFactory.SKIP_NAME_CHECK;
+import static 
org.apache.jackrabbit.oak.upgrade.nodestate.FilteringNodeState.ALL;
+import static 
org.apache.jackrabbit.oak.upgrade.nodestate.FilteringNodeState.NONE;
+import static 
org.apache.jackrabbit.oak.upgrade.nodestate.NodeStateCopier.copyProperties;
 
 import java.io.File;
 import java.io.IOException;
 import java.io.InputStream;
+import java.util.Calendar;
 import java.util.Collection;
-import java.util.Collections;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 import java.util.Properties;
+import java.util.Set;
+import java.util.concurrent.TimeUnit;
 
 import javax.annotation.Nonnull;
 import javax.annotation.Nullable;
 import javax.jcr.NamespaceException;
+import javax.jcr.Node;
 import javax.jcr.RepositoryException;
+import javax.jcr.Session;
 import javax.jcr.Value;
 import javax.jcr.ValueFactory;
 import javax.jcr.nodetype.NodeDefinitionTemplate;
@@ -51,7 +60,6 @@ import javax.jcr.nodetype.NodeTypeTempla
 import javax.jcr.nodetype.PropertyDefinitionTemplate;
 import javax.jcr.security.Privilege;
 
-import com.google.common.base.Charsets;
 import com.google.common.base.Function;
 import com.google.common.base.Stopwatch;
 import com.google.common.collect.HashBiMap;
@@ -59,6 +67,7 @@ import com.google.common.collect.Immutab
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.Lists;
 import org.apache.jackrabbit.api.security.authorization.PrivilegeManager;
+import org.apache.jackrabbit.core.IndexAccessor;
 import org.apache.jackrabbit.core.RepositoryContext;
 import org.apache.jackrabbit.core.config.BeanConfig;
 import org.apache.jackrabbit.core.config.LoginModuleConfig;
@@ -67,14 +76,12 @@ import org.apache.jackrabbit.core.config
 import org.apache.jackrabbit.core.fs.FileSystem;
 import org.apache.jackrabbit.core.fs.FileSystemException;
 import org.apache.jackrabbit.core.nodetype.NodeTypeRegistry;
-import org.apache.jackrabbit.core.persistence.PersistenceManager;
+import org.apache.jackrabbit.core.query.lucene.FieldNames;
 import org.apache.jackrabbit.core.security.authorization.PrivilegeRegistry;
 import org.apache.jackrabbit.core.security.user.UserManagerImpl;
 import org.apache.jackrabbit.oak.api.CommitFailedException;
-import org.apache.jackrabbit.oak.api.PropertyState;
 import org.apache.jackrabbit.oak.api.Root;
 import org.apache.jackrabbit.oak.api.Tree;
-import org.apache.jackrabbit.oak.commons.PathUtils;
 import org.apache.jackrabbit.oak.namepath.NamePathMapper;
 import org.apache.jackrabbit.oak.plugins.index.CompositeIndexEditorProvider;
 import org.apache.jackrabbit.oak.plugins.index.IndexEditorProvider;
@@ -104,13 +111,20 @@ import org.apache.jackrabbit.oak.spi.sec
 import org.apache.jackrabbit.oak.spi.security.privilege.PrivilegeConfiguration;
 import org.apache.jackrabbit.oak.spi.security.user.UserConfiguration;
 import org.apache.jackrabbit.oak.spi.security.user.UserConstants;
-import org.apache.jackrabbit.oak.spi.state.ChildNodeEntry;
 import org.apache.jackrabbit.oak.spi.state.NodeBuilder;
 import org.apache.jackrabbit.oak.spi.state.NodeState;
 import org.apache.jackrabbit.oak.spi.state.NodeStore;
+import org.apache.jackrabbit.oak.upgrade.nodestate.NameFilteringNodeState;
+import org.apache.jackrabbit.oak.upgrade.nodestate.report.LoggingReporter;
+import org.apache.jackrabbit.oak.upgrade.nodestate.report.ReportingNodeState;
 import org.apache.jackrabbit.oak.upgrade.nodestate.NodeStateCopier;
+import org.apache.jackrabbit.oak.upgrade.security.AuthorizableFolderEditor;
 import org.apache.jackrabbit.oak.upgrade.security.GroupEditorProvider;
 import org.apache.jackrabbit.oak.upgrade.security.RestrictionEditorProvider;
+import org.apache.jackrabbit.oak.upgrade.version.VersionCopyConfiguration;
+import org.apache.jackrabbit.oak.upgrade.version.VersionHistoryUtil;
+import org.apache.jackrabbit.oak.upgrade.version.VersionableEditor;
+import org.apache.jackrabbit.oak.upgrade.version.VersionablePropertiesEditor;
 import org.apache.jackrabbit.spi.Name;
 import org.apache.jackrabbit.spi.QNodeDefinition;
 import org.apache.jackrabbit.spi.QNodeTypeDefinition;
@@ -120,13 +134,28 @@ import org.apache.jackrabbit.spi.QValueC
 import org.apache.jackrabbit.spi.commons.conversion.DefaultNamePathResolver;
 import org.apache.jackrabbit.spi.commons.conversion.NamePathResolver;
 import org.apache.jackrabbit.spi.commons.value.ValueFormat;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.Term;
+import org.apache.lucene.index.TermDocs;
+import org.apache.lucene.index.TermEnum;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import static 
org.apache.jackrabbit.oak.upgrade.version.VersionCopier.copyVersionStorage;
+import static 
org.apache.jackrabbit.oak.upgrade.version.VersionHistoryUtil.getVersionStorage;
+
 public class RepositoryUpgrade {
 
     private static final Logger logger = 
LoggerFactory.getLogger(RepositoryUpgrade.class);
 
+    private static final int LOG_NODE_COPY = 
Integer.getInteger("oak.upgrade.logNodeCopy", 10000);
+
+    public static final Set<String> DEFAULT_INCLUDE_PATHS = ALL;
+
+    public static final Set<String> DEFAULT_EXCLUDE_PATHS = NONE;
+
+    public static final Set<String> DEFAULT_MERGE_PATHS = NONE;
+
     /**
      * Source repository context.
      */
@@ -137,14 +166,40 @@ public class RepositoryUpgrade {
      */
     private final NodeStore target;
 
-    private boolean copyBinariesByReference = false;
+    /**
+     * Paths to include during the copy process. Defaults to the root path "/".
+     */
+    private Set<String> includePaths = DEFAULT_INCLUDE_PATHS;
 
-    private boolean earlyShutdown = false;
+    /**
+     * Paths to exclude during the copy process. Empty by default.
+     */
+    private Set<String> excludePaths = DEFAULT_EXCLUDE_PATHS;
+
+    /**
+     * Paths to merge during the copy process. Empty by default.
+     */
+    private Set<String> mergePaths = DEFAULT_MERGE_PATHS;
+
+    /**
+     * Whether or not to copy binaries by reference. Defaults to false.
+     */
+    private boolean copyBinariesByReference = false;
 
     private boolean skipOnError = false;
 
+    private boolean earlyShutdown = false;
+
     private List<CommitHook> customCommitHooks = null;
 
+    private boolean checkLongNames = false;
+
+    private boolean filterLongNames = true;
+
+    private boolean skipInitialization = false;
+
+    VersionCopyConfiguration versionCopyConfiguration = new 
VersionCopyConfiguration();
+
     /**
      * Copies the contents of the repository in the given source directory
      * to the given target node store.
@@ -197,6 +252,14 @@ public class RepositoryUpgrade {
         this.copyBinariesByReference = copyBinariesByReference;
     }
 
+    public boolean isSkipOnError() {
+        return skipOnError;
+    }
+
+    public void setSkipOnError(boolean skipOnError) {
+        this.skipOnError = skipOnError;
+    }
+
     public boolean isEarlyShutdown() {
         return earlyShutdown;
     }
@@ -205,12 +268,28 @@ public class RepositoryUpgrade {
         this.earlyShutdown = earlyShutdown;
     }
 
-    public boolean isSkipOnError() {
-        return skipOnError;
+    public boolean isCheckLongNames() {
+        return checkLongNames;
     }
 
-    public void setSkipOnError(boolean skipOnError) {
-        this.skipOnError = skipOnError;
+    public void setCheckLongNames(boolean checkLongNames) {
+        this.checkLongNames = checkLongNames;
+    }
+
+    public boolean isFilterLongNames() {
+        return filterLongNames;
+    }
+
+    public void setFilterLongNames(boolean filterLongNames) {
+        this.filterLongNames = filterLongNames;
+    }
+
+    public boolean isSkipInitialization() {
+        return skipInitialization;
+    }
+
+    public void setSkipInitialization(boolean skipInitialization) {
+        this.skipInitialization = skipInitialization;
     }
 
     /**
@@ -234,6 +313,71 @@ public class RepositoryUpgrade {
     }
 
     /**
+     * Sets the paths that should be included when the source repository
+     * is copied to the target repository.
+     *
+     * @param includes Paths to be included in the copy.
+     */
+    public void setIncludes(@Nonnull String... includes) {
+        this.includePaths = copyOf(checkNotNull(includes));
+    }
+
+    /**
+     * Sets the paths that should be excluded when the source repository
+     * is copied to the target repository.
+     *
+     * @param excludes Paths to be excluded from the copy.
+     */
+    public void setExcludes(@Nonnull String... excludes) {
+        this.excludePaths = copyOf(checkNotNull(excludes));
+    }
+
+    /**
+     * Sets the paths that should be merged when the source repository
+     * is copied to the target repository.
+     *
+     * @param merges Paths to be merged during copy.
+     */
+    public void setMerges(@Nonnull String... merges) {
+        this.mergePaths = copyOf(checkNotNull(merges));
+    }
+
+    /**
+     * Configures the version storage copy. By default all versions are copied.
+     * One may disable it completely by setting {@code null} here or limit it 
to
+     * a selected date range: {@code <minDate, now()>}.
+     * 
+     * @param minDate
+     *            minimum date of the versions to copy or {@code null} to
+     *            disable the storage version copying completely. Default 
value:
+     *            {@code 1970-01-01 00:00:00}.
+     */
+    public void setCopyVersions(Calendar minDate) {
+        versionCopyConfiguration.setCopyVersions(minDate);
+    }
+
+    /**
+     * Configures copying of the orphaned version histories (eg. ones that are
+     * not referenced by the existing nodes). By default all orphaned version
+     * histories are copied. One may disable it completely by setting
+     * {@code null} here or limit it to a selected date range:
+     * {@code <minDate, now()>}. <br>
+     * <br>
+     * Please note that this option is overridden by the
+     * {@link #setCopyVersions(Calendar)}. You can't copy orphaned versions
+     * older than set in {@link #setCopyVersions(Calendar)} and if you set
+     * {@code null} there, this option will be ignored.
+     * 
+     * @param minDate
+     *            minimum date of the orphaned versions to copy or {@code null}
+     *            to not copy them at all. Default value:
+     *            {@code 1970-01-01 00:00:00}.
+     */
+    public void setCopyOrphanedVersions(Calendar minDate) {
+        versionCopyConfiguration.setCopyOrphanedVersions(minDate);
+    }
+
+    /**
      * Copies the full content from the source to the target repository.
      * <p>
      * The source repository <strong>must not be modified</strong> while
@@ -247,80 +391,116 @@ public class RepositoryUpgrade {
      * @throws RepositoryException if the copy operation fails
      */
     public void copy(RepositoryInitializer initializer) throws 
RepositoryException {
+        if (checkLongNames) {
+            assertNoLongNames();
+        }
+
         RepositoryConfig config = source.getRepositoryConfig();
         logger.info("Copying repository content from {} to Oak", 
config.getHomeDir());
         try {
-            NodeState base = target.getRoot();
-            NodeBuilder builder = base.builder();
-            final Root upgradeRoot = new UpgradeRoot(builder);
+            NodeBuilder targetBuilder = target.getRoot().builder();
+            if (VersionHistoryUtil.getVersionStorage(targetBuilder).exists() 
&& !versionCopyConfiguration.skipOrphanedVersionsCopy()) {
+                logger.warn("The version storage on destination already 
exists. Orphaned version histories will be skipped.");
+                versionCopyConfiguration.setCopyOrphanedVersions(null);
+            }
+            final Root upgradeRoot = new UpgradeRoot(targetBuilder);
 
             String workspaceName =
                     source.getRepositoryConfig().getDefaultWorkspaceName();
             SecurityProviderImpl security = new SecurityProviderImpl(
                     mapSecurityConfig(config.getSecurityConfig()));
 
-            // init target repository first
-            logger.info("Initializing initial repository content", 
config.getHomeDir());
-            new InitialContent().initialize(builder);
-            if (initializer != null) {
-                initializer.initialize(builder);
-            }
-            logger.debug("InitialContent completed", config.getHomeDir());
+            if (skipInitialization) {
+                logger.info("Skipping the repository initialization");
+            } else {
+                // init target repository first
+                logger.info("Initializing initial repository content from {}", 
config.getHomeDir());
+                new InitialContent().initialize(targetBuilder);
+                if (initializer != null) {
+                    initializer.initialize(targetBuilder);
+                }
+                logger.debug("InitialContent completed from {}", 
config.getHomeDir());
 
-            for (SecurityConfiguration sc : security.getConfigurations()) {
-                RepositoryInitializer ri = sc.getRepositoryInitializer();
-                ri.initialize(builder);
-                logger.debug("Repository initializer '" + 
ri.getClass().getName() + "' completed", config.getHomeDir());
-            }
-            for (SecurityConfiguration sc : security.getConfigurations()) {
-                WorkspaceInitializer wi = sc.getWorkspaceInitializer();
-                wi.initialize(builder, workspaceName);
-                logger.debug("Workspace initializer '" + 
wi.getClass().getName() + "' completed", config.getHomeDir());
+                for (SecurityConfiguration sc : security.getConfigurations()) {
+                    RepositoryInitializer ri = sc.getRepositoryInitializer();
+                    ri.initialize(targetBuilder);
+                    logger.debug("Repository initializer '" + 
ri.getClass().getName() + "' completed", config.getHomeDir());
+                }
+                for (SecurityConfiguration sc : security.getConfigurations()) {
+                    WorkspaceInitializer wi = sc.getWorkspaceInitializer();
+                    wi.initialize(targetBuilder, workspaceName);
+                    logger.debug("Workspace initializer '" + 
wi.getClass().getName() + "' completed", config.getHomeDir());
+                }
             }
 
             HashBiMap<String, String> uriToPrefix = HashBiMap.create();
             logger.info("Copying registered namespaces");
-            copyNamespaces(builder, uriToPrefix);
+            copyNamespaces(targetBuilder, uriToPrefix);
             logger.debug("Namespace registration completed.");
 
-            logger.info("Copying registered node types");
-            NodeTypeManager ntMgr = new ReadWriteNodeTypeManager() {
-                @Override
-                protected Tree getTypes() {
-                    return upgradeRoot.getTree(NODE_TYPES_PATH);
-                }
+            if (skipInitialization) {
+                logger.info("Skipping registering node types and privileges");
+            } else {
+                logger.info("Copying registered node types");
+                NodeTypeManager ntMgr = new ReadWriteNodeTypeManager() {
+                    @Override
+                    protected Tree getTypes() {
+                        return upgradeRoot.getTree(NODE_TYPES_PATH);
+                    }
 
-                @Nonnull
-                @Override
-                protected Root getWriteRoot() {
-                    return upgradeRoot;
-                }
-            };
-            copyNodeTypes(ntMgr, new ValueFactoryImpl(upgradeRoot, 
NamePathMapper.DEFAULT));
-            logger.debug("Node type registration completed.");
-
-            // migrate privileges
-            logger.info("Copying registered privileges");
-            PrivilegeConfiguration privilegeConfiguration = 
security.getConfiguration(PrivilegeConfiguration.class);
-            
copyCustomPrivileges(privilegeConfiguration.getPrivilegeManager(upgradeRoot, 
NamePathMapper.DEFAULT));
-            logger.debug("Privilege registration completed.");
-
-            // Triggers compilation of type information, which we need for
-            // the type predicates used by the bulk  copy operations below.
-            new TypeEditorProvider(false).getRootEditor(
-                    base, builder.getNodeState(), builder, null);
+                    @Nonnull
+                    @Override
+                    protected Root getWriteRoot() {
+                        return upgradeRoot;
+                    }
+                };
+                copyNodeTypes(ntMgr, new ValueFactoryImpl(upgradeRoot, 
NamePathMapper.DEFAULT));
+                logger.debug("Node type registration completed.");
+
+                // migrate privileges
+                logger.info("Copying registered privileges");
+                PrivilegeConfiguration privilegeConfiguration = 
security.getConfiguration(PrivilegeConfiguration.class);
+                
copyCustomPrivileges(privilegeConfiguration.getPrivilegeManager(upgradeRoot, 
NamePathMapper.DEFAULT));
+                logger.debug("Privilege registration completed.");
+
+                // Triggers compilation of type information, which we need for
+                // the type predicates used by the bulk copy operations below.
+                new TypeEditorProvider(false).getRootEditor(
+                        targetBuilder.getBaseState(), 
targetBuilder.getNodeState(), targetBuilder, null);
+            }
+
+            final NodeState reportingSourceRoot = ReportingNodeState.wrap(
+                    JackrabbitNodeState.createRootNodeState(
+                            source, workspaceName, 
targetBuilder.getNodeState(), 
+                            uriToPrefix, copyBinariesByReference, skipOnError
+                    ),
+                    new LoggingReporter(logger, "Migrating", LOG_NODE_COPY, -1)
+            );
+            final NodeState sourceRoot;
+            if (filterLongNames) {
+                sourceRoot = NameFilteringNodeState.wrap(reportingSourceRoot);
+            } else {
+                sourceRoot = reportingSourceRoot;
+            }
 
-            Map<String, String> versionablePaths = newHashMap();
-            NodeState root = builder.getNodeState();
+            final Stopwatch watch = Stopwatch.createStarted();
 
             logger.info("Copying workspace content");
-            copyWorkspace(builder, root, workspaceName, uriToPrefix, 
versionablePaths);
-            logger.debug("Upgrading workspace content completed.");
-
-            logger.info("Copying version store content");
-            copyVersionStore(builder, root, workspaceName, uriToPrefix, 
versionablePaths);
-            logger.debug("Upgrading version store content completed.");
+            copyWorkspace(sourceRoot, targetBuilder, workspaceName);
+            targetBuilder.getNodeState(); // on TarMK this call triggers the actual copy
+            logger.info("Upgrading workspace content completed in {}s ({})", 
watch.elapsed(TimeUnit.SECONDS), watch);
+
+            if (!versionCopyConfiguration.skipOrphanedVersionsCopy()) {
+                logger.info("Copying version storage");
+                watch.reset().start();
+                copyVersionStorage(targetBuilder, 
getVersionStorage(sourceRoot), getVersionStorage(targetBuilder), 
versionCopyConfiguration);
+                targetBuilder.getNodeState(); // on TarMK this call triggers the actual copy
+                logger.info("Version storage copied in {}s ({})", 
watch.elapsed(TimeUnit.SECONDS), watch);
+            } else {
+                logger.info("Skipping the version storage as the 
copyOrphanedVersions is set to false");
+            }
 
+            watch.reset().start();
             logger.info("Applying default commit hooks");
             // TODO: default hooks?
             List<CommitHook> hooks = newArrayList();
@@ -330,11 +510,23 @@ public class RepositoryUpgrade {
             String groupsPath = userConf.getParameters().getConfigValue(
                     UserConstants.PARAM_GROUP_PATH,
                     UserConstants.DEFAULT_GROUP_PATH);
+            String usersPath = userConf.getParameters().getConfigValue(
+                    UserConstants.PARAM_USER_PATH,
+                    UserConstants.DEFAULT_USER_PATH);
 
             // hooks specific to the upgrade, need to run first
             hooks.add(new EditorHook(new CompositeEditorProvider(
                     new RestrictionEditorProvider(),
-                    new GroupEditorProvider(groupsPath))));
+                    new GroupEditorProvider(groupsPath),
+                    // copy referenced version histories
+                    new VersionableEditor.Provider(sourceRoot, workspaceName, 
versionCopyConfiguration),
+                    new SameNameSiblingsEditor.Provider(),
+                    AuthorizableFolderEditor.provider(groupsPath, usersPath)
+            )));
+
+            // this editor works on the VersionableEditor output, so it can't 
be
+            // a part of the same EditorHook
+            hooks.add(new EditorHook(new 
VersionablePropertiesEditor.Provider()));
 
             // security-related hooks
             for (SecurityConfiguration sc : security.getConfigurations()) {
@@ -351,14 +543,33 @@ public class RepositoryUpgrade {
                 createIndexEditorProvider()
             )));
 
-            target.merge(builder, new LoggingCompositeHook(hooks, source, 
earlyShutdown), CommitInfo.EMPTY);
+            target.merge(targetBuilder, new LoggingCompositeHook(hooks, 
source, overrideEarlyShutdown()), CommitInfo.EMPTY);
+            logger.info("Processing commit hooks completed in {}s ({})", 
watch.elapsed(TimeUnit.SECONDS), watch);
             logger.debug("Repository upgrade completed.");
         } catch (Exception e) {
             throw new RepositoryException("Failed to copy content", e);
         }
     }
 
-    private static EditorProvider createTypeEditorProvider() {
+    private boolean overrideEarlyShutdown() {
+        if (earlyShutdown == false) {
+            return false;
+        }
+
+        final VersionCopyConfiguration c = this.versionCopyConfiguration;
+        if (c.isCopyVersions() && c.skipOrphanedVersionsCopy()) {
+            logger.info("Overriding early shutdown to false because of the 
copy versions settings");
+            return false;
+        }
+        if (c.isCopyVersions() && !c.skipOrphanedVersionsCopy()
+                && c.getOrphanedMinDate().after(c.getVersionsMinDate())) {
+            logger.info("Overriding early shutdown to false because of the 
copy versions settings");
+            return false;
+        }
+        return true;
+    }
+
+    static EditorProvider createTypeEditorProvider() {
         return new EditorProvider() {
             @Override
             public Editor getRootEditor(NodeState before, NodeState after, 
NodeBuilder builder, CommitInfo info)
@@ -375,7 +586,7 @@ public class RepositoryUpgrade {
         };
     }
 
-    private static EditorProvider createIndexEditorProvider() {
+    static EditorProvider createIndexEditorProvider() {
         final ProgressTicker ticker = new AsciiArtTicker();
         return new EditorProvider() {
             @Override
@@ -410,13 +621,18 @@ public class RepositoryUpgrade {
                 config.getLoginModuleConfig(),
                 LoginModuleConfig.PARAM_ADMIN_ID, UserConstants.PARAM_ADMIN_ID,
                 LoginModuleConfig.PARAM_ANONYMOUS_ID, 
UserConstants.PARAM_ANONYMOUS_ID);
-        ConfigurationParameters userConfig = mapConfigurationParameters(
-                config.getSecurityManagerConfig().getUserManagerConfig(),
-                UserManagerImpl.PARAM_USERS_PATH, 
UserConstants.PARAM_USER_PATH,
-                UserManagerImpl.PARAM_GROUPS_PATH, 
UserConstants.PARAM_GROUP_PATH,
-                UserManagerImpl.PARAM_DEFAULT_DEPTH, 
UserConstants.PARAM_DEFAULT_DEPTH,
-                UserManagerImpl.PARAM_PASSWORD_HASH_ALGORITHM, 
UserConstants.PARAM_PASSWORD_HASH_ALGORITHM,
-                UserManagerImpl.PARAM_PASSWORD_HASH_ITERATIONS, 
UserConstants.PARAM_PASSWORD_HASH_ITERATIONS);
+        ConfigurationParameters userConfig;
+        if (config.getSecurityManagerConfig() == null) {
+            userConfig = ConfigurationParameters.EMPTY;
+        } else {
+            userConfig = mapConfigurationParameters(
+                    config.getSecurityManagerConfig().getUserManagerConfig(),
+                    UserManagerImpl.PARAM_USERS_PATH, 
UserConstants.PARAM_USER_PATH,
+                    UserManagerImpl.PARAM_GROUPS_PATH, 
UserConstants.PARAM_GROUP_PATH,
+                    UserManagerImpl.PARAM_DEFAULT_DEPTH, 
UserConstants.PARAM_DEFAULT_DEPTH,
+                    UserManagerImpl.PARAM_PASSWORD_HASH_ALGORITHM, 
UserConstants.PARAM_PASSWORD_HASH_ALGORITHM,
+                    UserManagerImpl.PARAM_PASSWORD_HASH_ITERATIONS, 
UserConstants.PARAM_PASSWORD_HASH_ITERATIONS);
+        }
         return ConfigurationParameters.of(ImmutableMap.of(
                 UserConfiguration.NAME,
                 ConfigurationParameters.of(loginConfig, userConfig)));
@@ -451,15 +667,15 @@ public class RepositoryUpgrade {
      * Copies the registered namespaces to the target repository, and returns
      * the internal namespace index mapping used in bundle serialization.
      *
-     * @param root root builder
+     * @param targetRoot root builder of the target store
      * @param uriToPrefix namespace URI to prefix mapping
      * @throws RepositoryException
      */
     private void copyNamespaces(
-            NodeBuilder root,
+            NodeBuilder targetRoot,
             Map<String, String> uriToPrefix)
             throws RepositoryException {
-        NodeBuilder system = root.child(JCR_SYSTEM);
+        NodeBuilder system = targetRoot.child(JCR_SYSTEM);
         NodeBuilder namespaces = 
system.child(NamespaceConstants.REP_NAMESPACES);
 
         Properties registry = loadProperties("/namespaces/ns_reg.properties");
@@ -704,74 +920,78 @@ public class RepositoryUpgrade {
         return tmpl;
     }
 
-    private void copyVersionStore(
-            NodeBuilder builder, NodeState root, String workspaceName,
-            Map<String, String> uriToPrefix,
-            Map<String, String> versionablePaths) {
-        PersistenceManager pm = 
source.getInternalVersionManager().getPersistenceManager();
-        NodeBuilder system = builder.child(JCR_SYSTEM);
-
-        logger.info("Copying version histories");
-        copyState(system, "/jcr:system/jcr:versionStorage", new 
JackrabbitNodeState(
-                pm, root, uriToPrefix, VERSION_STORAGE_NODE_ID,
-                "/jcr:system/jcr:versionStorage",
-                workspaceName, versionablePaths, copyBinariesByReference, 
skipOnError),
-                true);
-
-        logger.info("Copying activities");
-        copyState(system, "/jcr:system/jcr:activities", new 
JackrabbitNodeState(
-                pm, root, uriToPrefix, ACTIVITIES_NODE_ID,
-                "/jcr:system/jcr:activities",
-                workspaceName, versionablePaths, copyBinariesByReference, 
skipOnError),
-                true);
-    }
-
-    private String copyWorkspace(
-            NodeBuilder builder, NodeState root, String workspaceName,
-            Map<String, String> uriToPrefix, Map<String, String> 
versionablePaths)
+    private String copyWorkspace(NodeState sourceRoot, NodeBuilder targetRoot, 
String workspaceName)
             throws RepositoryException {
-        logger.info("Copying workspace {}", workspaceName);
+        final Set<String> includes = 
calculateEffectiveIncludePaths(includePaths, sourceRoot);
+        final Set<String> excludes = union(copyOf(this.excludePaths), 
of("/jcr:system/jcr:versionStorage"));
+        final Set<String> merges = union(copyOf(this.mergePaths), 
of("/jcr:system"));
+
+        logger.info("Copying workspace {} [i: {}, e: {}, m: {}]", 
workspaceName, includes, excludes, merges);
+
+        NodeStateCopier.builder()
+                .include(includes)
+                .exclude(excludes)
+                .merge(merges)
+                .copy(sourceRoot, targetRoot);
 
-        PersistenceManager pm =
-                source.getWorkspaceInfo(workspaceName).getPersistenceManager();
-
-        NodeState state = new JackrabbitNodeState(
-                pm, root, uriToPrefix, ROOT_NODE_ID, "/",
-                workspaceName, versionablePaths, copyBinariesByReference, 
skipOnError);
-
-        for (PropertyState property : state.getProperties()) {
-            builder.setProperty(property);
-        }
-        for (ChildNodeEntry child : state.getChildNodeEntries()) {
-            String childName = child.getName();
-            if (!JCR_SYSTEM.equals(childName)) {
-                final String path = PathUtils.concat("/", childName);
-                logger.info("Copying subtree {}", path);
-                copyState(builder, path, child.getNodeState(), false);
-            }
+        if (includePaths.contains("/")) {
+            copyProperties(sourceRoot, targetRoot);
         }
 
         return workspaceName;
     }
 
-    private void copyState(NodeBuilder targetParent, String path, NodeState 
source, boolean merge) {
-        final String name = PathUtils.getName(path);
-        // OAK-1589: maximum supported length of name for DocumentNodeStore
-        // is 150 bytes. Skip the sub tree if the the name is too long
-        if (name.length() > 37 && name.getBytes(Charsets.UTF_8).length > 150) {
-            logger.warn("Node name too long. Skipping {}", source);
-            return;
-        }
-        NodeBuilder target = targetParent.child(name);
-        NodeStateCopier.copyNodeState(
-                source,
-                target,
-                path,
-                merge ? of(path) : Collections.<String>emptySet()
-        );
+    static Set<String> calculateEffectiveIncludePaths(Set<String> 
includePaths, NodeState sourceRoot) {
+        if (!includePaths.contains("/")) {
+            return copyOf(includePaths);
+        }
+
+        // include child nodes from source individually to avoid deleting 
other initialized content
+        final Set<String> includes = newHashSet();
+        for (String childNodeName : sourceRoot.getChildNodeNames()) {
+            includes.add("/" + childNodeName);
+        }
+        return includes;
+    }
+
+    void assertNoLongNames() throws RepositoryException {
+        Session session = source.getRepository().login(null, null);
+        boolean longNameFound = false;
+        try {
+            IndexReader reader = IndexAccessor.getReader(source);
+            if (reader == null) {
+                return;
+            }
+            TermEnum terms = reader.terms(new Term(FieldNames.LOCAL_NAME));
+            while (terms.next()) {
+                Term t = terms.term();
+                if (!FieldNames.LOCAL_NAME.equals(t.field())) {
+                    continue;
+                }
+                String name = t.text();
+                if (NameFilteringNodeState.isNameTooLong(name)) {
+                    TermDocs docs = reader.termDocs(t);
+                    if (docs.next()) {
+                        int docId = docs.doc();
+                        String uuid = 
reader.document(docId).get(FieldNames.UUID);
+                        Node n = session.getNodeByIdentifier(uuid);
+                        logger.warn("Name too long: {}", n.getPath());
+                        longNameFound = true;
+                    }
+                }
+            }
+        } catch (IOException e) {
+            throw new RepositoryException(e);
+        } finally {
+            session.logout();
+        }
+        if (longNameFound) {
+            logger.error("Node with a long name has been found. Please fix the 
content or rerun the migration with {} option.", SKIP_NAME_CHECK);
+            throw new RepositoryException("Node with a long name has been 
found.");
+        }
     }
 
-    private static class LoggingCompositeHook implements CommitHook {
+    static class LoggingCompositeHook implements CommitHook {
         private final Collection<CommitHook> hooks;
         private boolean started = false;
         private final boolean earlyShutdown;
@@ -789,7 +1009,7 @@ public class RepositoryUpgrade {
         public NodeState processCommit(NodeState before, NodeState after, 
CommitInfo info) throws CommitFailedException {
             NodeState newState = after;
             Stopwatch watch = Stopwatch.createStarted();
-            if (earlyShutdown && !started) {
+            if (earlyShutdown && source != null && !started) {
                 logger.info("Shutting down source repository.");
                 source.getRepository().shutdown();
                 started = true;

Added: 
jackrabbit/oak/branches/1.2/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/SameNameSiblingsEditor.java
URL: 
http://svn.apache.org/viewvc/jackrabbit/oak/branches/1.2/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/SameNameSiblingsEditor.java?rev=1792995&view=auto
==============================================================================
--- 
jackrabbit/oak/branches/1.2/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/SameNameSiblingsEditor.java
 (added)
+++ 
jackrabbit/oak/branches/1.2/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/SameNameSiblingsEditor.java
 Fri Apr 28 07:18:26 2017
@@ -0,0 +1,299 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.jackrabbit.oak.upgrade;
+
+import static com.google.common.collect.Iterables.filter;
+import static com.google.common.collect.Iterables.transform;
+import static org.apache.jackrabbit.JcrConstants.JCR_SAMENAMESIBLINGS;
+import static org.apache.jackrabbit.JcrConstants.JCR_SYSTEM;
+import static 
org.apache.jackrabbit.oak.plugins.nodetype.NodeTypeConstants.JCR_NODE_TYPES;
+import static 
org.apache.jackrabbit.oak.plugins.nodetype.NodeTypeConstants.REP_NAMED_CHILD_NODE_DEFINITIONS;
+import static 
org.apache.jackrabbit.oak.plugins.nodetype.NodeTypeConstants.REP_RESIDUAL_CHILD_NODE_DEFINITIONS;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import org.apache.jackrabbit.oak.api.CommitFailedException;
+import org.apache.jackrabbit.oak.plugins.nodetype.TypePredicate;
+import org.apache.jackrabbit.oak.spi.commit.CommitInfo;
+import org.apache.jackrabbit.oak.spi.commit.DefaultEditor;
+import org.apache.jackrabbit.oak.spi.commit.Editor;
+import org.apache.jackrabbit.oak.spi.commit.EditorProvider;
+import org.apache.jackrabbit.oak.spi.state.ChildNodeEntry;
+import org.apache.jackrabbit.oak.spi.state.NodeBuilder;
+import org.apache.jackrabbit.oak.spi.state.NodeState;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.common.base.Function;
+import com.google.common.base.Predicate;
+
+/**
+ * This editor check if same name sibling nodes are allowed under a given
+ * parent. If they are not, they will be renamed by replacing brackets with a
+ * underscore: {@code sns_name[3] -> sns_name_3_}.
+ */
+public class SameNameSiblingsEditor extends DefaultEditor {
+
+    private static final Logger logger = 
LoggerFactory.getLogger(SameNameSiblingsEditor.class);
+
+    private static final Pattern SNS_REGEX = 
Pattern.compile("^(.+)\\[(\\d+)\\]$");
+
+    private static final Predicate<NodeState> NO_SNS_PROPERTY = new 
Predicate<NodeState>() {
+        @Override
+        public boolean apply(NodeState input) {
+            return !input.getBoolean(JCR_SAMENAMESIBLINGS);
+        }
+    };
+
+    /**
+     * List of node type definitions that doesn't allow to have SNS children.
+     */
+    private final List<ChildTypeDef> childrenDefsWithoutSns;
+
+    /**
+     * Builder of the current node.
+     */
+    private final NodeBuilder builder;
+
+    /**
+     * Path to the current node.
+     */
+    private final String path;
+
+    public static class Provider implements EditorProvider {
+        @Override
+        public Editor getRootEditor(NodeState before, NodeState after, 
NodeBuilder builder, CommitInfo info)
+                throws CommitFailedException {
+            return new SameNameSiblingsEditor(builder);
+        }
+    }
+
+    public SameNameSiblingsEditor(NodeBuilder rootBuilder) {
+        this.childrenDefsWithoutSns = 
prepareChildDefsWithoutSns(rootBuilder.getNodeState());
+        this.builder = rootBuilder;
+        this.path = "";
+    }
+
+    public SameNameSiblingsEditor(SameNameSiblingsEditor parent, String name, 
NodeBuilder builder) {
+        this.childrenDefsWithoutSns = parent.childrenDefsWithoutSns;
+        this.builder = builder;
+        this.path = new 
StringBuilder(parent.path).append('/').append(name).toString();
+    }
+
+    @Override
+    public Editor childNodeAdded(String name, NodeState after) throws 
CommitFailedException {
+        return new SameNameSiblingsEditor(this, name, 
builder.getChildNode(name));
+    }
+
+    @Override
+    public Editor childNodeChanged(String name, NodeState before, NodeState 
after) throws CommitFailedException {
+        return new SameNameSiblingsEditor(this, name, 
builder.getChildNode(name));
+    }
+
+    @Override
+    public void leave(NodeState before, NodeState after) throws 
CommitFailedException {
+        if (hasSameNamedChildren(after)) {
+            renameSameNamedChildren(builder);
+        }
+    }
+
+    /**
+     * Prepare a list of node definitions that doesn't allow having SNS 
children.
+     *
+     * @param root Repository root
+     * @return a list of node definitions denying SNS children
+     */
+    private static List<ChildTypeDef> prepareChildDefsWithoutSns(NodeState 
root) {
+        List<ChildTypeDef> defs = new ArrayList<ChildTypeDef>();
+        NodeState types = 
root.getChildNode(JCR_SYSTEM).getChildNode(JCR_NODE_TYPES);
+        for (ChildNodeEntry typeEntry : types.getChildNodeEntries()) {
+            NodeState type = typeEntry.getNodeState();
+            TypePredicate typePredicate = new TypePredicate(root, 
typeEntry.getName());
+            defs.addAll(parseResidualChildNodeDefs(root, type, typePredicate));
+            defs.addAll(parseNamedChildNodeDefs(root, type, typePredicate));
+        }
+        return defs;
+    }
+
+    private static List<ChildTypeDef> parseNamedChildNodeDefs(NodeState root, 
NodeState parentType,
+            TypePredicate parentTypePredicate) {
+        List<ChildTypeDef> defs = new ArrayList<ChildTypeDef>();
+        NodeState namedChildNodeDefinitions = 
parentType.getChildNode(REP_NAMED_CHILD_NODE_DEFINITIONS);
+        for (ChildNodeEntry childName : 
namedChildNodeDefinitions.getChildNodeEntries()) {
+            for (String childType : filterChildren(childName.getNodeState(), 
NO_SNS_PROPERTY)) {
+                TypePredicate childTypePredicate = new TypePredicate(root, 
childType);
+                defs.add(new ChildTypeDef(parentTypePredicate, 
childName.getName(), childTypePredicate));
+            }
+        }
+        return defs;
+    }
+
+    private static List<ChildTypeDef> parseResidualChildNodeDefs(NodeState 
root, NodeState parentType,
+            TypePredicate parentTypePredicate) {
+        List<ChildTypeDef> defs = new ArrayList<ChildTypeDef>();
+        NodeState resChildNodeDefinitions = 
parentType.getChildNode(REP_RESIDUAL_CHILD_NODE_DEFINITIONS);
+        for (String childType : filterChildren(resChildNodeDefinitions, 
NO_SNS_PROPERTY)) {
+            TypePredicate childTypePredicate = new TypePredicate(root, 
childType);
+            defs.add(new ChildTypeDef(parentTypePredicate, 
childTypePredicate));
+        }
+        return defs;
+    }
+
+    /**
+     * Filter children of the given node using predicate and return the list 
of matching child names.
+     *
+     * @param parent
+     * @param predicate
+     * @return a list of names of children accepting the predicate
+     */
+    private static Iterable<String> filterChildren(NodeState parent, final 
Predicate<NodeState> predicate) {
+        return transform(filter(parent.getChildNodeEntries(), new 
Predicate<ChildNodeEntry>() {
+            @Override
+            public boolean apply(ChildNodeEntry input) {
+                return predicate.apply(input.getNodeState());
+            }
+        }), new Function<ChildNodeEntry, String>() {
+            @Override
+            public String apply(ChildNodeEntry input) {
+                return input.getName();
+            }
+        });
+    }
+
+    /**
+     * Check if there are SNS nodes under the given parent.
+     *
+     * @param parent
+     * @return {@code true} if there are SNS children
+     */
+    private boolean hasSameNamedChildren(NodeState parent) {
+        for (String name : parent.getChildNodeNames()) {
+            if (SNS_REGEX.matcher(name).matches()) {
+                return true;
+            }
+        }
+        return false;
+    }
+
+    /**
+     * Rename all SNS children which are not allowed under the given parent.
+     */
+    private void renameSameNamedChildren(NodeBuilder parent) {
+        NodeState parentNode = parent.getNodeState();
+        Map<String, String> toBeRenamed = new HashMap<String, String>();
+        for (String name : parent.getChildNodeNames()) {
+            Matcher m = SNS_REGEX.matcher(name);
+            if (!m.matches()) {
+                continue;
+            } else if (isSnsAllowedForChild(parentNode, name)) {
+                continue;
+            }
+            String prefix = m.group(1);
+            String index = m.group(2);
+            toBeRenamed.put(name, createNewName(parentNode, prefix, index));
+        }
+        for (Entry<String, String> e : toBeRenamed.entrySet()) {
+            logger.warn("Renaming SNS {}/{} to {}", path, e.getKey(), 
e.getValue());
+            parent.getChildNode(e.getKey()).moveTo(parent, e.getValue());
+        }
+    }
+
+    /**
+     * Check if SNS with given name is allowed under the given parent using 
the {@link #childrenDefsWithoutSns} list.
+     */
+    private boolean isSnsAllowedForChild(NodeState parent, String name) {
+        for (ChildTypeDef snsDef : childrenDefsWithoutSns) {
+            if (snsDef.applies(parent, name)) {
+                return false;
+            }
+        }
+        return true;
+    }
+
+    /**
+     * Create new name for the conflicting SNS node. This method makes sure 
that
+     * no node with this name already exists.
+     *
+     * @param prefix prefix of the new name, eg. <b>my_name</b>[3]
+     * @param index SNS index, eg. my_name[<b>3</b>]
+     * @param parent of the SNS node
+     * @return new and unused name for the node
+     */
+    private String createNewName(NodeState parent, String prefix, String 
index) {
+        String newName;
+        int i = 1;
+        do {
+            if (i == 1) {
+                newName = String.format("%s_%s_", prefix, index);
+            } else {
+                newName = String.format("%s_%s_%d", prefix, index, i);
+            }
+            i++;
+        } while (parent.getChildNode(newName).exists());
+        return newName;
+    }
+
+    /**
+     * Definition of a children type. It contains the parent type, the child
+     * type and an optional child name.
+     */
+    private static class ChildTypeDef {
+
+        private final TypePredicate parentType;
+
+        private final String childNameConstraint;
+
+        private final TypePredicate childType;
+
+        public ChildTypeDef(TypePredicate parentType, String childName, 
TypePredicate childType) {
+            this.parentType = parentType;
+            this.childNameConstraint = childName;
+            this.childType = childType;
+        }
+
+        public ChildTypeDef(TypePredicate parentType, TypePredicate childType) 
{
+            this(parentType, null, childType);
+        }
+
+        public boolean applies(NodeState parent, String childName) {
+            boolean result = true;
+            result &= parentType.apply(parent);
+            result &= childNameConstraint == null || 
childName.startsWith(this.childNameConstraint + '[');
+            result &= childType.apply(parent.getChildNode(childName));
+            return result;
+        }
+
+        @Override
+        public String toString() {
+            StringBuilder result = new StringBuilder();
+            result.append(parentType.toString()).append(" > ");
+            if (childNameConstraint == null) {
+                result.append("*");
+            } else {
+                result.append(childNameConstraint);
+            }
+            result.append(childType.toString());
+            return result.toString();
+        }
+    }
+}

Modified: 
jackrabbit/oak/branches/1.2/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/blob/LengthCachingDataStore.java
URL: 
http://svn.apache.org/viewvc/jackrabbit/oak/branches/1.2/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/blob/LengthCachingDataStore.java?rev=1792995&r1=1792994&r2=1792995&view=diff
==============================================================================
--- 
jackrabbit/oak/branches/1.2/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/blob/LengthCachingDataStore.java
 (original)
+++ 
jackrabbit/oak/branches/1.2/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/blob/LengthCachingDataStore.java
 Fri Apr 28 07:18:26 2017
@@ -57,27 +57,26 @@ import static com.google.common.base.Pre
 /**
  * A DelegatingDataStore can avoid performing expensive file system access by 
making
  * use of pre computed data related to files in DataStore.
- * <p/>
- * <p>During repository migration actual blob content is not accessed and 
instead
+ * <p>
+ * During repository migration actual blob content is not accessed and instead
  * only the blob length and blob references are accessed. DelegatingDataStore 
can be
  * configured with a mapping file which would be used to determine the length 
of given
- * blob reference.</p>
- * <p/>
+ * blob reference.
+ * <p>
  * Mapping file format
- * <pre><![CDATA[
+ * <pre>{@code
  *     #< length >| < identifier >
  *     4432|dd10bca036f3134352c63e534d4568a3d2ac2fdc
  *     32167|dd10bca036f3134567c63e534d4568a3d2ac2fdc
- * ]]></pre>
- * <p/>
+ * }</pre>
+ * <p>
  * The Configuration:
- * <p/>
- * <pre><![CDATA[
+ * <pre>{@code
  *  <DataStore 
class="org.apache.jackrabbit.oak.upgrade.blob.LengthCachingDataStore">
  *      <param name="mappingFilePath" value="/path/to/mapping/file" />
  *      <param name="delegateClass" 
value="org.apache.jackrabbit.core.data.FileDataStore" />
  *  </DataStore>
- * ]]></pre>
+ * }</pre>
  */
 public class LengthCachingDataStore extends AbstractDataStore {
     private static final Logger log = 
LoggerFactory.getLogger(LengthCachingDataStore.class);
@@ -210,6 +209,7 @@ public class LengthCachingDataStore exte
             this.mapping = recordSizeMapping;
         }
 
+        @Override
         public long getLength() throws DataStoreException {
             Long size = mapping.get(getIdentifier().toString());
             if (size == null) {
@@ -291,7 +291,7 @@ public class LengthCachingDataStore exte
             InputStream is = null;
             try {
                 Properties props = new Properties();
-                is = Files.newInputStreamSupplier(configFile).getInput();
+                is = Files.asByteSource(configFile).openStream();
                 props.load(is);
                 PropertiesUtil.populate(delegate, propsToMap(props), false);
                 log.info("Configured the delegating DataStore via {}", 
configFile.getAbsolutePath());

Added: 
jackrabbit/oak/branches/1.2/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/checkpoint/CheckpointRetriever.java
URL: 
http://svn.apache.org/viewvc/jackrabbit/oak/branches/1.2/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/checkpoint/CheckpointRetriever.java?rev=1792995&view=auto
==============================================================================
--- 
jackrabbit/oak/branches/1.2/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/checkpoint/CheckpointRetriever.java
 (added)
+++ 
jackrabbit/oak/branches/1.2/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/checkpoint/CheckpointRetriever.java
 Fri Apr 28 07:18:26 2017
@@ -0,0 +1,98 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.jackrabbit.oak.upgrade.checkpoint;
+
+import com.google.common.base.Function;
+import com.google.common.collect.Iterables;
+import com.google.common.collect.Lists;
+import org.apache.jackrabbit.oak.plugins.document.DocumentCheckpointRetriever;
+import org.apache.jackrabbit.oak.plugins.document.DocumentNodeStore;
+import org.apache.jackrabbit.oak.spi.state.ChildNodeEntry;
+import org.apache.jackrabbit.oak.spi.state.NodeState;
+import org.apache.jackrabbit.oak.spi.state.NodeStore;
+import org.apache.jackrabbit.oak.upgrade.cli.node.TarNodeStore;
+
+import javax.annotation.Nullable;
+import java.util.Collections;
+import java.util.List;
+
+public final class CheckpointRetriever {
+
+    public static class Checkpoint implements Comparable<Checkpoint> {
+
+        private final String name;
+
+        private final long created;
+
+        private final long expiryTime;
+
+        public Checkpoint(String name, long created, long expiryTime) {
+            this.name = name;
+            this.created = created;
+            this.expiryTime = expiryTime;
+        }
+
+        public static Checkpoint createFromSegmentNode(String name, NodeState 
node) {
+            return new Checkpoint(name, node.getLong("created"), 
node.getLong("timestamp"));
+        }
+
+        public String getName() {
+            return name;
+        }
+
+        public long getExpiryTime() {
+            return expiryTime;
+        }
+
+        @Override
+        public int compareTo(Checkpoint o) {
+            return compare(this.created, o.created);
+        }
+
+        private static int compare(long x, long y) {
+            return (x < y) ? -1 : ((x == y) ? 0 : 1);
+        }
+    }
+
+    private CheckpointRetriever() {
+    }
+
+    public static List<Checkpoint> getCheckpoints(NodeStore nodeStore) {
+        List<Checkpoint> result;
+        if (nodeStore instanceof TarNodeStore) {
+            result = getCheckpoints((TarNodeStore) nodeStore);
+        } else if (nodeStore instanceof DocumentNodeStore) {
+            result = 
DocumentCheckpointRetriever.getCheckpoints((DocumentNodeStore) nodeStore);
+        } else {
+            result = Collections.emptyList();
+        }
+        Collections.sort(result);
+        return result;
+    }
+
+    private static List<Checkpoint> getCheckpoints(TarNodeStore nodeStore) {
+        return 
Lists.newArrayList(Iterables.transform(nodeStore.getSuperRoot().getChildNode("checkpoints").getChildNodeEntries(),
 new Function<ChildNodeEntry, Checkpoint>() {
+            @Nullable
+            @Override
+            public Checkpoint apply(@Nullable ChildNodeEntry input) {
+                return Checkpoint.createFromSegmentNode(input.getName(), 
input.getNodeState());
+            }
+        }));
+    }
+}

Added: 
jackrabbit/oak/branches/1.2/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/CliUtils.java
URL: 
http://svn.apache.org/viewvc/jackrabbit/oak/branches/1.2/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/CliUtils.java?rev=1792995&view=auto
==============================================================================
--- 
jackrabbit/oak/branches/1.2/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/CliUtils.java
 (added)
+++ 
jackrabbit/oak/branches/1.2/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/CliUtils.java
 Fri Apr 28 07:18:26 2017
@@ -0,0 +1,57 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.jackrabbit.oak.upgrade.cli;
+
+import java.io.IOException;
+import java.io.InputStream;
+
+import org.apache.commons.io.IOUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.common.io.Closer;
+
+public class CliUtils {
+
+    private static final Logger log = 
LoggerFactory.getLogger(OakUpgrade.class);
+
+    public static void displayUsage() throws IOException {
+        System.out.println(getUsage().replace("${command}", "java -jar 
oak-run-*-jr2.jar upgrade"));
+    }
+
+    public static String getUsage() throws IOException {
+        InputStream is = 
CliUtils.class.getClassLoader().getResourceAsStream("upgrade_usage.txt");
+        try {
+            return IOUtils.toString(is);
+        } finally {
+            IOUtils.closeQuietly(is);
+        }
+    }
+
+    public static void handleSigInt(final Closer closer) {
+        Runtime.getRuntime().addShutdownHook(new Thread() {
+            @Override
+            public void run() {
+                try {
+                    closer.close();
+                } catch (IOException e) {
+                    log.error("Can't close", e);
+                }
+            }
+        });
+    }
+}

Added: 
jackrabbit/oak/branches/1.2/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/MigrationFactory.java
URL: 
http://svn.apache.org/viewvc/jackrabbit/oak/branches/1.2/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/MigrationFactory.java?rev=1792995&view=auto
==============================================================================
--- 
jackrabbit/oak/branches/1.2/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/MigrationFactory.java
 (added)
+++ 
jackrabbit/oak/branches/1.2/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/MigrationFactory.java
 Fri Apr 28 07:18:26 2017
@@ -0,0 +1,123 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.jackrabbit.oak.upgrade.cli;
+
+import java.io.IOException;
+import java.util.Iterator;
+import java.util.ServiceLoader;
+
+import javax.jcr.RepositoryException;
+
+import org.apache.jackrabbit.core.RepositoryContext;
+import org.apache.jackrabbit.oak.plugins.blob.datastore.DataStoreBlobStore;
+import org.apache.jackrabbit.oak.spi.blob.BlobStore;
+import org.apache.jackrabbit.oak.spi.commit.CommitHook;
+import org.apache.jackrabbit.oak.spi.state.NodeStore;
+import org.apache.jackrabbit.oak.upgrade.RepositorySidegrade;
+import org.apache.jackrabbit.oak.upgrade.RepositoryUpgrade;
+import org.apache.jackrabbit.oak.upgrade.cli.parser.CliArgumentException;
+import org.apache.jackrabbit.oak.upgrade.cli.parser.DatastoreArguments;
+import org.apache.jackrabbit.oak.upgrade.cli.parser.MigrationOptions;
+import org.apache.jackrabbit.oak.upgrade.cli.parser.StoreArguments;
+
+import com.google.common.collect.ImmutableList;
+import com.google.common.io.Closer;
+
+public class MigrationFactory {
+
+    protected final MigrationOptions options;
+
+    protected final StoreArguments stores;
+
+    protected final DatastoreArguments datastores;
+
+    protected final Closer closer;
+
+    public MigrationFactory(MigrationOptions options, StoreArguments stores, 
DatastoreArguments datastores, Closer closer) {
+        this.options = options;
+        this.stores = stores;
+        this.datastores = datastores;
+        this.closer = closer;
+    }
+
+    public RepositoryUpgrade createUpgrade() throws IOException, 
RepositoryException, CliArgumentException {
+        RepositoryContext src = stores.getSrcStore().create(closer);
+        BlobStore srcBlobStore = new DataStoreBlobStore(src.getDataStore());
+        NodeStore dstStore = createTarget(closer, srcBlobStore);
+        return createUpgrade(src, dstStore);
+    }
+
+    public RepositorySidegrade createSidegrade() throws IOException, 
CliArgumentException {
+        BlobStore srcBlobStore = datastores.getSrcBlobStore().create(closer);
+        NodeStore srcStore = stores.getSrcStore().create(srcBlobStore, closer);
+        NodeStore dstStore = createTarget(closer, srcBlobStore);
+        return createSidegrade(srcStore, dstStore);
+    }
+
+    protected NodeStore createTarget(Closer closer, BlobStore srcBlobStore) 
throws IOException {
+        BlobStore dstBlobStore = 
datastores.getDstBlobStore(srcBlobStore).create(closer);
+        NodeStore dstStore = stores.getDstStore().create(dstBlobStore, closer);
+        return dstStore;
+    }
+
+    protected RepositoryUpgrade createUpgrade(RepositoryContext source, 
NodeStore dstStore) {
+        RepositoryUpgrade upgrade = new RepositoryUpgrade(source, dstStore);
+        upgrade.setCopyBinariesByReference(datastores.getBlobMigrationCase() 
== DatastoreArguments.BlobMigrationCase.COPY_REFERENCES);
+        upgrade.setCopyVersions(options.getCopyVersions());
+        upgrade.setCopyOrphanedVersions(options.getCopyOrphanedVersions());
+        if (options.getIncludePaths() != null) {
+            upgrade.setIncludes(options.getIncludePaths());
+        }
+        if (options.getExcludePaths() != null) {
+            upgrade.setExcludes(options.getExcludePaths());
+        }
+        if (options.getMergePaths() != null) {
+            upgrade.setMerges(options.getMergePaths());
+        }
+        upgrade.setFilterLongNames(!stores.getDstType().isSupportLongNames());
+        upgrade.setCheckLongNames(!options.isSkipNameCheck() && 
!stores.getDstType().isSupportLongNames());
+        upgrade.setSkipOnError(!options.isFailOnError());
+        upgrade.setEarlyShutdown(options.isEarlyShutdown());
+        upgrade.setSkipInitialization(options.isSkipInitialization());
+        ServiceLoader<CommitHook> loader = 
ServiceLoader.load(CommitHook.class);
+        Iterator<CommitHook> iterator = loader.iterator();
+        ImmutableList.Builder<CommitHook> builder = ImmutableList.<CommitHook> 
builder().addAll(iterator);
+        upgrade.setCustomCommitHooks(builder.build());
+        return upgrade;
+    }
+
+    private RepositorySidegrade createSidegrade(NodeStore srcStore, NodeStore 
dstStore) {
+        RepositorySidegrade sidegrade = new RepositorySidegrade(srcStore, 
dstStore);
+        sidegrade.setCopyVersions(options.getCopyVersions());
+        sidegrade.setCopyOrphanedVersions(options.getCopyOrphanedVersions());
+        if (options.getIncludePaths() != null) {
+            sidegrade.setIncludes(options.getIncludePaths());
+        }
+        if (options.getExcludePaths() != null) {
+            sidegrade.setExcludes(options.getExcludePaths());
+        }
+        if (options.getMergePaths() != null) {
+            sidegrade.setMerges(options.getMergePaths());
+        }
+        sidegrade.setFilterLongNames(stores.getSrcType().isSupportLongNames() 
&& !stores.getDstType().isSupportLongNames());
+        sidegrade.setVerify(options.isVerify());
+        sidegrade.setOnlyVerify(options.isOnlyVerify());
+        sidegrade.setSkipCheckpoints(options.isSkipCheckpoints());
+        return sidegrade;
+    }
+
+}

Added: 
jackrabbit/oak/branches/1.2/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/OakUpgrade.java
URL: 
http://svn.apache.org/viewvc/jackrabbit/oak/branches/1.2/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/OakUpgrade.java?rev=1792995&view=auto
==============================================================================
--- 
jackrabbit/oak/branches/1.2/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/OakUpgrade.java
 (added)
+++ 
jackrabbit/oak/branches/1.2/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/OakUpgrade.java
 Fri Apr 28 07:18:26 2017
@@ -0,0 +1,101 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.jackrabbit.oak.upgrade.cli;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.ServiceLoader;
+
+import javax.jcr.RepositoryException;
+
+import com.google.common.collect.Lists;
+import com.google.common.io.Closer;
+
+import joptsimple.OptionSet;
+import org.apache.jackrabbit.oak.spi.lifecycle.CompositeInitializer;
+import org.apache.jackrabbit.oak.spi.lifecycle.RepositoryInitializer;
+import org.apache.jackrabbit.oak.upgrade.cli.parser.CliArgumentException;
+import org.apache.jackrabbit.oak.upgrade.cli.parser.DatastoreArguments;
+import org.apache.jackrabbit.oak.upgrade.cli.parser.MigrationCliArguments;
+import org.apache.jackrabbit.oak.upgrade.cli.parser.MigrationOptions;
+import org.apache.jackrabbit.oak.upgrade.cli.parser.OptionParserFactory;
+import org.apache.jackrabbit.oak.upgrade.cli.parser.StoreArguments;
+
+public class OakUpgrade {
+
+    public static void main(String... args) throws IOException {
+        OptionSet options = OptionParserFactory.create().parse(args);
+        try {
+            MigrationCliArguments cliArguments = new 
MigrationCliArguments(options);
+            if (cliArguments.hasOption(OptionParserFactory.HELP) || 
cliArguments.getArguments().isEmpty()) {
+                CliUtils.displayUsage();
+                return;
+            }
+            migrate(cliArguments);
+        } catch(CliArgumentException e) {
+            if (e.getMessage() != null) {
+                System.err.println(e.getMessage());
+            }
+            System.exit(e.getExitCode());
+        }
+    }
+
+    public static void migrate(MigrationCliArguments argumentParser) throws 
IOException, CliArgumentException {
+        MigrationOptions options = new MigrationOptions(argumentParser);
+        options.logOptions();
+
+        StoreArguments stores = new StoreArguments(options, 
argumentParser.getArguments());
+        stores.logOptions();
+
+        boolean srcEmbedded = stores.srcUsesEmbeddedDatastore();
+        DatastoreArguments datastores = new DatastoreArguments(options, 
stores, srcEmbedded);
+
+        migrate(options, stores, datastores);
+    }
+
+    public static void migrate(MigrationOptions options, StoreArguments 
stores, DatastoreArguments datastores) throws IOException, CliArgumentException 
{
+        Closer closer = Closer.create();
+        CliUtils.handleSigInt(closer);
+        MigrationFactory factory = new MigrationFactory(options, stores, 
datastores, closer);
+        try {
+            if (stores.getSrcStore().isJcr2()) {
+                upgrade(factory);
+            } else {
+                sidegrade(factory);
+            }
+        } catch (Throwable t) {
+            throw closer.rethrow(t);
+        } finally {
+            closer.close();
+        }
+    }
+
+    private static void upgrade(MigrationFactory migrationFactory) throws 
IOException, RepositoryException, CliArgumentException {
+        migrationFactory.createUpgrade().copy(createCompositeInitializer());
+    }
+
+    private static void sidegrade(MigrationFactory migrationFactory) throws 
IOException, RepositoryException, CliArgumentException {
+        migrationFactory.createSidegrade().copy();
+    }
+
+    private static RepositoryInitializer createCompositeInitializer() {
+        ServiceLoader<RepositoryInitializer> loader = 
ServiceLoader.load(RepositoryInitializer.class);
+        List<RepositoryInitializer> initializers = 
Lists.newArrayList(loader.iterator());
+        return new CompositeInitializer(initializers);
+    }
+
+}
\ No newline at end of file

Added: 
jackrabbit/oak/branches/1.2/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/blob/BlobStoreFactory.java
URL: 
http://svn.apache.org/viewvc/jackrabbit/oak/branches/1.2/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/blob/BlobStoreFactory.java?rev=1792995&view=auto
==============================================================================
--- 
jackrabbit/oak/branches/1.2/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/blob/BlobStoreFactory.java
 (added)
+++ 
jackrabbit/oak/branches/1.2/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/blob/BlobStoreFactory.java
 Fri Apr 28 07:18:26 2017
@@ -0,0 +1,27 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.jackrabbit.oak.upgrade.cli.blob;
+
+import java.io.IOException;
+
+import org.apache.jackrabbit.oak.spi.blob.BlobStore;
+
+import com.google.common.io.Closer;
+
/**
 * Factory for the {@link BlobStore} used during a migration.
 *
 * Implementations that create resources requiring shutdown should register
 * them with the supplied {@link Closer} (see the file-data-store variant,
 * which registers its data store there).
 */
public interface BlobStoreFactory {

    /**
     * Creates (or returns) the blob store for this migration run.
     *
     * @param closer lifecycle handle; implementations may register closeables
     * @return the blob store, or {@code null} when no blob store is used
     * @throws IOException if the underlying store cannot be created
     */
    BlobStore create(Closer closer) throws IOException;
}

Added: 
jackrabbit/oak/branches/1.2/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/blob/ConstantBlobStoreFactory.java
URL: 
http://svn.apache.org/viewvc/jackrabbit/oak/branches/1.2/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/blob/ConstantBlobStoreFactory.java?rev=1792995&view=auto
==============================================================================
--- 
jackrabbit/oak/branches/1.2/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/blob/ConstantBlobStoreFactory.java
 (added)
+++ 
jackrabbit/oak/branches/1.2/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/blob/ConstantBlobStoreFactory.java
 Fri Apr 28 07:18:26 2017
@@ -0,0 +1,36 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.jackrabbit.oak.upgrade.cli.blob;
+
+import com.google.common.io.Closer;
+import org.apache.jackrabbit.oak.spi.blob.BlobStore;
+
+import java.io.IOException;
+
+public class ConstantBlobStoreFactory implements BlobStoreFactory {
+
+    private final BlobStore blobStore;
+
+    public ConstantBlobStoreFactory(BlobStore blobStore) {
+        this.blobStore = blobStore;
+    }
+
+    @Override
+    public BlobStore create(Closer closer) throws IOException {
+        return blobStore;
+    }
+}

Added: 
jackrabbit/oak/branches/1.2/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/blob/DummyBlobStoreFactory.java
URL: 
http://svn.apache.org/viewvc/jackrabbit/oak/branches/1.2/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/blob/DummyBlobStoreFactory.java?rev=1792995&view=auto
==============================================================================
--- 
jackrabbit/oak/branches/1.2/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/blob/DummyBlobStoreFactory.java
 (added)
+++ 
jackrabbit/oak/branches/1.2/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/blob/DummyBlobStoreFactory.java
 Fri Apr 28 07:18:26 2017
@@ -0,0 +1,34 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.jackrabbit.oak.upgrade.cli.blob;
+
+import org.apache.jackrabbit.oak.spi.blob.BlobStore;
+
+import com.google.common.io.Closer;
+
/**
 * A {@link BlobStoreFactory} placeholder that provides no blob store at all.
 */
public class DummyBlobStoreFactory implements BlobStoreFactory {

    @Override
    public BlobStore create(Closer closer) {
        // NOTE(review): returning null appears intentional for this "dummy"
        // variant — presumably callers treat a null blob store as "none
        // configured"; confirm against the call sites in MigrationFactory.
        return null;
    }

    // Descriptive form used in migration logging.
    @Override
    public String toString() {
        return "DummyBlobStore";
    }
}

Added: 
jackrabbit/oak/branches/1.2/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/blob/FileBlobStoreFactory.java
URL: 
http://svn.apache.org/viewvc/jackrabbit/oak/branches/1.2/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/blob/FileBlobStoreFactory.java?rev=1792995&view=auto
==============================================================================
--- 
jackrabbit/oak/branches/1.2/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/blob/FileBlobStoreFactory.java
 (added)
+++ 
jackrabbit/oak/branches/1.2/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/blob/FileBlobStoreFactory.java
 Fri Apr 28 07:18:26 2017
@@ -0,0 +1,41 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.jackrabbit.oak.upgrade.cli.blob;
+
+import org.apache.jackrabbit.oak.spi.blob.BlobStore;
+import org.apache.jackrabbit.oak.spi.blob.FileBlobStore;
+
+import com.google.common.io.Closer;
+
+public class FileBlobStoreFactory implements BlobStoreFactory {
+
+    private final String directory;
+
+    public FileBlobStoreFactory(String directory) {
+        this.directory = directory;
+    }
+
+    @Override
+    public BlobStore create(Closer closer) {
+        return new FileBlobStore(directory);
+    }
+
+    @Override
+    public String toString() {
+        return String.format("FileBlobStore[%s]", directory);
+    }
+}

Added: 
jackrabbit/oak/branches/1.2/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/blob/FileDataStoreFactory.java
URL: 
http://svn.apache.org/viewvc/jackrabbit/oak/branches/1.2/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/blob/FileDataStoreFactory.java?rev=1792995&view=auto
==============================================================================
--- 
jackrabbit/oak/branches/1.2/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/blob/FileDataStoreFactory.java
 (added)
+++ 
jackrabbit/oak/branches/1.2/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/blob/FileDataStoreFactory.java
 Fri Apr 28 07:18:26 2017
@@ -0,0 +1,67 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.jackrabbit.oak.upgrade.cli.blob;
+
+import java.io.Closeable;
+import java.io.IOException;
+
+import org.apache.jackrabbit.core.data.FileDataStore;
+import org.apache.jackrabbit.oak.plugins.blob.datastore.DataStoreBlobStore;
+import org.apache.jackrabbit.oak.plugins.blob.datastore.OakFileDataStore;
+import org.apache.jackrabbit.oak.spi.blob.BlobStore;
+
+import com.google.common.io.Closer;
+
/**
 * A {@link BlobStoreFactory} that creates a blob store backed by an
 * {@link OakFileDataStore} rooted at a fixed directory, registering the data
 * store with the {@link Closer} so it is shut down with the migration.
 */
public class FileDataStoreFactory implements BlobStoreFactory {

    // Filesystem root of the data store.
    private final String directory;

    // When true, the returned store is the "Safe" wrapper instead of the
    // plain DataStoreBlobStore — presumably tolerating missing binaries
    // (see SafeDataStoreBlobStore); confirm against that class.
    private final boolean ignoreMissingBlobs;

    public FileDataStoreFactory(String directory, boolean ignoreMissingBlobs) {
        this.directory = directory;
        this.ignoreMissingBlobs = ignoreMissingBlobs;
    }

    @Override
    public BlobStore create(Closer closer) {
        OakFileDataStore delegate = new OakFileDataStore();
        delegate.setPath(directory);
        // NOTE(review): init(null) — apparently the home directory argument is
        // not needed once an explicit path is set; confirm against
        // OakFileDataStore/FileDataStore.init.
        delegate.init(null);
        // Ensure the data store is closed when the migration's Closer closes.
        closer.register(asCloseable(delegate));

        if (ignoreMissingBlobs) {
            return new SafeDataStoreBlobStore(delegate);
        } else {
            return new DataStoreBlobStore(delegate);
        }
    }

    // Adapts the data store's close() to java.io.Closeable for Closer.register.
    private static Closeable asCloseable(final FileDataStore store) {
        return new Closeable() {
            @Override
            public void close() throws IOException {
                store.close();
            }
        };
    }

    // Descriptive form used in migration logging.
    @Override
    public String toString() {
        return String.format("FileDataStore[%s]", directory);
    }
}

Added: 
jackrabbit/oak/branches/1.2/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/blob/MissingBlobStore.java
URL: 
http://svn.apache.org/viewvc/jackrabbit/oak/branches/1.2/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/blob/MissingBlobStore.java?rev=1792995&view=auto
==============================================================================
--- 
jackrabbit/oak/branches/1.2/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/blob/MissingBlobStore.java
 (added)
+++ 
jackrabbit/oak/branches/1.2/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/blob/MissingBlobStore.java
 Fri Apr 28 07:18:26 2017
@@ -0,0 +1,68 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.jackrabbit.oak.upgrade.cli.blob;
+
+import java.io.IOException;
+import java.io.InputStream;
+
+import org.apache.jackrabbit.oak.spi.blob.BlobStore;
+
+/**
+ * Utility BlobStore implementation to be used in tooling that can work with a
+ * FileStore without the need of the DataStore being present locally
+ */
+public class MissingBlobStore implements BlobStore {
+
+    @Override
+    public String writeBlob(InputStream in) throws IOException {
+        throw new UnsupportedOperationException();
+    }
+
+    @Override
+    public int readBlob(String blobId, long pos, byte[] buff, int off,
+            int length) throws IOException {
+        throw new UnsupportedOperationException();
+    }
+
+    @Override
+    public long getBlobLength(String blobId) throws IOException {
+        // best effort length extraction
+        int indexOfSep = blobId.lastIndexOf("#");
+        if (indexOfSep != -1) {
+            return Long.valueOf(blobId.substring(indexOfSep + 1));
+        }
+        return -1;
+    }
+
+    @Override
+    public InputStream getInputStream(String blobId) throws IOException {
+        throw new UnsupportedOperationException();
+    }
+
+    @Override
+    public String getBlobId(String reference) {
+        return reference;
+    }
+
+    @Override
+    public String getReference(String blobId) {
+        return blobId;
+    }
+}

Added: 
jackrabbit/oak/branches/1.2/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/blob/MissingBlobStoreFactory.java
URL: 
http://svn.apache.org/viewvc/jackrabbit/oak/branches/1.2/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/blob/MissingBlobStoreFactory.java?rev=1792995&view=auto
==============================================================================
--- 
jackrabbit/oak/branches/1.2/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/blob/MissingBlobStoreFactory.java
 (added)
+++ 
jackrabbit/oak/branches/1.2/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/blob/MissingBlobStoreFactory.java
 Fri Apr 28 07:18:26 2017
@@ -0,0 +1,34 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.jackrabbit.oak.upgrade.cli.blob;
+
+import org.apache.jackrabbit.oak.spi.blob.BlobStore;
+
+import com.google.common.io.Closer;
+
/**
 * A {@link BlobStoreFactory} producing a {@link MissingBlobStore} — the
 * placeholder store used when the real DataStore is not available locally.
 */
public class MissingBlobStoreFactory implements BlobStoreFactory {

    @Override
    public BlobStore create(Closer closer) {
        // MissingBlobStore holds no resources, so nothing is registered
        // with the closer.
        return new MissingBlobStore();
    }

    // Descriptive form used in migration logging.
    @Override
    public String toString() {
        return "MissingBlobStore";
    }
}


Reply via email to