/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.jackrabbit.oak.upgrade.cli.blob;

import java.io.Closeable;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.util.HashSet;
import java.util.Properties;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import javax.jcr.RepositoryException;

import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.apache.jackrabbit.aws.ext.ds.S3DataStore;
import org.apache.jackrabbit.core.data.CachingDataStore;
import org.apache.jackrabbit.core.data.DataStoreException;
import org.apache.jackrabbit.oak.plugins.blob.datastore.DataStoreBlobStore;
import org.apache.jackrabbit.oak.spi.blob.BlobStore;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.google.common.io.Closer;
import com.google.common.io.Files;

/**
 * Creates a {@link BlobStore} backed by a Jackrabbit {@link S3DataStore},
 * configured from a properties file. When {@code ignoreMissingBlobs} is set,
 * the returned store is a {@link SafeDataStoreBlobStore}, which logs a warning
 * instead of failing on missing blobs.
 */
public class S3DataStoreFactory implements BlobStoreFactory {

    private static final Logger log = LoggerFactory.getLogger(S3DataStoreFactory.class);

    /**
     * Matches values of the form {@code X"..."} where X is an optional
     * single-letter type prefix; group(1) captures the unquoted value.
     */
    private static final Pattern STRIP_VALUE_PATTERN = Pattern.compile("^[TILFDXSCB]?\"(.*)\"\\W*$");

    /** S3 data store configuration, with quoted/typed values normalized. */
    private final Properties props;

    /** Local path passed to {@link S3DataStore#setPath(String)}. */
    private final String directory;

    /** Scratch home directory for the data store; deleted on close. */
    private final File tempHomeDir;

    /** If true, wrap the delegate so missing blobs are tolerated. */
    private final boolean ignoreMissingBlobs;

    /**
     * @param configuration      path to a properties file with the S3 connection settings
     * @param directory          local data store path
     * @param ignoreMissingBlobs whether missing blobs should be logged rather than fatal
     * @throws IOException if the configuration file cannot be read
     */
    public S3DataStoreFactory(String configuration, String directory, boolean ignoreMissingBlobs) throws IOException {
        this.props = new Properties();
        FileReader reader = new FileReader(new File(configuration));
        try {
            props.load(reader);
        } finally {
            IOUtils.closeQuietly(reader);
        }

        // Values exported in the X"..." form (see STRIP_VALUE_PATTERN) are
        // normalized to plain strings before being handed to the data store.
        // Iterate over a copy of the key set since we mutate the Properties.
        for (Object key : new HashSet<Object>(props.keySet())) {
            String value = props.getProperty((String) key);
            props.put(key, stripValue(value));
        }

        this.directory = directory;
        this.tempHomeDir = Files.createTempDir();
        this.ignoreMissingBlobs = ignoreMissingBlobs;
    }

    @Override
    public BlobStore create(Closer closer) throws IOException {
        S3DataStore delegate = new S3DataStore();
        delegate.setProperties(props);
        delegate.setPath(directory);
        try {
            delegate.init(tempHomeDir.getPath());
        } catch (RepositoryException e) {
            throw new IOException(e);
        }
        closer.register(asCloseable(delegate, tempHomeDir));
        if (ignoreMissingBlobs) {
            return new SafeDataStoreBlobStore(delegate);
        } else {
            return new DataStoreBlobStore(delegate);
        }
    }

    /**
     * Wraps the data store so that closing it waits for pending uploads,
     * shuts down the store and removes the temporary home directory.
     */
    private static Closeable asCloseable(final CachingDataStore store, final File tempHomeDir) {
        return new Closeable() {
            @Override
            public void close() throws IOException {
                try {
                    // NOTE(review): this can spin indefinitely if an upload
                    // never completes; no timeout is applied here.
                    while (!store.getPendingUploads().isEmpty()) {
                        log.info("Waiting for following uploads to finish: {}", store.getPendingUploads());
                        Thread.sleep(1000);
                    }
                    store.close();
                    FileUtils.deleteDirectory(tempHomeDir);
                } catch (DataStoreException e) {
                    throw new IOException(e);
                } catch (InterruptedException e) {
                    // Restore the interrupt flag so callers can observe it.
                    Thread.currentThread().interrupt();
                    throw new IOException(e);
                }
            }
        };
    }

    /**
     * Strips the optional type prefix and surrounding quotes from a property
     * value; returns the value unchanged if it does not match the pattern.
     */
    static String stripValue(String value) {
        Matcher matcher = STRIP_VALUE_PATTERN.matcher(value);
        if (matcher.matches()) {
            return matcher.group(1);
        } else {
            return value;
        }
    }

    @Override
    public String toString() {
        return String.format("S3DataStore[%s]", directory);
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.jackrabbit.oak.upgrade.cli.blob;

import org.apache.jackrabbit.core.data.DataIdentifier;
import org.apache.jackrabbit.core.data.DataRecord;
import org.apache.jackrabbit.core.data.DataStore;
import org.apache.jackrabbit.core.data.DataStoreException;
import org.apache.jackrabbit.oak.plugins.blob.datastore.DataStoreBlobStore;
import org.apache.jackrabbit.oak.plugins.blob.datastore.InMemoryDataRecord;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import javax.annotation.Nonnull;
import java.io.BufferedInputStream;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;

import static com.google.common.base.Preconditions.checkNotNull;

/**
 * This implementation of the DataStoreBlobStore won't throw an exception if
 * it can't find blob with given id. The WARN message will be emitted instead
 * and the empty InputStream will be returned.
 */
public class SafeDataStoreBlobStore extends DataStoreBlobStore {

    private static final Logger log = LoggerFactory.getLogger(SafeDataStoreBlobStore.class);

    public SafeDataStoreBlobStore(DataStore delegate) {
        super(delegate);
    }

    /**
     * Returns the delegate's reference for the given encoded blob id, or
     * {@code null} if the blob is missing, in-memory, or the lookup fails.
     */
    @Override
    public String getReference(@Nonnull String encodedBlobId) {
        checkNotNull(encodedBlobId);
        String blobId = extractBlobId(encodedBlobId);
        //Reference are not created for in memory record
        if (InMemoryDataRecord.isInstance(blobId)) {
            return null;
        }

        DataRecord record;
        try {
            record = delegate.getRecordIfStored(new DataIdentifier(blobId));
            if (record != null) {
                return record.getReference();
            } else {
                log.debug("No blob found for id [{}]", blobId);
            }
        } catch (DataStoreException e) {
            log.warn("Unable to access the blobId for [{}]", blobId, e);
        }
        return null;
    }

    /**
     * Returns a buffered stream for the blob, or an empty stream (with a
     * WARN log) when the blob cannot be found.
     */
    @Override
    protected InputStream getStream(String blobId) throws IOException {
        try {
            DataRecord record = getDataRecord(blobId);
            if (record == null) {
                log.warn("No blob found for id [{}]", blobId);
                return new ByteArrayInputStream(new byte[0]);
            }
            // Reuse the record fetched above instead of looking it up again:
            // the original called getDataRecord(blobId) a second time, paying
            // for an extra (potentially remote) lookup after the null check.
            InputStream in = record.getStream();
            if (!(in instanceof BufferedInputStream)){
                in = new BufferedInputStream(in);
            }
            return in;
        } catch (DataStoreException e) {
            throw new IOException(e);
        }
    }

    /**
     * Resolves the record for the id, handling in-memory records specially;
     * may return {@code null} when the delegate has no such record.
     */
    @Override
    protected DataRecord getDataRecord(String blobId) throws DataStoreException {
        DataRecord id;
        if (InMemoryDataRecord.isInstance(blobId)) {
            id = InMemoryDataRecord.getInstance(blobId);
        } else {
            id = delegate.getRecordIfStored(new DataIdentifier(blobId));
        }
        return id;
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.jackrabbit.oak.upgrade.cli.node;

import java.io.BufferedReader;
import java.io.Closeable;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;

import javax.jcr.RepositoryException;

import org.apache.commons.io.IOUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.jackrabbit.core.RepositoryContext;
import org.apache.jackrabbit.core.config.RepositoryConfig;

import com.google.common.io.Closer;

/**
 * Factory for a Jackrabbit 2 {@link RepositoryContext}, built from a
 * repository home directory and a {@code repository.xml} configuration file.
 */
public class Jackrabbit2Factory {

    private final File repositoryDir;

    private final File repositoryFile;

    /**
     * @param repositoryDir  Jackrabbit 2 repository home (must contain a
     *                       {@code workspaces} directory)
     * @param repositoryFile path to the {@code repository.xml} configuration
     * @throws IllegalArgumentException if either location is not recognized
     */
    public Jackrabbit2Factory(String repositoryDir, String repositoryFile) {
        // Validate both arguments before assigning any state, so a failed
        // precondition never leaves a partially-initialized instance. The
        // original checked the configuration file only after assignment.
        if (!isJcr2Repository(repositoryDir)) {
            throw new IllegalArgumentException("Repository directory not found: " + repositoryDir);
        }
        if (!isRepositoryXml(repositoryFile)) {
            throw new IllegalArgumentException("Repository configuration not found: " + repositoryFile);
        }
        this.repositoryDir = new File(repositoryDir);
        this.repositoryFile = new File(repositoryFile);
    }

    /**
     * Creates the repository context and registers its shutdown with the
     * given closer.
     */
    public RepositoryContext create(Closer closer) throws RepositoryException {
        RepositoryContext source = RepositoryContext.create(RepositoryConfig.create(repositoryFile, repositoryDir));
        closer.register(asCloseable(source));
        return source;
    }

    public File getRepositoryDir() {
        return repositoryDir;
    }

    private static Closeable asCloseable(final RepositoryContext context) {
        return new Closeable() {
            @Override
            public void close() throws IOException {
                context.getRepository().shutdown();
            }
        };
    }

    /**
     * Returns true if the given path points to a readable file containing a
     * {@code <Repository>} element (case-insensitive); false otherwise,
     * including on any I/O error.
     */
    public static boolean isRepositoryXml(String path) {
        File file = new File(path);
        if (file.isFile()) {
            BufferedReader reader = null;
            try {
                reader = new BufferedReader(new FileReader(file));
                String line;
                while ((line = reader.readLine()) != null) {
                    if (StringUtils.containsIgnoreCase(line, "<Repository>")) {
                        return true;
                    }
                }
            } catch (FileNotFoundException e) {
                return false;
            } catch (IOException e) {
                return false;
            } finally {
                IOUtils.closeQuietly(reader);
            }
        }
        return false;
    }

    /**
     * Returns true if the given directory looks like a Jackrabbit 2 home,
     * i.e. it contains a {@code workspaces} subdirectory.
     */
    public static boolean isJcr2Repository(String directory) {
        File dir = new File(directory);
        if (!dir.isDirectory()) {
            return false;
        }
        File workspaces = new File(dir, "workspaces");
        return workspaces.isDirectory();
    }

    @Override
    public String toString() {
        return String.format("JCR2[%s, %s]", repositoryDir, repositoryFile);
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.jackrabbit.oak.upgrade.cli.node;

import org.apache.jackrabbit.oak.plugins.document.DocumentMK;
import org.apache.jackrabbit.oak.plugins.document.DocumentNodeStore;
import org.apache.jackrabbit.oak.plugins.document.rdb.RDBBlobStore;
import org.apache.jackrabbit.oak.plugins.document.rdb.RDBDataSourceFactory;
import org.apache.jackrabbit.oak.spi.blob.BlobStore;
import org.apache.jackrabbit.oak.spi.state.NodeStore;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.google.common.io.Closer;

import javax.sql.DataSource;
import java.io.Closeable;
import java.io.IOException;

/**
 * Creates a {@link DocumentNodeStore} backed by an RDB (JDBC) persistence
 * layer for the oak-upgrade CLI.
 */
public class JdbcFactory implements NodeStoreFactory {

    private static final Logger log = LoggerFactory.getLogger(JdbcFactory.class);

    private final String jdbcUri;

    /** Document cache size in MB, forwarded to the DocumentMK builder. */
    private final int cacheSize;

    private final String user;

    private final String password;

    private final boolean readOnly;

    /**
     * @throws IllegalArgumentException if user or password is null — the RDB
     *         connection cannot be established without credentials
     */
    public JdbcFactory(String jdbcUri, int cacheSize, String user, String password, boolean readOnly) {
        this.jdbcUri = jdbcUri;
        this.cacheSize = cacheSize;
        if (user == null || password == null) {
            // Fixed typo in the original message: "RBD" -> "RDB".
            throw new IllegalArgumentException("RDB requires username and password parameters.");
        }
        this.user = user;
        this.password = password;
        this.readOnly = readOnly;
    }

    @Override
    public NodeStore create(BlobStore blobStore, Closer closer) {
        DocumentMK.Builder builder = MongoFactory.getBuilder(cacheSize);
        if (blobStore != null) {
            builder.setBlobStore(blobStore);
        }
        builder.setRDBConnection(getDataSource(closer));
        if (readOnly) {
            log.warn("Read-only mode for the DocumentMK is not available in 1.4");
        }
        log.info("Initialized DocumentNodeStore on RDB with Cache size : {} MB, Fast migration : {}", cacheSize,
                builder.isDisableBranches());
        DocumentNodeStore documentNodeStore = builder.getNodeStore();
        closer.register(MongoFactory.asCloseable(documentNodeStore));
        return documentNodeStore;
    }

    /**
     * Builds the JDBC data source and, when it is closeable, registers it
     * with the given closer.
     */
    private DataSource getDataSource(Closer closer) {
        DataSource ds = RDBDataSourceFactory.forJdbcUrl(jdbcUri, user, password);
        if (ds instanceof Closeable) {
            closer.register((Closeable)ds);
        }
        return ds;
    }

    /**
     * Returns true when the RDB blob store contains no chunks, i.e. the
     * binaries must be referenced from an external blob store.
     */
    @Override
    public boolean hasExternalBlobReferences() throws IOException {
        Closer closer = Closer.create();
        try {
            DataSource ds = getDataSource(closer);
            // NOTE(review): the RDBBlobStore itself is never closed here,
            // only the underlying DataSource via the closer — confirm the
            // blob store holds no resources of its own.
            RDBBlobStore blobStore = new RDBBlobStore(ds);
            return !blobStore.getAllChunkIds(0).hasNext();
        } catch(Throwable e) {
            throw closer.rethrow(e);
        } finally {
            closer.close();
        }
    }

    @Override
    public String toString() {
        return String.format("DocumentNodeStore[%s]", jdbcUri);
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.jackrabbit.oak.upgrade.cli.node;

import com.mongodb.DB;
import org.apache.jackrabbit.oak.plugins.document.DocumentMK;
import org.apache.jackrabbit.oak.plugins.document.DocumentNodeStore;
import org.apache.jackrabbit.oak.plugins.document.mongo.MongoBlobStore;
import org.apache.jackrabbit.oak.spi.blob.BlobStore;
import org.apache.jackrabbit.oak.spi.state.NodeStore;

import com.google.common.io.Closer;
import com.mongodb.MongoClient;
import com.mongodb.MongoClientURI;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.Closeable;
import java.io.IOException;
import java.net.UnknownHostException;

/**
 * Creates a {@link DocumentNodeStore} backed by MongoDB for the oak-upgrade
 * CLI. Also exposes the shared DocumentMK builder used by {@link JdbcFactory}.
 */
public class MongoFactory implements NodeStoreFactory {

    private static final Logger log = LoggerFactory.getLogger(MongoFactory.class);

    /** Bytes per megabyte, used to scale the cache size parameter. */
    private static final long MB = 1024 * 1024;

    private final MongoClientURI uri;

    /** Document cache size in MB. */
    private final int cacheSize;

    private final boolean readOnly;

    /**
     * @param repoDesc  MongoDB connection URI describing the repository
     * @param cacheSize document cache size in MB
     * @param readOnly  requested read-only mode (not supported; logged only)
     */
    public MongoFactory(String repoDesc, int cacheSize, boolean readOnly) {
        this.uri = new MongoClientURI(repoDesc);
        this.cacheSize = cacheSize;
        this.readOnly = readOnly;
    }

    @Override
    public NodeStore create(BlobStore blobStore, Closer closer) throws UnknownHostException {
        DocumentMK.Builder builder = getBuilder(cacheSize);
        builder.setMongoDB(getDB(closer));
        if (blobStore != null) {
            builder.setBlobStore(blobStore);
        }
        if (readOnly) {
            log.warn("Read-only mode for the DocumentMK is not available in 1.4");
        }
        DocumentNodeStore store = builder.getNodeStore();
        closer.register(asCloseable(store));
        return store;
    }

    /**
     * Connects to MongoDB and returns the database named in the URI,
     * registering the client with the closer for cleanup.
     */
    private DB getDB(Closer closer) throws UnknownHostException {
        // When the URI names no database, assume an author instance.
        String dbName = (uri.getDatabase() == null) ? "aem-author" : uri.getDatabase();
        MongoClient client = new MongoClient(uri);
        closer.register(asCloseable(client));
        return client.getDB(dbName);
    }

    /**
     * Returns true when the Mongo-internal blob store holds no chunks, i.e.
     * binaries must live in an external blob store.
     */
    @Override
    public boolean hasExternalBlobReferences() throws IOException {
        Closer closer = Closer.create();
        try {
            MongoBlobStore blobStore = new MongoBlobStore(getDB(closer));
            return !blobStore.getAllChunkIds(0).hasNext();
        } catch(Throwable e) {
            throw closer.rethrow(e);
        } finally {
            closer.close();
        }
    }

    /** Adapts node-store disposal to {@link Closeable} for use with a Closer. */
    static Closeable asCloseable(final DocumentNodeStore documentNodeStore) {
        return new Closeable() {
            @Override
            public void close() throws IOException {
                documentNodeStore.dispose();
            }
        };
    }

    /** Adapts Mongo client shutdown to {@link Closeable}. */
    private static Closeable asCloseable(final MongoClient client) {
        return new Closeable() {
            @Override
            public void close() throws IOException {
                client.close();
            }
        };
    }

    /**
     * Shared builder configuration: sets the memory cache size and disables
     * branches unless the {@code mongomk.disableFastMigration} system
     * property is set.
     */
    static DocumentMK.Builder getBuilder(int cacheSize) {
        DocumentMK.Builder builder = new DocumentMK.Builder();
        builder.memoryCacheSize(cacheSize * MB);
        if (!Boolean.getBoolean("mongomk.disableFastMigration")) {
            builder.disableBranches();
        }
        return builder;
    }

    @Override
    public String toString() {
        return String.format("DocumentNodeStore[%s]", uri.toString());
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.jackrabbit.oak.upgrade.cli.node;

import java.io.IOException;

import org.apache.jackrabbit.oak.spi.blob.BlobStore;
import org.apache.jackrabbit.oak.spi.state.NodeStore;

import com.google.common.io.Closer;

/**
 * Factory for {@link NodeStore} instances used as migration sources or
 * targets by the oak-upgrade CLI.
 */
public interface NodeStoreFactory {

    /**
     * Creates the node store.
     *
     * @param blobStore optional external blob store to attach, may be null
     * @param closer    receives any resources that must be released when the
     *                  store is no longer needed
     * @throws IOException if the store cannot be created
     */
    NodeStore create(BlobStore blobStore, Closer closer) throws IOException;

    /**
     * Whether the repository's binaries appear to live outside the node
     * store's own blob storage.
     * <p>
     * NOTE(review): implementations in this package answer this by checking
     * whether the store-internal blob store is empty — confirm that is the
     * intended contract before relying on it.
     */
    boolean hasExternalBlobReferences() throws IOException;
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.jackrabbit.oak.upgrade.cli.node;

import java.io.Closeable;
import java.io.File;
import java.io.IOException;

import org.apache.jackrabbit.oak.plugins.blob.ReferenceCollector;
import org.apache.jackrabbit.oak.plugins.segment.SegmentNodeStore;
import org.apache.jackrabbit.oak.plugins.segment.file.FileStore;
import org.apache.jackrabbit.oak.plugins.segment.file.FileStore.Builder;
import org.apache.jackrabbit.oak.spi.blob.BlobStore;
import org.apache.jackrabbit.oak.spi.state.NodeState;
import org.apache.jackrabbit.oak.spi.state.NodeStore;

import com.google.common.io.Closer;

import javax.annotation.Nullable;

/**
 * Creates a TarMK (segment) node store rooted in the {@code segmentstore}
 * subdirectory of the given repository directory.
 */
public class SegmentFactory implements NodeStoreFactory {

    private final File dir;

    private final boolean disableMmap;

    private final boolean readOnly;

    /**
     * @param directory   repository directory; created if missing
     * @param disableMmap disable memory mapping even on 64-bit JVMs
     * @param readOnly    open the file store read-only
     * @throws IllegalArgumentException if the path is not (and cannot become)
     *         a directory
     */
    public SegmentFactory(String directory, boolean disableMmap, boolean readOnly) {
        this.dir = new File(directory);
        this.disableMmap = disableMmap;
        this.readOnly = readOnly;
        createDirectoryIfMissing(dir);
        if (!dir.isDirectory()) {
            throw new IllegalArgumentException("Not a directory: " + dir.getPath());
        }
    }

    /** Best-effort creation; the constructor verifies the result afterwards. */
    private void createDirectoryIfMissing(File directory) {
        if (!directory.exists()) {
            directory.mkdirs();
        }
    }

    @Override
    public NodeStore create(BlobStore blobStore, Closer closer) throws IOException {
        File segmentDir = new File(dir, "segmentstore");
        // Memory-map only when allowed and running on a 64-bit data model.
        boolean mmapEnabled = !disableMmap && "64".equals(System.getProperty("sun.arch.data.model", "32"));

        Builder builder = readOnly
                ? FileStore.ReadOnlyStore.newFileStore(segmentDir)
                : FileStore.newFileStore(segmentDir);
        if (blobStore != null) {
            builder.withBlobStore(blobStore);
        }
        builder.withMaxFileSize(256);
        builder.withMemoryMapping(mmapEnabled);

        final FileStore fs = builder.create();
        closer.register(asCloseable(fs));

        // Wrap in a TarNodeStore so callers can reach the super root.
        return new TarNodeStore(new SegmentNodeStore(fs), new TarNodeStore.SuperRootProvider() {
            @Override
            public NodeState getSuperRoot() {
                return fs.getHead();
            }
        });
    }

    /**
     * Scans the segment store for blob references; the first reference found
     * aborts the scan via an internal runtime exception.
     */
    @Override
    public boolean hasExternalBlobReferences() throws IOException {
        FileStore fs = FileStore.ReadOnlyStore.newFileStore(new File(dir, "segmentstore")).create();
        try {
            fs.getTracker().collectBlobReferences(new ReferenceCollector() {
                @Override
                public void addReference(String reference, @Nullable String nodeId) {
                    // FIXME the collector should allow to stop processing
                    // see java.nio.file.FileVisitor
                    throw new ExternalBlobFound();
                }
            });
            return false;
        } catch (ExternalBlobFound e) {
            return true;
        } finally {
            fs.close();
        }
    }

    public File getRepositoryDir() {
        return dir;
    }

    /** Adapts file-store shutdown to {@link Closeable}. */
    private static Closeable asCloseable(final FileStore fs) {
        return new Closeable() {
            @Override
            public void close() throws IOException {
                fs.close();
            }
        };
    }

    @Override
    public String toString() {
        return String.format("SegmentNodeStore[%s]", dir);
    }

    /** Control-flow marker used to abort the blob-reference scan early. */
    private static class ExternalBlobFound extends RuntimeException {
    }
}
package org.apache.jackrabbit.oak.upgrade.cli.node;

import java.io.IOException;

import javax.jcr.RepositoryException;

import org.apache.jackrabbit.core.RepositoryContext;
import org.apache.jackrabbit.oak.spi.blob.BlobStore;
import org.apache.jackrabbit.oak.spi.state.NodeStore;

import com.google.common.io.Closer;

/**
 * Dispatches store creation either to a {@link Jackrabbit2Factory} (classic
 * JCR2 repository) or to a {@link NodeStoreFactory} (Oak node store).
 * Exactly one of the two delegates is set, depending on the constructor used.
 */
public class StoreFactory {

    private final Jackrabbit2Factory jackrabbit2;

    private final NodeStoreFactory nodeStore;

    /** Creates a factory wrapping a JCR2 repository factory. */
    public StoreFactory(Jackrabbit2Factory crx2Factory) {
        this.jackrabbit2 = crx2Factory;
        this.nodeStore = null;
    }

    /** Creates a factory wrapping an Oak node store factory. */
    public StoreFactory(NodeStoreFactory nodeStoreFactory) {
        this.jackrabbit2 = null;
        this.nodeStore = nodeStoreFactory;
    }

    /**
     * Opens the wrapped JCR2 repository.
     *
     * @throws UnsupportedOperationException if this factory wraps a node store
     */
    public RepositoryContext create(Closer closer) throws IOException, RepositoryException {
        if (jackrabbit2 == null) {
            throw new UnsupportedOperationException();
        }
        return jackrabbit2.create(closer);
    }

    /**
     * Opens the wrapped Oak node store.
     *
     * @throws UnsupportedOperationException if this factory wraps a JCR2 repository
     */
    public NodeStore create(BlobStore blobStore, Closer closer) throws IOException {
        if (nodeStore == null) {
            throw new UnsupportedOperationException();
        }
        return nodeStore.create(blobStore, closer);
    }

    /** @return {@code true} if this factory wraps a JCR2 repository */
    public boolean isJcr2() {
        return jackrabbit2 != null;
    }

    /**
     * JCR2 sources are always treated as having external blob references;
     * Oak node stores are asked via their factory.
     */
    public boolean hasExternalBlobReferences() throws IOException {
        return isJcr2() || nodeStore.hasExternalBlobReferences();
    }
}
Added: jackrabbit/oak/branches/1.2/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/node/TarNodeStore.java URL: http://svn.apache.org/viewvc/jackrabbit/oak/branches/1.2/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/node/TarNodeStore.java?rev=1792995&view=auto ============================================================================== --- jackrabbit/oak/branches/1.2/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/node/TarNodeStore.java (added) +++ jackrabbit/oak/branches/1.2/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/node/TarNodeStore.java Fri Apr 28 07:18:26 2017 @@ -0,0 +1,48 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.jackrabbit.oak.upgrade.cli.node; + +import org.apache.jackrabbit.oak.spi.state.NodeState; +import org.apache.jackrabbit.oak.spi.state.NodeStore; +import org.apache.jackrabbit.oak.spi.state.ProxyNodeStore; + +public class TarNodeStore extends ProxyNodeStore { + + private final NodeStore ns; + + private final SuperRootProvider superRootProvider; + + public TarNodeStore(NodeStore ns, SuperRootProvider superRootProvider) { + this.ns = ns; + this.superRootProvider = superRootProvider; + } + + public NodeState getSuperRoot() { + return superRootProvider.getSuperRoot(); + } + + @Override + protected NodeStore getNodeStore() { + return ns; + } + + public interface SuperRootProvider { + + NodeState getSuperRoot(); + + } +} Added: jackrabbit/oak/branches/1.2/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/parser/CliArgumentException.java URL: http://svn.apache.org/viewvc/jackrabbit/oak/branches/1.2/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/parser/CliArgumentException.java?rev=1792995&view=auto ============================================================================== --- jackrabbit/oak/branches/1.2/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/parser/CliArgumentException.java (added) +++ jackrabbit/oak/branches/1.2/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/parser/CliArgumentException.java Fri Apr 28 07:18:26 2017 @@ -0,0 +1,38 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.jackrabbit.oak.upgrade.cli.parser; + +public class CliArgumentException extends Exception { + + private static final long serialVersionUID = -7579563789244874904L; + + private final int exitCode; + + public CliArgumentException(int exitCode) { + super(); + this.exitCode = exitCode; + } + + public CliArgumentException(String message, int exitCode) { + super(message); + this.exitCode = exitCode; + } + + public int getExitCode() { + return exitCode; + } +} Added: jackrabbit/oak/branches/1.2/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/parser/DatastoreArguments.java URL: http://svn.apache.org/viewvc/jackrabbit/oak/branches/1.2/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/parser/DatastoreArguments.java?rev=1792995&view=auto ============================================================================== --- jackrabbit/oak/branches/1.2/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/parser/DatastoreArguments.java (added) +++ jackrabbit/oak/branches/1.2/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/parser/DatastoreArguments.java Fri Apr 28 07:18:26 2017 @@ -0,0 +1,238 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
package org.apache.jackrabbit.oak.upgrade.cli.parser;

import org.apache.commons.lang.text.StrSubstitutor;
import org.apache.jackrabbit.oak.spi.blob.BlobStore;
import org.apache.jackrabbit.oak.upgrade.cli.blob.BlobStoreFactory;
import org.apache.jackrabbit.oak.upgrade.cli.blob.ConstantBlobStoreFactory;
import org.apache.jackrabbit.oak.upgrade.cli.blob.DummyBlobStoreFactory;
import org.apache.jackrabbit.oak.upgrade.cli.blob.FileBlobStoreFactory;
import org.apache.jackrabbit.oak.upgrade.cli.blob.FileDataStoreFactory;
import org.apache.jackrabbit.oak.upgrade.cli.blob.MissingBlobStoreFactory;
import org.apache.jackrabbit.oak.upgrade.cli.blob.S3DataStoreFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.util.Map;

import static com.google.common.collect.Maps.newHashMap;
import static org.apache.jackrabbit.oak.upgrade.cli.parser.StoreType.JCR2_DIR_XML;

/**
 * This class parses the input provided by the user and analyses the given node stores
 * in order to find out which datastore combination should be used for the migration.
 *
 * The desired outcome for the combinations of user input can be found in the table below.
 * The table is a kind of heuristics that tries to match the user intentions.
 * <pre>
 * For sidegrade:
 || src blobstore defined || src blobs embedded || dst blobstore defined || --copy-binaries || outcome src blobstore || outcome action
 |           -            |          -           |           -            |        -         |        missing         | copy references¹
 |           -            |          -           |           -            |        +         |        missing         | (x) not supported
 |           -            |          -           |           +            |        *         |        missing         | (x) not supported
 |           -            |          +           |           -            |        *         |        embedded        | copy to embedded
 |           -            |          +           |           +            |        *         |        embedded        | copy to defined blobstore
 |           +            |          *           |           -            |        -         |       as in src        | copy references
 |           +            |          *           |           -            |        +         |       as in src        | copy to embedded
 |           +            |          *           |           +            |        *         |       as in src        | copy to defined blobstore

 ¹ - (x) not supported for SegmentMK -> MongoMK migration

 For upgrade:

 || dst blobstore defined || --copy-binaries || outcome src blobstore || outcome action
 |           -            |        -         |    defined by JCR2     | copy references
 |           -            |        +         |    defined by JCR2     | copy to embedded
 |           +            |        *         |    defined by JCR2     | copy to defined blobstore
 * </pre>
 */
public class DatastoreArguments {

    private static final Logger log = LoggerFactory.getLogger(DatastoreArguments.class);

    // Factory built from the --src-* blob store options, or null if none given.
    private final BlobStoreFactory definedSrcBlob;

    // Factory built from the destination blob store options, or null if none given.
    private final BlobStoreFactory definedDstBlob;

    private final StoreArguments storeArguments;

    // The migration strategy chosen by discoverBlobMigrationCase(); never UNSUPPORTED
    // after the constructor completes (that case throws CliArgumentException).
    private final BlobMigrationCase blobMigrationCase;

    private final MigrationOptions options;

    // Whether the source node store keeps its blobs embedded.
    private final boolean srcEmbedded;

    /**
     * Analyses the options and the node stores and picks a blob migration case.
     *
     * @param options        parsed CLI options
     * @param storeArguments source/destination node store descriptors
     * @param srcEmbedded    whether the source keeps blobs embedded
     * @throws CliArgumentException if the combination is unsupported or the
     *         blob store configuration cannot be read (exit code 1)
     */
    public DatastoreArguments(MigrationOptions options, StoreArguments storeArguments, boolean srcEmbedded) throws CliArgumentException {
        this.storeArguments = storeArguments;
        this.options = options;
        this.srcEmbedded = srcEmbedded;

        try {
            blobMigrationCase = discoverBlobMigrationCase();
        } catch (IOException e) {
            log.error("Can't figure out the right blob migration path", e);
            throw new CliArgumentException(1);
        }

        if (blobMigrationCase == BlobMigrationCase.UNSUPPORTED) {
            throw new CliArgumentException("This combination of data- and node-stores is not supported", 1);
        }

        try {
            definedSrcBlob = options.isSrcBlobStoreDefined() ? getDefinedSrcBlobStore() : null;
            definedDstBlob = options.isDstBlobStoreDefined() ? getDefinedDstBlobStore() : null;
        } catch (IOException e) {
            log.error("Can't read the blob configuration", e);
            throw new CliArgumentException(1);
        }

        log.info(blobMigrationCase.getDescription(this));
    }

    /**
     * Returns the blob store factory to use for the migration source,
     * following the table in the class comment.
     */
    public BlobStoreFactory getSrcBlobStore() throws IOException {
        BlobStoreFactory result;
        if (options.isSrcBlobStoreDefined()) {
            result = definedSrcBlob;
        } else if (blobMigrationCase == BlobMigrationCase.COPY_REFERENCES) {
            result = new MissingBlobStoreFactory();
        } else {
            result = new DummyBlobStoreFactory(); // embedded
        }
        log.info("Source blob store: {}", result);
        return result;
    }

    /**
     * Returns the blob store factory to use for the migration destination.
     *
     * @param srcBlobStore the already-created source blob store; reused as the
     *        destination when only references are copied
     */
    public BlobStoreFactory getDstBlobStore(BlobStore srcBlobStore) throws IOException {
        BlobStoreFactory result;
        if (options.isDstBlobStoreDefined()) {
            result = definedDstBlob;
        } else if (blobMigrationCase == BlobMigrationCase.COPY_REFERENCES && (options.isSrcBlobStoreDefined() || storeArguments.getSrcType() == JCR2_DIR_XML)) {
            // Reference-only copy with a known source store: reuse it on the
            // destination side so the references stay resolvable.
            result = new ConstantBlobStoreFactory(srcBlobStore);
        } else if (blobMigrationCase == BlobMigrationCase.COPY_REFERENCES) {
            result = new MissingBlobStoreFactory();
        } else {
            result = new DummyBlobStoreFactory(); // embedded
        }

        log.info("Destination blob store: {}", result);
        return result;
    }

    // Builds the factory for the --src-* blob store options; null if none set.
    private BlobStoreFactory getDefinedSrcBlobStore() throws IOException {
        boolean ignoreMissingBinaries = options.isIgnoreMissingBinaries();
        if (options.isSrcFbs()) {
            return new FileBlobStoreFactory(options.getSrcFbs());
        } else if (options.isSrcS3()) {
            return new S3DataStoreFactory(options.getSrcS3Config(), options.getSrcS3(), ignoreMissingBinaries);
        } else if (options.isSrcFds()) {
            return new FileDataStoreFactory(options.getSrcFds(), ignoreMissingBinaries);
        } else {
            return null;
        }
    }

    // Builds the factory for the destination blob store options; null if none set.
    // Missing binaries are never ignored on the destination side.
    private BlobStoreFactory getDefinedDstBlobStore() throws IOException {
        if (options.isDstFbs()) {
            return new FileBlobStoreFactory(options.getDstFbs());
        } else if (options.isDstS3()) {
            return new S3DataStoreFactory(options.getDstS3Config(), options.getDstS3(), false);
        } else if (options.isDstFds()) {
            return new FileDataStoreFactory(options.getDstFds(), false);
        } else {
            return null;
        }
    }

    /**
     * The possible blob migration strategies. The description templates are
     * expanded with the concrete store names in {@link #getDescription}.
     */
    public enum BlobMigrationCase {
        COPY_REFERENCES("Only blob references will be copied"),
        EMBEDDED_TO_EMBEDDED("Blobs embedded in ${srcnode} will be embedded in ${dstnode}"),
        EMBEDDED_TO_EXTERNAL("Blobs embedded in ${srcnode} will be copied to ${dstblob}"),
        EXTERNAL_TO_EMBEDDED("Blobs stored in ${srcblob} will be embedded in ${dstnode}"),
        EXTERNAL_TO_EXTERNAL("Blobs stored in ${srcblob} will be copied to ${dstblob}"),
        UNSUPPORTED("Unsupported case");

        private final String description;

        BlobMigrationCase(String description) {
            this.description = description;
        }

        // Expands the ${...} placeholders with the actual store descriptors.
        private String getDescription(DatastoreArguments datastoreArguments) {
            Map<String, String> map = newHashMap();
            map.put("srcnode", datastoreArguments.storeArguments.getSrcDescriptor());
            map.put("dstnode", datastoreArguments.storeArguments.getDstDescriptor());

            if (datastoreArguments.storeArguments.getSrcType() == JCR2_DIR_XML) {
                map.put("srcblob", "CRX2 datastore");
            } else {
                map.put("srcblob", datastoreArguments.definedSrcBlob == null ? "?" : datastoreArguments.definedSrcBlob.toString());
            }
            map.put("dstblob", datastoreArguments.definedDstBlob == null ? "?" : datastoreArguments.definedDstBlob.toString());

            StrSubstitutor subst = new StrSubstitutor(map);
            return subst.replace(description);
        }

    }

    public BlobMigrationCase getBlobMigrationCase() {
        return blobMigrationCase;
    }

    // Implements the decision table from the class comment. The branch order
    // matters: each condition assumes the previous ones did not match.
    private BlobMigrationCase discoverBlobMigrationCase() throws IOException {
        boolean srcDefined = options.isSrcBlobStoreDefined() || storeArguments.getSrcType() == JCR2_DIR_XML;
        boolean dstDefined = options.isDstBlobStoreDefined();
        boolean copyBinaries = options.isCopyBinaries();

        boolean srcSegment = storeArguments.getSrcType().isSegment();
        boolean dstSegment = storeArguments.getDstType().isSegment();

        // default case, no datastore-related arguments given, but blobs are stored externally
        if (!srcDefined && !dstDefined && !srcEmbedded && !copyBinaries) {
            if (srcSegment && !dstSegment) { // segment -> document is not supported for this case
                return BlobMigrationCase.UNSUPPORTED;
            } else { // we try to copy references using MissingBlobStore
                return BlobMigrationCase.COPY_REFERENCES;
            }
            // can't copy binaries if they are stored externally and we don't know where
        } else if (!srcDefined && !dstDefined && !srcEmbedded && copyBinaries) {
            return BlobMigrationCase.UNSUPPORTED;
            // can't copy binaries if they are stored externally and we don't know where
            // (even if the destination datastore is defined)
        } else if (!srcDefined && !srcEmbedded && dstDefined) {
            return BlobMigrationCase.UNSUPPORTED;
            // source is embedded and no destination given
        } else if (!srcDefined && srcEmbedded && !dstDefined) {
            return BlobMigrationCase.EMBEDDED_TO_EMBEDDED;
            // source is embedded and the destination is given
        } else if (!srcDefined && srcEmbedded && dstDefined) {
            return BlobMigrationCase.EMBEDDED_TO_EXTERNAL;
            // source is given, no destination, but also no --copy-binaries -> copy references
        } else if (srcDefined && !dstDefined && !copyBinaries) {
            return BlobMigrationCase.COPY_REFERENCES;
            // source is given, no destination, but --copy-binaries -> copy to embedded
        } else if (srcDefined && !dstDefined && copyBinaries) {
            return BlobMigrationCase.EXTERNAL_TO_EMBEDDED;
            // source and destination is given
        } else if (srcDefined && dstDefined) {
            return BlobMigrationCase.EXTERNAL_TO_EXTERNAL;
        }
        return BlobMigrationCase.UNSUPPORTED;
    }
}
package org.apache.jackrabbit.oak.upgrade.cli.parser;

import java.util.ArrayList;
import java.util.List;

import joptsimple.OptionSet;

/**
 * Thin wrapper around a parsed joptsimple {@link OptionSet}, exposing typed
 * accessors for the migration options and the positional arguments.
 */
public final class MigrationCliArguments {

    private final OptionSet options;

    private final List<String> arguments;

    public MigrationCliArguments(OptionSet options) throws CliArgumentException {
        this.options = options;
        this.arguments = getNonOptionArguments();
    }

    // Converts the raw non-option arguments to their string form.
    private List<String> getNonOptionArguments() {
        final List<String> result = new ArrayList<String>();
        for (Object argument : options.nonOptionArguments()) {
            result.add(argument.toString());
        }
        return result;
    }

    /** @return {@code true} if the named option was given on the command line */
    public boolean hasOption(String optionName) {
        return options.has(optionName);
    }

    /** @return the option value as a string, or {@code null} if absent */
    public String getOption(String optionName) {
        return (String) options.valueOf(optionName);
    }

    /** @return the option value as an int; the option must be present */
    public int getIntOption(String optionName) {
        return (Integer) options.valueOf(optionName);
    }

    /**
     * Splits a comma-separated option value into an array.
     *
     * @return the split values, or {@code null} if the option is absent
     */
    public String[] getOptionList(String optionName) {
        final String value = getOption(optionName);
        return value == null ? null : value.split(",");
    }

    /** @return the positional (non-option) command-line arguments */
    public List<String> getArguments() {
        return arguments;
    }
}
package org.apache.jackrabbit.oak.upgrade.cli.parser;

import java.text.DateFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Calendar;

import org.apache.commons.lang.StringUtils;
import org.apache.jackrabbit.oak.commons.PathUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Immutable holder for all migration options parsed from the command line.
 * Built once from a {@link MigrationCliArguments} and then only read.
 */
public class MigrationOptions {

    private static final Logger log = LoggerFactory.getLogger(MigrationOptions.class);

    // NOTE(review): SimpleDateFormat is not thread-safe. This appears to be
    // used only from the single-threaded CLI — confirm before sharing this
    // class across threads.
    private static final DateFormat DATE_FORMAT = new SimpleDateFormat("yyyy-MM-dd");

    private final boolean copyBinaries;

    private final boolean disableMmap;

    private final int cacheSizeInMB;

    // Epoch (timestamp 0) means "copy all versions"; null means "don't copy".
    private final Calendar copyVersions;

    private final Calendar copyOrphanedVersions;

    private final String[] includePaths;

    private final String[] excludePaths;

    private final String[] mergePaths;

    private final boolean failOnError;

    private final boolean earlyShutdown;

    private final boolean skipInitialization;

    private final boolean skipNameCheck;

    private final boolean ignoreMissingBinaries;

    private final boolean verify;

    private final boolean onlyVerify;

    private final boolean skipCheckpoints;

    private final String srcUser;

    private final String srcPassword;

    private final String dstUser;

    private final String dstPassword;

    private final String srcFbs;

    private final String srcFds;

    private final String srcS3Config;

    private final String srcS3;

    private final String dstFbs;

    private final String dstFds;

    private final String dstS3Config;

    private final String dstS3;

    // Tri-state: null when the user didn't specify --src-external-ds.
    private final Boolean srcExternalBlobs;

    /**
     * Extracts and validates all options from the parsed command line.
     *
     * @throws CliArgumentException if one of the path options contains an
     *         invalid repository path
     */
    public MigrationOptions(MigrationCliArguments args) throws CliArgumentException {
        this.disableMmap = args.hasOption(OptionParserFactory.DISABLE_MMAP);
        this.copyBinaries = args.hasOption(OptionParserFactory.COPY_BINARIES);
        if (args.hasOption(OptionParserFactory.CACHE_SIZE)) {
            this.cacheSizeInMB = args.getIntOption(OptionParserFactory.CACHE_SIZE);
        } else {
            this.cacheSizeInMB = 256; // default cache size
        }

        // Default for both version options: epoch = copy everything.
        final Calendar epoch = Calendar.getInstance();
        epoch.setTimeInMillis(0);
        if (args.hasOption(OptionParserFactory.COPY_VERSIONS)) {
            this.copyVersions = parseVersionCopyArgument(args.getOption(OptionParserFactory.COPY_VERSIONS));
        } else {
            this.copyVersions = epoch;
        }
        if (args.hasOption(OptionParserFactory.COPY_ORPHANED_VERSIONS)) {
            this.copyOrphanedVersions = parseVersionCopyArgument(args.getOption(OptionParserFactory.COPY_ORPHANED_VERSIONS));
        } else {
            this.copyOrphanedVersions = epoch;
        }
        this.includePaths = checkPaths(args.getOptionList(OptionParserFactory.INCLUDE_PATHS));
        this.excludePaths = checkPaths(args.getOptionList(OptionParserFactory.EXCLUDE_PATHS));
        this.mergePaths = checkPaths(args.getOptionList(OptionParserFactory.MERGE_PATHS));
        this.failOnError = args.hasOption(OptionParserFactory.FAIL_ON_ERROR);
        this.earlyShutdown = args.hasOption(OptionParserFactory.EARLY_SHUTDOWN);
        this.skipInitialization = args.hasOption(OptionParserFactory.SKIP_INIT);
        this.skipNameCheck = args.hasOption(OptionParserFactory.SKIP_NAME_CHECK);
        this.ignoreMissingBinaries = args.hasOption(OptionParserFactory.IGNORE_MISSING_BINARIES);
        this.verify = args.hasOption(OptionParserFactory.VERIFY);
        this.onlyVerify = args.hasOption(OptionParserFactory.ONLY_VERIFY);
        this.skipCheckpoints = args.hasOption(OptionParserFactory.SKIP_CHECKPOINTS);

        this.srcUser = args.getOption(OptionParserFactory.SRC_USER);
        // BUG FIX: the password was previously read from the SRC_USER option,
        // so --src-password was silently ignored.
        this.srcPassword = args.getOption(OptionParserFactory.SRC_PASSWORD);
        this.dstUser = args.getOption(OptionParserFactory.DST_USER);
        this.dstPassword = args.getOption(OptionParserFactory.DST_PASSWORD);

        this.srcFbs = args.getOption(OptionParserFactory.SRC_FBS);
        this.srcFds = args.getOption(OptionParserFactory.SRC_FDS);
        this.srcS3 = args.getOption(OptionParserFactory.SRC_S3);
        this.srcS3Config = args.getOption(OptionParserFactory.SRC_S3_CONFIG);

        this.dstFbs = args.getOption(OptionParserFactory.DST_FBS);
        this.dstFds = args.getOption(OptionParserFactory.DST_FDS);
        this.dstS3 = args.getOption(OptionParserFactory.DST_S3);
        this.dstS3Config = args.getOption(OptionParserFactory.DST_S3_CONFIG);

        if (args.hasOption(OptionParserFactory.SRC_EXTERNAL_BLOBS)) {
            // BUG FIX: previously the option *name* constant was parsed
            // (Boolean.valueOf("src-external-ds")), which always yielded
            // false; parse the option's value instead.
            this.srcExternalBlobs = Boolean.valueOf(args.getOption(OptionParserFactory.SRC_EXTERNAL_BLOBS));
        } else {
            this.srcExternalBlobs = null;
        }
    }

    public boolean isCopyBinaries() {
        return copyBinaries;
    }

    public boolean isDisableMmap() {
        return disableMmap;
    }

    public int getCacheSizeInMB() {
        return cacheSizeInMB;
    }

    public Calendar getCopyVersions() {
        return copyVersions;
    }

    public Calendar getCopyOrphanedVersions() {
        return copyOrphanedVersions;
    }

    public String[] getIncludePaths() {
        return includePaths;
    }

    public String[] getExcludePaths() {
        return excludePaths;
    }

    public String[] getMergePaths() {
        return mergePaths;
    }

    public boolean isFailOnError() {
        return failOnError;
    }

    public boolean isEarlyShutdown() {
        return earlyShutdown;
    }

    public boolean isSkipInitialization() {
        return skipInitialization;
    }

    public boolean isSkipNameCheck() {
        return skipNameCheck;
    }

    public boolean isIgnoreMissingBinaries() {
        return ignoreMissingBinaries;
    }

    public boolean isVerify() {
        return verify;
    }

    public boolean isOnlyVerify() {
        return onlyVerify;
    }

    public boolean isSkipCheckpoints() {
        return skipCheckpoints;
    }

    public String getSrcUser() {
        return srcUser;
    }

    public String getSrcPassword() {
        return srcPassword;
    }

    public String getDstUser() {
        return dstUser;
    }

    public String getDstPassword() {
        return dstPassword;
    }

    public String getSrcFbs() {
        return srcFbs;
    }

    public String getSrcFds() {
        return srcFds;
    }

    public String getSrcS3Config() {
        return srcS3Config;
    }

    public String getSrcS3() {
        return srcS3;
    }

    public String getDstFbs() {
        return dstFbs;
    }

    public String getDstFds() {
        return dstFds;
    }

    public String getDstS3Config() {
        return dstS3Config;
    }

    public String getDstS3() {
        return dstS3;
    }

    public boolean isSrcFds() {
        return StringUtils.isNotBlank(srcFds);
    }

    public boolean isSrcFbs() {
        return StringUtils.isNotBlank(srcFbs);
    }

    // S3 requires both the datastore path and the configuration file.
    public boolean isSrcS3() {
        return StringUtils.isNotBlank(srcS3) && StringUtils.isNotBlank(srcS3Config);
    }

    public boolean isDstFds() {
        return StringUtils.isNotBlank(dstFds);
    }

    public boolean isDstFbs() {
        return StringUtils.isNotBlank(dstFbs);
    }

    public boolean isDstS3() {
        return StringUtils.isNotBlank(dstS3) && StringUtils.isNotBlank(dstS3Config);
    }

    public boolean isSrcBlobStoreDefined() {
        return isSrcFbs() || isSrcFds() || isSrcS3();
    }

    public boolean isDstBlobStoreDefined() {
        return isDstFbs() || isDstFds() || isDstS3();
    }

    /** Logs a human-readable summary of the non-default options. */
    public void logOptions() {
        if (disableMmap) {
            log.info("Disabling memory mapped file access for Segment Store");
        }

        if (copyVersions == null) {
            log.info("copyVersions parameter set to false");
        } else {
            log.info("copyVersions parameter set to {}", DATE_FORMAT.format(copyVersions.getTime()));
        }

        if (copyOrphanedVersions == null) {
            log.info("copyOrphanedVersions parameter set to false");
        } else {
            log.info("copyOrphanedVersions parameter set to {}", DATE_FORMAT.format(copyOrphanedVersions.getTime()));
        }

        if (includePaths != null) {
            log.info("paths to include: {}", (Object) includePaths);
        }

        if (excludePaths != null) {
            log.info("paths to exclude: {}", (Object) excludePaths);
        }

        // Consistency: merge paths were the only path option not logged.
        if (mergePaths != null) {
            log.info("paths to merge: {}", (Object) mergePaths);
        }

        if (failOnError) {
            log.info("Unreadable nodes will cause failure of the entire transaction");
        }

        if (earlyShutdown) {
            log.info("Source repository would be shutdown post copying of nodes");
        }

        if (skipInitialization) {
            log.info("The repository initialization will be skipped");
        }

        if (skipNameCheck) {
            log.info("Test for long-named nodes will be disabled");
        }

        if (ignoreMissingBinaries) {
            log.info("Missing binaries won't break the migration");
        }

        if (srcExternalBlobs != null) {
            log.info("Source DataStore external blobs: {}", srcExternalBlobs);
        }

        if (skipCheckpoints) {
            log.info("Checkpoints won't be migrated");
        }

        log.info("Cache size: {} MB", cacheSizeInMB);

    }

    /**
     * Parses a version-copy option value: "true" means epoch (copy all),
     * a yyyy-MM-dd date means "copy versions newer than that date", and
     * anything else (including an unparsable date) yields {@code null},
     * meaning "don't copy versions".
     */
    private static Calendar parseVersionCopyArgument(String string) {
        final Calendar calendar;

        if (Boolean.parseBoolean(string)) {
            calendar = Calendar.getInstance();
            calendar.setTimeInMillis(0);
        } else if (string != null && string.matches("^\\d{4}-\\d{2}-\\d{2}$")) {
            calendar = Calendar.getInstance();
            try {
                calendar.setTime(DATE_FORMAT.parse(string));
            } catch (ParseException e) {
                return null;
            }
        } else {
            calendar = null;
        }
        return calendar;
    }

    public Boolean getSrcExternalBlobs() {
        return srcExternalBlobs;
    }

    // Validates that every given path is a legal repository path.
    private static String[] checkPaths(String[] paths) throws CliArgumentException {
        if (paths == null) {
            return paths;
        }
        for (String p : paths) {
            if (!PathUtils.isValid(p)) {
                throw new CliArgumentException("Following path is not valid: " + p, 1);
            }
        }
        return paths;
    }

}
jackrabbit/oak/branches/1.2/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/parser/OptionParserFactory.java URL: http://svn.apache.org/viewvc/jackrabbit/oak/branches/1.2/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/parser/OptionParserFactory.java?rev=1792995&view=auto ============================================================================== --- jackrabbit/oak/branches/1.2/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/parser/OptionParserFactory.java (added) +++ jackrabbit/oak/branches/1.2/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/parser/OptionParserFactory.java Fri Apr 28 07:18:26 2017 @@ -0,0 +1,157 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.jackrabbit.oak.upgrade.cli.parser; + +import static java.util.Arrays.asList; + +import joptsimple.OptionParser; + +public class OptionParserFactory { + + public static final String COPY_BINARIES = "copy-binaries"; + + public static final String DISABLE_MMAP = "disable-mmap"; + + public static final String FAIL_ON_ERROR = "fail-on-error"; + + public static final String IGNORE_MISSING_BINARIES = "ignore-missing-binaries"; + + public static final String EARLY_SHUTDOWN = "early-shutdown"; + + public static final String CACHE_SIZE = "cache"; + + public static final String HELP = "help"; + + public static final String DST_USER = "user"; + + public static final String DST_PASSWORD = "password"; + + public static final String SRC_USER = "src-user"; + + public static final String SRC_PASSWORD = "src-password"; + + public static final String SRC_FBS = "src-fileblobstore"; + + public static final String SRC_FDS = "src-datastore"; + + public static final String SRC_S3 = "src-s3datastore"; + + public static final String SRC_S3_CONFIG = "src-s3config"; + + public static final String SRC_EXTERNAL_BLOBS = "src-external-ds"; + + public static final String DST_FDS = "datastore"; + + public static final String DST_FBS = "fileblobstore"; + + public static final String DST_S3 = "s3datastore"; + + public static final String DST_S3_CONFIG = "s3config"; + + public static final String COPY_VERSIONS = "copy-versions"; + + public static final String COPY_ORPHANED_VERSIONS = "copy-orphaned-versions"; + + public static final String INCLUDE_PATHS = "include-paths"; + + public static final String EXCLUDE_PATHS = "exclude-paths"; + + public static final String MERGE_PATHS = "merge-paths"; + + public static final String SKIP_INIT = "skip-init"; + + public static final String SKIP_NAME_CHECK = "skip-name-check"; + + public static final String VERIFY = "verify"; + + public static final String ONLY_VERIFY = "only-verify"; + + public static final String SKIP_CHECKPOINTS = 
"skip-checkpoints"; + + public static OptionParser create() { + OptionParser op = new OptionParser(); + addUsageOptions(op); + addBlobOptions(op); + addRdbOptions(op); + addPathsOptions(op); + addVersioningOptions(op); + addMiscOptions(op); + return op; + } + + private static void addUsageOptions(OptionParser op) { + op.acceptsAll(asList("h", "?", HELP), "show help").forHelp(); + } + + private static void addBlobOptions(OptionParser op) { + op.accepts(COPY_BINARIES, "Copy binary content. Use this to disable use of existing DataStore in new repo"); + op.accepts(SRC_FDS, "Datastore directory to be used as a source FileDataStore").withRequiredArg() + .ofType(String.class); + op.accepts(SRC_FBS, "Datastore directory to be used as a source FileBlobStore").withRequiredArg() + .ofType(String.class); + op.accepts(SRC_S3, "Datastore directory to be used for the source S3").withRequiredArg().ofType(String.class); + op.accepts(SRC_S3_CONFIG, "Configuration file for the source S3DataStore").withRequiredArg() + .ofType(String.class); + op.accepts(DST_FDS, "Datastore directory to be used as a target FileDataStore").withRequiredArg() + .ofType(String.class); + op.accepts(DST_FBS, "Datastore directory to be used as a target FileBlobStore").withRequiredArg() + .ofType(String.class); + op.accepts(DST_S3, "Datastore directory to be used for the target S3").withRequiredArg().ofType(String.class); + op.accepts(DST_S3_CONFIG, "Configuration file for the target S3DataStore").withRequiredArg() + .ofType(String.class); + op.accepts(IGNORE_MISSING_BINARIES, "Don't break the migration if some binaries are missing"); + op.accepts(SRC_EXTERNAL_BLOBS, "Flag specifying if the source Store has external references or not"); + } + + private static void addRdbOptions(OptionParser op) { + op.accepts(SRC_USER, "Source rdb user").withRequiredArg().ofType(String.class); + op.accepts(SRC_PASSWORD, "Source rdb password").withRequiredArg().ofType(String.class); + op.accepts(DST_USER, "Target rdb 
user").withRequiredArg().ofType(String.class); + op.accepts(DST_PASSWORD, "Target rdb password").withRequiredArg().ofType(String.class); + } + + private static void addPathsOptions(OptionParser op) { + op.accepts(INCLUDE_PATHS, "Comma-separated list of paths to include during copy.").withRequiredArg() + .ofType(String.class); + op.accepts(EXCLUDE_PATHS, "Comma-separated list of paths to exclude during copy.").withRequiredArg() + .ofType(String.class); + op.accepts(MERGE_PATHS, "Comma-separated list of paths to merge during copy.").withRequiredArg() + .ofType(String.class); + } + + private static void addVersioningOptions(OptionParser op) { + op.accepts(COPY_VERSIONS, + "Copy the version storage. Parameters: { true | false | yyyy-mm-dd }. Defaults to true.") + .withRequiredArg().ofType(String.class); + op.accepts(COPY_ORPHANED_VERSIONS, + "Allows to skip copying orphaned versions. Parameters: { true | false | yyyy-mm-dd }. Defaults to true.") + .withRequiredArg().ofType(String.class); + } + + private static void addMiscOptions(OptionParser op) { + op.accepts(DISABLE_MMAP, "Disable memory mapped file access for Segment Store"); + op.accepts(FAIL_ON_ERROR, "Fail completely if nodes can't be read from the source repo"); + op.accepts(EARLY_SHUTDOWN, + "Shutdown the source repository after nodes are copied and before the commit hooks are applied"); + op.accepts(CACHE_SIZE, "Cache size in MB").withRequiredArg().ofType(Integer.class).defaultsTo(256); + op.accepts(SKIP_INIT, "Skip the repository initialization; only copy data"); + op.accepts(SKIP_NAME_CHECK, "Skip the initial phase of testing node name lengths"); + op.accepts(VERIFY, "After the sidegrade check whether the source repository is exactly the same as destination"); + op.accepts(ONLY_VERIFY, "Performs only --" + VERIFY + ", without copying content"); + op.accepts(SKIP_CHECKPOINTS, "Don't copy checkpoints on the full segment->segment migration"); + } +} Added: 
jackrabbit/oak/branches/1.2/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/parser/StoreArguments.java URL: http://svn.apache.org/viewvc/jackrabbit/oak/branches/1.2/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/parser/StoreArguments.java?rev=1792995&view=auto ============================================================================== --- jackrabbit/oak/branches/1.2/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/parser/StoreArguments.java (added) +++ jackrabbit/oak/branches/1.2/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/parser/StoreArguments.java Fri Apr 28 07:18:26 2017 @@ -0,0 +1,264 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.jackrabbit.oak.upgrade.cli.parser; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Iterator; +import java.util.List; + +import org.apache.jackrabbit.oak.plugins.segment.SegmentVersion; +import org.apache.jackrabbit.oak.upgrade.cli.node.StoreFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import static org.apache.jackrabbit.oak.upgrade.cli.parser.StoreType.JCR2_DIR; +import static org.apache.jackrabbit.oak.upgrade.cli.parser.StoreType.JCR2_DIR_XML; +import static org.apache.jackrabbit.oak.upgrade.cli.parser.StoreType.JCR2_XML; +import static org.apache.jackrabbit.oak.upgrade.cli.parser.StoreType.SEGMENT; +import static org.apache.jackrabbit.oak.upgrade.cli.parser.StoreType.getMatchingType; + +public class StoreArguments { + + private static final String DEFAULT_CRX2_REPO = "crx-quickstart/repository"; + + private static final String REPOSITORY_XML = "repository.xml"; + + private static final Logger log = LoggerFactory.getLogger(StoreArguments.class); + + private final MigrationOptions options; + + private final StoreDescriptor src; + + private final StoreDescriptor dst; + + private Boolean srcHasExternalBlobRefs; + + public StoreArguments(MigrationOptions options, List<String> arguments) throws CliArgumentException { + this.options = options; + List<StoreDescriptor> descriptors = createStoreDescriptors(arguments, options); + + src = descriptors.get(0); + dst = descriptors.get(1); + + if (options.getSrcExternalBlobs() != null) { + srcHasExternalBlobRefs = options.getSrcExternalBlobs(); + } + } + + public void logOptions() { + log.info("Source: {}", src); + log.info("Destination: {}", dst); + + if (dst.getType() == SEGMENT) { + logSegmentVersion(); + } + } + + public StoreFactory getSrcStore() { + return src.getFactory(MigrationDirection.SRC, options); + } + + public StoreFactory getDstStore() { + return dst.getFactory(MigrationDirection.DST, options); + } + + 
public StoreType getSrcType() { + return src.getType(); + } + + public StoreType getDstType() { + return dst.getType(); + } + + String getSrcDescriptor() { + return src.toString(); + } + + String getDstDescriptor() { + return dst.toString(); + } + + public boolean isInPlaceUpgrade() { + if (src.getType() == JCR2_DIR_XML && dst.getType() == SEGMENT) { + return src.getPath().equals(dst.getPath()); + } + return false; + } + + public String[] getSrcPaths() { + return src.getPaths(); + } + + public boolean srcUsesEmbeddedDatastore() throws IOException { + if (srcHasExternalBlobRefs == null) { + srcHasExternalBlobRefs = src.getFactory(StoreArguments.MigrationDirection.SRC, options).hasExternalBlobReferences(); + } + return !srcHasExternalBlobRefs; + } + + private static List<StoreDescriptor> createStoreDescriptors(List<String> arguments, MigrationOptions options) throws CliArgumentException { + List<StoreDescriptor> descriptors = mapToStoreDescriptors(arguments); + mergeCrx2Descriptors(descriptors); + addSegmentAsDestination(descriptors); + validateDescriptors(descriptors, options); + return descriptors; + } + + private static List<StoreDescriptor> mapToStoreDescriptors(List<String> arguments) throws CliArgumentException { + List<StoreDescriptor> descriptors = new ArrayList<StoreDescriptor>(); + boolean jcr2Dir = false; + boolean jcr2Xml = false; + for (String argument : arguments) { + StoreType type = getMatchingType(argument); + if (type == JCR2_DIR) { + if (jcr2Dir) { + type = SEGMENT; + } + jcr2Dir = true; + } + if (type == JCR2_DIR_XML) { + if (jcr2Xml) { + throw new CliArgumentException("Too many repository.xml files passed as arguments", 1); + } + jcr2Xml = true; + } + descriptors.add(new StoreDescriptor(type, argument)); + } + return descriptors; + } + + private static void mergeCrx2Descriptors(List<StoreDescriptor> descriptors) { + int crx2DirIndex = -1; + int crx2XmlIndex = -1; + for (int i = 0; i < descriptors.size(); i++) { + StoreType type = 
descriptors.get(i).getType(); + if (type == JCR2_DIR) { + crx2DirIndex = i; + } else if (type == JCR2_XML) { + crx2XmlIndex = i; + } + } + + if (crx2DirIndex > -1 || crx2XmlIndex > -1) { + String repoDir; + if (crx2DirIndex > -1) { + repoDir = descriptors.get(crx2DirIndex).getPath(); + descriptors.set(crx2DirIndex, null); + } else { + repoDir = DEFAULT_CRX2_REPO; + } + String repoXml; + if (crx2XmlIndex > -1) { + repoXml = descriptors.get(crx2XmlIndex).getPath(); + descriptors.set(crx2XmlIndex, null); + } else { + repoXml = repoDir + "/" + REPOSITORY_XML; + } + descriptors.add(0, new StoreDescriptor(JCR2_DIR_XML, repoDir, repoXml)); + + Iterator<StoreDescriptor> it = descriptors.iterator(); + while (it.hasNext()) { + if (it.next() == null) { + it.remove(); + } + } + } + } + + private static void addSegmentAsDestination(List<StoreDescriptor> descriptors) { + if (descriptors.size() == 1) { + StoreType type = descriptors.get(0).getType(); + if (type == JCR2_DIR_XML) { + String crx2Dir = descriptors.get(0).getPath(); + descriptors.add(new StoreDescriptor(SEGMENT, crx2Dir)); + log.info("In place migration between JCR2 and SegmentNodeStore in {}", crx2Dir); + } + } + } + + private static void validateDescriptors(List<StoreDescriptor> descriptors, MigrationOptions options) throws CliArgumentException { + if (descriptors.size() < 2) { + throw new CliArgumentException("Not enough node store arguments: " + descriptors.toString(), 1); + } else if (descriptors.size() > 2) { + throw new CliArgumentException("Too much node store arguments: " + descriptors.toString(), 1); + } else if (descriptors.get(1).getType() == JCR2_DIR_XML) { + throw new CliArgumentException("Can't use CRX2 as a destination", 1); + } + StoreDescriptor src = descriptors.get(0); + StoreDescriptor dst = descriptors.get(1); + if (src.getType() == dst.getType() && src.getPath().equals(dst.getPath())) { + throw new CliArgumentException("The source and the destination is the same repository.", 1); + } + if 
(src.getType() == StoreType.JCR2_DIR_XML && options.isSrcBlobStoreDefined()) { + throw new CliArgumentException("The --src-datastore can't be used for the repository upgrade. Source datastore configuration is placed in the repository.xml file.", 1); + } + } + + private static void logSegmentVersion() { + SegmentVersion[] versions = SegmentVersion.values(); + SegmentVersion lastVersion = versions[versions.length - 1]; + log.info("Using Oak segment format {} - please make sure your version of AEM supports that format", + lastVersion); + if (lastVersion == SegmentVersion.V_11) { + log.info("Requires Oak 1.0.12, 1.1.7 or later"); + } + } + + enum MigrationDirection { + SRC, DST + } + + private static class StoreDescriptor { + + private final String[] paths; + + private final StoreType type; + + public StoreDescriptor(StoreType type, String... paths) { + this.type = type; + this.paths = paths; + } + + public String[] getPaths() { + return paths; + } + + public String getPath() { + return paths[0]; + } + + public StoreType getType() { + return type; + } + + public StoreFactory getFactory(MigrationDirection direction, MigrationOptions options) { + return type.createFactory(paths, direction, options); + } + + @Override + public String toString() { + if (paths.length == 1) { + return String.format("%s[%s]", type, getPath()); + } else { + return String.format("%s%s", type, Arrays.toString(getPaths())); + } + } + + } +}
