This is an automated email from the ASF dual-hosted git repository.

miroslav pushed a commit to branch trunk
in repository https://gitbox.apache.org/repos/asf/jackrabbit-oak.git
The following commit(s) were added to refs/heads/trunk by this push:
     new 2e63881a95  OAK-9212 AzureArchiveManager.listArchives() should not delete segments (#2467)
2e63881a95 is described below

commit 2e63881a959d016157f80f4bab6e381b3130d321
Author: Miroslav Smiljanic <smmiros...@gmail.com>
AuthorDate: Wed Aug 27 15:09:39 2025 +0200

    OAK-9212 AzureArchiveManager.listArchives() should not delete segments (#2467)

    * OAK-9212 do not delete blobs when doing AzureArchiveManagerV8#listArchives and AzureArchiveManager#listArchives
    * OAK-9212 remove unused imports
    * OAK-9212 mock listing blobs during initialisation
    * OAK-9212 upload deleted marker when the archive is being deleted, and use the same marker to exclude the archive when invoking listArchives
    * OAK-9212 rename method
    * OAK-9212 change field visibility
    * OAK-9212 upload deleted marker when the archive is being deleted, and use the same marker to exclude the archive when invoking listArchives (Azure SDK v12)
    * OAK-9212 use readOnly when instantiating SegmentArchiveManager
    * OAK-9212 changed exported version for org.apache.jackrabbit.oak.segment.spi.persistence.split
    * OAK-9212 use constant
    * OAK-9212 throw UnsupportedOperationException when in read-only mode
    * OAK-9212 throw UnsupportedOperationException when in read-only mode
    * OAK-9212 added test testListArchivesInReadOnlyModeWithPartiallyDeletedArchive
    * OAK-9212 added test testListArchivesInReadWriteModeWithPartiallyDeletedArchive

    ---------

    Co-authored-by: Miroslav Smiljanic <miros...@apache.com>
---
 .../jackrabbit/oak/segment/aws/AwsPersistence.java | 2 +-
 .../oak/segment/aws/tool/AwsCompact.java | 2 +-
 .../segment/aws/tool/AwsSegmentStoreMigrator.java | 4 +-
 .../oak/segment/aws/AwsArchiveManagerTest.java | 6 +-
 .../oak/segment/aws/AwsGCJournalFileTest.java | 2 +-
 .../oak/segment/aws/AwsReadSegmentTest.java | 2 +-
 .../oak/segment/aws/tool/SegmentCopyTestBase.java | 4 +-
 .../persistence/split/SplitPersistenceTest.java | 2 +-
 .../oak/segment/azure/AzureArchiveManager.java | 73 ++++++++++---
 .../oak/segment/azure/AzurePersistence.java | 4 +-
 .../segment/azure/AzureSegmentArchiveWriter.java | 1 +
 .../oak/segment/azure/AzureUtilities.java | 8 ++
 .../oak/segment/azure/tool/AzureCompact.java | 4 +-
 .../oak/segment/azure/tool/SegmentCopy.java | 2 +-
 .../segment/azure/tool/SegmentStoreMigrator.java | 4 +-
 .../oak/segment/azure/tool/ToolUtils.java | 4 +-
 .../segment/azure/v8/AzureArchiveManagerV8.java | 67 +++++++++---
 .../oak/segment/azure/v8/AzurePersistenceV8.java | 4 +-
 .../azure/v8/AzureSegmentArchiveWriterV8.java | 17 ++-
 .../AzureArchiveManagerIgnoreSamePrefixTest.java | 13 +--
 .../oak/segment/azure/AzureArchiveManagerTest.java | 120 +++++++++++++++++++--
 .../oak/segment/azure/AzureReadSegmentTest.java | 4 +-
 .../azure/AzureSegmentArchiveWriterTest.java | 33 +++++-
 .../oak/segment/azure/AzureTarFileTest.java | 2 +-
 .../oak/segment/azure/AzureTarWriterTest.java | 4 +-
 .../oak/segment/azure/AzureUtilitiesTest.java | 79 ++++++++++++++
 .../segment/azure/tool/SegmentCopyTestBase.java | 5 +-
 .../azure/v8/AzureArchiveManagerV8Test.java | 117 ++++++++++++++++++--
 .../segment/azure/v8/AzureReadSegmentV8Test.java | 4 +-
 .../azure/v8/AzureSegmentArchiveWriterV8Test.java | 22 +++-
 .../oak/segment/azure/v8/AzureTarFileV8Test.java | 2 +-
 .../oak/segment/azure/v8/AzureTarWriterV8Test.java | 4 +-
 .../persistence/split/SplitPersistenceTest.java | 2 +-
 .../split/v8/SplitPersistenceV8Test.java | 2 +-
 .../oak/segment/remote/RemoteUtilities.java | 4 +
 .../oak/segment/remote/RemoteUtilitiesTest.java | 31 ++++++
 .../oak/segment/file/FileStoreProcBackend.java | 2 +-
 .../jackrabbit/oak/segment/file/tar/TarFiles.java | 2 +-
 .../oak/segment/file/tar/TarPersistence.java | 2 +-
 .../persistence/SegmentNodeStorePersistence.java | 16 +--
 .../persistentcache/CachingPersistence.java | 4 +-
 .../spi/persistence/split/SplitPersistence.java | 10 +-
 .../spi/persistence/split/package-info.java | 2 +-
 .../jackrabbit/oak/segment/FailedFlushTest.java | 2 +-
 .../jackrabbit/oak/segment/file/FileStoreTest.java | 2 +-
 .../oak/segment/file/tar/TarFilesTest.java | 4 +-
 46 files changed, 593 insertions(+), 113 deletions(-)

diff --git a/oak-segment-aws/src/main/java/org/apache/jackrabbit/oak/segment/aws/AwsPersistence.java b/oak-segment-aws/src/main/java/org/apache/jackrabbit/oak/segment/aws/AwsPersistence.java
index 82567184fd..a3cb8d602a 100644
--- a/oak-segment-aws/src/main/java/org/apache/jackrabbit/oak/segment/aws/AwsPersistence.java
+++ b/oak-segment-aws/src/main/java/org/apache/jackrabbit/oak/segment/aws/AwsPersistence.java
@@ -42,7 +42,7 @@ public class AwsPersistence implements SegmentNodeStorePersistence {
 
     @Override
     public SegmentArchiveManager createArchiveManager(boolean mmap, boolean offHeapAccess, IOMonitor ioMonitor,
-            FileStoreMonitor fileStoreMonitor, RemoteStoreMonitor remoteStoreMonitor) {
+            FileStoreMonitor fileStoreMonitor, RemoteStoreMonitor remoteStoreMonitor, boolean readOnly) {
         awsContext.setRemoteStoreMonitor(remoteStoreMonitor);
         return new AwsArchiveManager(awsContext.directory, ioMonitor, fileStoreMonitor);
     }
diff --git a/oak-segment-aws/src/main/java/org/apache/jackrabbit/oak/segment/aws/tool/AwsCompact.java b/oak-segment-aws/src/main/java/org/apache/jackrabbit/oak/segment/aws/tool/AwsCompact.java
index c9f2cf93a5..527c39eb7f 100644
--- a/oak-segment-aws/src/main/java/org/apache/jackrabbit/oak/segment/aws/tool/AwsCompact.java
+++ b/oak-segment-aws/src/main/java/org/apache/jackrabbit/oak/segment/aws/tool/AwsCompact.java
@@ -205,7 +205,7 @@ public class AwsCompact {
         Stopwatch watch = Stopwatch.createStarted();
         SegmentNodeStorePersistence persistence = newSegmentNodeStorePersistence(SegmentStoreType.AWS, path);
         SegmentArchiveManager archiveManager = persistence.createArchiveManager(false, false, new IOMonitorAdapter(),
-                new FileStoreMonitorAdapter(), new RemoteStoreMonitorAdapter());
+                new FileStoreMonitorAdapter(), new RemoteStoreMonitorAdapter(), false);
 
         System.out.printf("Compacting %s\n", path);
         System.out.printf(" before\n");
diff --git a/oak-segment-aws/src/main/java/org/apache/jackrabbit/oak/segment/aws/tool/AwsSegmentStoreMigrator.java b/oak-segment-aws/src/main/java/org/apache/jackrabbit/oak/segment/aws/tool/AwsSegmentStoreMigrator.java
index 4f37a60d07..596ec7f107 100644
--- a/oak-segment-aws/src/main/java/org/apache/jackrabbit/oak/segment/aws/tool/AwsSegmentStoreMigrator.java
+++ b/oak-segment-aws/src/main/java/org/apache/jackrabbit/oak/segment/aws/tool/AwsSegmentStoreMigrator.java
@@ -152,9 +152,9 @@ public class AwsSegmentStoreMigrator implements Closeable {
             return;
         }
         SegmentArchiveManager sourceManager = source.createArchiveManager(false, false, new IOMonitorAdapter(),
-                new FileStoreMonitorAdapter(), new RemoteStoreMonitorAdapter());
+                new FileStoreMonitorAdapter(), new RemoteStoreMonitorAdapter(), true);
         SegmentArchiveManager targetManager = target.createArchiveManager(false, false, new IOMonitorAdapter(),
-                new FileStoreMonitorAdapter(), new RemoteStoreMonitorAdapter());
+                new FileStoreMonitorAdapter(), new RemoteStoreMonitorAdapter(), false);
         List<String> targetArchives =
targetManager.listArchives(); if (appendMode && !targetArchives.isEmpty()) { diff --git a/oak-segment-aws/src/test/java/org/apache/jackrabbit/oak/segment/aws/AwsArchiveManagerTest.java b/oak-segment-aws/src/test/java/org/apache/jackrabbit/oak/segment/aws/AwsArchiveManagerTest.java index 18d55b4ac6..0c9e02ae63 100644 --- a/oak-segment-aws/src/test/java/org/apache/jackrabbit/oak/segment/aws/AwsArchiveManagerTest.java +++ b/oak-segment-aws/src/test/java/org/apache/jackrabbit/oak/segment/aws/AwsArchiveManagerTest.java @@ -74,7 +74,7 @@ public class AwsArchiveManagerTest { @Test public void testRecovery() throws IOException { SegmentArchiveManager manager = new AwsPersistence(awsContext).createArchiveManager(false, false, - new IOMonitorAdapter(), new FileStoreMonitorAdapter(), new RemoteStoreMonitorAdapter()); + new IOMonitorAdapter(), new FileStoreMonitorAdapter(), new RemoteStoreMonitorAdapter(), false); SegmentArchiveWriter writer = manager.create("data00000a.tar"); List<UUID> uuids = new ArrayList<>(); @@ -149,7 +149,7 @@ public class AwsArchiveManagerTest { @Test public void testExists() throws IOException { SegmentArchiveManager manager = new AwsPersistence(awsContext).createArchiveManager(false, false, - new IOMonitorAdapter(), new FileStoreMonitorAdapter(), new RemoteStoreMonitorAdapter()); + new IOMonitorAdapter(), new FileStoreMonitorAdapter(), new RemoteStoreMonitorAdapter(), false); SegmentArchiveWriter writer = manager.create("data00000a.tar"); List<UUID> uuids = new ArrayList<>(); @@ -170,7 +170,7 @@ public class AwsArchiveManagerTest { @Test public void testArchiveExistsAfterFlush() throws IOException { SegmentArchiveManager manager = new AwsPersistence(awsContext).createArchiveManager(false, false, - new IOMonitorAdapter(), new FileStoreMonitorAdapter(), new RemoteStoreMonitorAdapter()); + new IOMonitorAdapter(), new FileStoreMonitorAdapter(), new RemoteStoreMonitorAdapter(), false); SegmentArchiveWriter writer = manager.create("data00000a.tar"); Assert.assertFalse(manager.exists("data00000a.tar")); diff --git a/oak-segment-aws/src/test/java/org/apache/jackrabbit/oak/segment/aws/AwsGCJournalFileTest.java b/oak-segment-aws/src/test/java/org/apache/jackrabbit/oak/segment/aws/AwsGCJournalFileTest.java index 6ffa1e62e3..51a119d0f4 100644 --- a/oak-segment-aws/src/test/java/org/apache/jackrabbit/oak/segment/aws/AwsGCJournalFileTest.java +++ b/oak-segment-aws/src/test/java/org/apache/jackrabbit/oak/segment/aws/AwsGCJournalFileTest.java @@ -85,7 +85,7 @@ public class AwsGCJournalFileTest extends GcJournalTest { @Override public SegmentArchiveManager createArchiveManager(boolean arg0, boolean arg1, IOMonitor arg2, - FileStoreMonitor arg3, RemoteStoreMonitor arg4) throws IOException { + FileStoreMonitor arg3, RemoteStoreMonitor arg4, boolean readOnly) throws IOException { throw new IOException(); } diff --git a/oak-segment-aws/src/test/java/org/apache/jackrabbit/oak/segment/aws/AwsReadSegmentTest.java b/oak-segment-aws/src/test/java/org/apache/jackrabbit/oak/segment/aws/AwsReadSegmentTest.java index f22f0179da..905435eefc 100644 --- a/oak-segment-aws/src/test/java/org/apache/jackrabbit/oak/segment/aws/AwsReadSegmentTest.java +++ b/oak-segment-aws/src/test/java/org/apache/jackrabbit/oak/segment/aws/AwsReadSegmentTest.java @@ -94,7 +94,7 @@ public class AwsReadSegmentTest { @Override public SegmentArchiveManager createArchiveManager(boolean mmap, boolean offHeapAccess, IOMonitor ioMonitor, - FileStoreMonitor fileStoreMonitor, RemoteStoreMonitor remoteStoreMonitor) { + FileStoreMonitor 
fileStoreMonitor, RemoteStoreMonitor remoteStoreMonitor, boolean readOnly) { return new AwsArchiveManager(awsContext.directory, ioMonitor, fileStoreMonitor) { @Override public SegmentArchiveReader open(String archiveName) throws IOException { diff --git a/oak-segment-aws/src/test/java/org/apache/jackrabbit/oak/segment/aws/tool/SegmentCopyTestBase.java b/oak-segment-aws/src/test/java/org/apache/jackrabbit/oak/segment/aws/tool/SegmentCopyTestBase.java index d5b9a65732..73f98a2bf8 100644 --- a/oak-segment-aws/src/test/java/org/apache/jackrabbit/oak/segment/aws/tool/SegmentCopyTestBase.java +++ b/oak-segment-aws/src/test/java/org/apache/jackrabbit/oak/segment/aws/tool/SegmentCopyTestBase.java @@ -93,9 +93,9 @@ public abstract class SegmentCopyTestBase { RemoteStoreMonitor remoteStoreMonitor = new RemoteStoreMonitorAdapter(); FileStoreMonitor fileStoreMonitor = new FileStoreMonitorAdapter(); SegmentArchiveManager srcArchiveManager = srcPersistence.createArchiveManager(false, false, ioMonitor, - fileStoreMonitor, remoteStoreMonitor); + fileStoreMonitor, remoteStoreMonitor, true); SegmentArchiveManager destArchiveManager = destPersistence.createArchiveManager(false, false, ioMonitor, - fileStoreMonitor, remoteStoreMonitor); + fileStoreMonitor, remoteStoreMonitor, false); checkArchives(srcArchiveManager, destArchiveManager); checkJournal(srcPersistence, destPersistence); diff --git a/oak-segment-aws/src/test/java/org/apache/jackrabbit/oak/segment/spi/persistence/split/SplitPersistenceTest.java b/oak-segment-aws/src/test/java/org/apache/jackrabbit/oak/segment/spi/persistence/split/SplitPersistenceTest.java index 100a49fdfa..0320033b6a 100644 --- a/oak-segment-aws/src/test/java/org/apache/jackrabbit/oak/segment/spi/persistence/split/SplitPersistenceTest.java +++ b/oak-segment-aws/src/test/java/org/apache/jackrabbit/oak/segment/spi/persistence/split/SplitPersistenceTest.java @@ -141,7 +141,7 @@ public class SplitPersistenceTest { splitFileStore.close(); splitFileStore = null; - SegmentArchiveManager manager = splitPersistence.createArchiveManager(true, false, new IOMonitorAdapter(), new FileStoreMonitorAdapter(), new RemoteStoreMonitorAdapter()); + SegmentArchiveManager manager = splitPersistence.createArchiveManager(true, false, new IOMonitorAdapter(), new FileStoreMonitorAdapter(), new RemoteStoreMonitorAdapter(), true); for (String archive : manager.listArchives()) { SegmentArchiveReader reader = manager.open(archive); BinaryReferencesIndexLoader.parseBinaryReferencesIndex(reader.getBinaryReferences()); diff --git a/oak-segment-azure/src/main/java/org/apache/jackrabbit/oak/segment/azure/AzureArchiveManager.java b/oak-segment-azure/src/main/java/org/apache/jackrabbit/oak/segment/azure/AzureArchiveManager.java index f4a5a7d0d4..2b3849aea7 100644 --- a/oak-segment-azure/src/main/java/org/apache/jackrabbit/oak/segment/azure/AzureArchiveManager.java +++ b/oak-segment-azure/src/main/java/org/apache/jackrabbit/oak/segment/azure/AzureArchiveManager.java @@ -16,6 +16,7 @@ */ package org.apache.jackrabbit.oak.segment.azure; +import com.azure.core.util.BinaryData; import com.azure.core.util.polling.PollResponse; import com.azure.storage.blob.BlobContainerClient; import com.azure.storage.blob.models.BlobCopyInfo; @@ -56,6 +57,10 @@ public class AzureArchiveManager implements SegmentArchiveManager { private static final Logger log = LoggerFactory.getLogger(AzureArchiveManager.class); + private static final String DELETED_ARCHIVE_MARKER = "deleted"; + + private static final String CLOSED_ARCHIVE_MARKER = 
"closed"; + protected final BlobContainerClient readBlobContainerClient; protected final BlobContainerClient writeBlobContainerClient; @@ -68,13 +73,16 @@ public class AzureArchiveManager implements SegmentArchiveManager { private final WriteAccessController writeAccessController; - public AzureArchiveManager(BlobContainerClient readBlobContainerClient, BlobContainerClient writeBlobContainerClient, String rootPrefix, IOMonitor ioMonitor, FileStoreMonitor fileStoreMonitor, WriteAccessController writeAccessController) { + private final boolean readOnly; + + public AzureArchiveManager(BlobContainerClient readBlobContainerClient, BlobContainerClient writeBlobContainerClient, String rootPrefix, IOMonitor ioMonitor, FileStoreMonitor fileStoreMonitor, WriteAccessController writeAccessController, boolean readOnly) { this.readBlobContainerClient = readBlobContainerClient; this.writeBlobContainerClient = writeBlobContainerClient; this.rootPrefix = AzureUtilities.asAzurePrefix(rootPrefix); this.ioMonitor = ioMonitor; this.monitor = fileStoreMonitor; this.writeAccessController = writeAccessController; + this.readOnly = readOnly; } @Override @@ -89,8 +97,10 @@ public class AzureArchiveManager implements SegmentArchiveManager { Iterator<String> it = archiveNames.iterator(); while (it.hasNext()) { String archiveName = it.next(); - if (isArchiveEmpty(archiveName)) { - delete(archiveName); + if (deleteInProgress(archiveName)) { + if (!readOnly) { + delete(archiveName); + } it.remove(); } } @@ -101,21 +111,25 @@ public class AzureArchiveManager implements SegmentArchiveManager { } /** - * Check if there's a valid 0000. segment in the archive + * Check if the archive is being deleted. + * * @param archiveName - * @return true if the archive is empty (no 0000.* segment) + * @return true if the "deleted" marker exists */ - private boolean isArchiveEmpty(String archiveName) throws BlobStorageException { - String fullBlobPrefix = getDirectory(archiveName) + "0000."; - ListBlobsOptions listBlobsOptions = new ListBlobsOptions(); - listBlobsOptions.setPrefix(fullBlobPrefix); - return !readBlobContainerClient.listBlobs(listBlobsOptions, null).iterator().hasNext(); + private boolean deleteInProgress(String archiveName) throws BlobStorageException { + return readBlobContainerClient.getBlobClient(getDirectory(archiveName) + DELETED_ARCHIVE_MARKER).exists(); + } + + private void checkWriteOperation(String operation) { + if (readOnly) { + throw new UnsupportedOperationException("Operation " + operation + " is not allowed in read-only mode"); + } } @Override public SegmentArchiveReader open(String archiveName) throws IOException { try { - String closedBlob = getDirectory(archiveName) + "closed"; + String closedBlob = getDirectory(archiveName) + CLOSED_ARCHIVE_MARKER; if (!readBlobContainerClient.getBlobClient(closedBlob).exists()) { return null; } @@ -132,30 +146,55 @@ public class AzureArchiveManager implements SegmentArchiveManager { @Override public SegmentArchiveWriter create(String archiveName) throws IOException { + checkWriteOperation("create"); return new AzureSegmentArchiveWriter(writeBlobContainerClient, rootPrefix, archiveName, ioMonitor, monitor, writeAccessController); } @Override public boolean delete(String archiveName) { + checkWriteOperation("delete"); try { + uploadDeletedMarker(archiveName); getBlobs(archiveName) .forEach(blobItem -> { try { - writeAccessController.checkWritingAllowed(); - writeBlobContainerClient.getBlobClient(blobItem.getName()).delete(); + String blobName = getName(blobItem); + if 
(!blobName.equals(DELETED_ARCHIVE_MARKER) && !blobName.equals(CLOSED_ARCHIVE_MARKER)) { + writeAccessController.checkWritingAllowed(); + writeBlobContainerClient.getBlobClient(blobItem.getName()).delete(); + } } catch (BlobStorageException e) { log.error("Can't delete segment {}", blobItem.getName(), e); } }); + deleteClosedMarker(archiveName); + deleteDeletedMarker(archiveName); return true; - } catch (IOException e) { + } catch (IOException | BlobStorageException e) { log.error("Can't delete archive {}", archiveName, e); return false; } } + private void deleteDeletedMarker(String archiveName) throws BlobStorageException { + writeAccessController.checkWritingAllowed(); + writeBlobContainerClient.getBlobClient(getDirectory(archiveName) + DELETED_ARCHIVE_MARKER).deleteIfExists(); + } + + private void deleteClosedMarker(String archiveName) throws BlobStorageException { + writeAccessController.checkWritingAllowed(); + writeBlobContainerClient.getBlobClient(getDirectory(archiveName) + CLOSED_ARCHIVE_MARKER).deleteIfExists(); + } + + private void uploadDeletedMarker(String archiveName) throws BlobStorageException { + writeAccessController.checkWritingAllowed(); + writeBlobContainerClient.getBlobClient(getDirectory(archiveName) + DELETED_ARCHIVE_MARKER).getBlockBlobClient().upload(BinaryData.fromBytes(new byte[0]), true); + } + + @Override public boolean renameTo(String from, String to) { + checkWriteOperation("renameTo"); try { String targetDirectory = getDirectory(to); getBlobs(from) @@ -176,6 +215,7 @@ public class AzureArchiveManager implements SegmentArchiveManager { @Override public void copyFile(String from, String to) throws IOException { + checkWriteOperation("copyFile"); String targetDirectory = getDirectory(to); getBlobs(from) .forEach(blobItem -> { @@ -201,6 +241,7 @@ public class AzureArchiveManager implements SegmentArchiveManager { @Override public void recoverEntries(String archiveName, LinkedHashMap<UUID, byte[]> entries) throws IOException { + checkWriteOperation("recoverEntries"); Pattern pattern = Pattern.compile(RemoteUtilities.SEGMENT_FILE_NAME_PATTERN); List<RecoveredEntry> entryList = new ArrayList<>(); @@ -240,7 +281,8 @@ public class AzureArchiveManager implements SegmentArchiveManager { private void delete(String archiveName, Set<UUID> recoveredEntries) throws IOException { getBlobs(archiveName) .forEach(blobItem -> { - if (!recoveredEntries.contains(RemoteUtilities.getSegmentUUID(getName(blobItem)))) { + String name = getName(blobItem); + if (RemoteUtilities.isSegmentName(name) && !recoveredEntries.contains(RemoteUtilities.getSegmentUUID(name))) { try { writeBlobContainerClient.getBlobClient(blobItem.getName()).delete(); } catch (BlobStorageException e) { @@ -257,6 +299,7 @@ public class AzureArchiveManager implements SegmentArchiveManager { */ @Override public void backup(@NotNull String archiveName, @NotNull String backupArchiveName, @NotNull Set<UUID> recoveredEntries) throws IOException { + checkWriteOperation("backup"); copyFile(archiveName, backupArchiveName); delete(archiveName, recoveredEntries); } diff --git a/oak-segment-azure/src/main/java/org/apache/jackrabbit/oak/segment/azure/AzurePersistence.java b/oak-segment-azure/src/main/java/org/apache/jackrabbit/oak/segment/azure/AzurePersistence.java index 295ca41b1e..84d484e90d 100644 --- a/oak-segment-azure/src/main/java/org/apache/jackrabbit/oak/segment/azure/AzurePersistence.java +++ b/oak-segment-azure/src/main/java/org/apache/jackrabbit/oak/segment/azure/AzurePersistence.java @@ -73,9 +73,9 @@ public class 
AzurePersistence implements SegmentNodeStorePersistence { } @Override - public SegmentArchiveManager createArchiveManager(boolean mmap, boolean offHeapAccess, IOMonitor ioMonitor, FileStoreMonitor fileStoreMonitor, RemoteStoreMonitor remoteStoreMonitor) { + public SegmentArchiveManager createArchiveManager(boolean mmap, boolean offHeapAccess, IOMonitor ioMonitor, FileStoreMonitor fileStoreMonitor, RemoteStoreMonitor remoteStoreMonitor, boolean readOnly) { attachRemoteStoreMonitor(remoteStoreMonitor); - return new AzureArchiveManager(readBlobContainerClient, writeBlobContainerClient, rootPrefix, ioMonitor, fileStoreMonitor, writeAccessController); + return new AzureArchiveManager(readBlobContainerClient, writeBlobContainerClient, rootPrefix, ioMonitor, fileStoreMonitor, writeAccessController, readOnly); } @Override diff --git a/oak-segment-azure/src/main/java/org/apache/jackrabbit/oak/segment/azure/AzureSegmentArchiveWriter.java b/oak-segment-azure/src/main/java/org/apache/jackrabbit/oak/segment/azure/AzureSegmentArchiveWriter.java index b558143b6c..d96dce3171 100644 --- a/oak-segment-azure/src/main/java/org/apache/jackrabbit/oak/segment/azure/AzureSegmentArchiveWriter.java +++ b/oak-segment-azure/src/main/java/org/apache/jackrabbit/oak/segment/azure/AzureSegmentArchiveWriter.java @@ -57,6 +57,7 @@ public class AzureSegmentArchiveWriter extends AbstractRemoteSegmentArchiveWrite this.archiveName = AzureUtilities.ensureNoTrailingSlash(archiveName); this.archivePathPrefix = AzureUtilities.asAzurePrefix(rootPrefix, archiveName); this.writeAccessController = writeAccessController; + this.created = AzureUtilities.archiveExists(blobContainerClient, archivePathPrefix); } @Override diff --git a/oak-segment-azure/src/main/java/org/apache/jackrabbit/oak/segment/azure/AzureUtilities.java b/oak-segment-azure/src/main/java/org/apache/jackrabbit/oak/segment/azure/AzureUtilities.java index 25f65a288c..c256392537 100644 --- a/oak-segment-azure/src/main/java/org/apache/jackrabbit/oak/segment/azure/AzureUtilities.java +++ b/oak-segment-azure/src/main/java/org/apache/jackrabbit/oak/segment/azure/AzureUtilities.java @@ -69,6 +69,14 @@ public final class AzureUtilities { return blobContainerClient.listBlobs(listOptions, null).stream().collect(Collectors.toList()); } + public static boolean archiveExists(BlobContainerClient blobContainerClient, String archivePathPrefix) { + ListBlobsOptions listOptions = new ListBlobsOptions(); + listOptions.setPrefix(archivePathPrefix); + listOptions.setMaxResultsPerPage(1); + return blobContainerClient.listBlobs(listOptions, null).iterator().hasNext(); + } + + public static void readBufferFully(BlockBlobClient blob, Buffer buffer) throws IOException { try { blob.downloadStream(new ByteBufferOutputStream(buffer)); diff --git a/oak-segment-azure/src/main/java/org/apache/jackrabbit/oak/segment/azure/tool/AzureCompact.java b/oak-segment-azure/src/main/java/org/apache/jackrabbit/oak/segment/azure/tool/AzureCompact.java index 492e3dbf92..9a60655c88 100644 --- a/oak-segment-azure/src/main/java/org/apache/jackrabbit/oak/segment/azure/tool/AzureCompact.java +++ b/oak-segment-azure/src/main/java/org/apache/jackrabbit/oak/segment/azure/tool/AzureCompact.java @@ -376,8 +376,8 @@ public class AzureCompact { SegmentNodeStorePersistence splitPersistence = new SplitPersistence(roPersistence, rwPersistence); - SegmentArchiveManager roArchiveManager = createArchiveManager(roPersistence); - SegmentArchiveManager rwArchiveManager = createArchiveManager(rwPersistence); + SegmentArchiveManager 
roArchiveManager = createArchiveManager(roPersistence, true); + SegmentArchiveManager rwArchiveManager = createArchiveManager(rwPersistence, false); System.out.printf("Compacting %s\n", path != null ? path : sourceBlobContainerClient.getBlobContainerUrl()); System.out.printf(" to %s\n", targetPath != null ? targetPath : destinationBlobContainerClient.getBlobContainerUrl()); diff --git a/oak-segment-azure/src/main/java/org/apache/jackrabbit/oak/segment/azure/tool/SegmentCopy.java b/oak-segment-azure/src/main/java/org/apache/jackrabbit/oak/segment/azure/tool/SegmentCopy.java index 8ceb766d08..15d8415bc6 100644 --- a/oak-segment-azure/src/main/java/org/apache/jackrabbit/oak/segment/azure/tool/SegmentCopy.java +++ b/oak-segment-azure/src/main/java/org/apache/jackrabbit/oak/segment/azure/tool/SegmentCopy.java @@ -288,7 +288,7 @@ public class SegmentCopy { } SegmentArchiveManager sourceManager = srcPersistence.createArchiveManager(false, false, - new IOMonitorAdapter(), new FileStoreMonitorAdapter(), new RemoteStoreMonitorAdapter()); + new IOMonitorAdapter(), new FileStoreMonitorAdapter(), new RemoteStoreMonitorAdapter(), true); int maxArchives = maxSizeGb * 4; int count = 0; diff --git a/oak-segment-azure/src/main/java/org/apache/jackrabbit/oak/segment/azure/tool/SegmentStoreMigrator.java b/oak-segment-azure/src/main/java/org/apache/jackrabbit/oak/segment/azure/tool/SegmentStoreMigrator.java index cdd2ac1af7..e99b070ecb 100644 --- a/oak-segment-azure/src/main/java/org/apache/jackrabbit/oak/segment/azure/tool/SegmentStoreMigrator.java +++ b/oak-segment-azure/src/main/java/org/apache/jackrabbit/oak/segment/azure/tool/SegmentStoreMigrator.java @@ -159,9 +159,9 @@ public class SegmentStoreMigrator implements Closeable { return; } SegmentArchiveManager sourceManager = source.createArchiveManager(false, false, new IOMonitorAdapter(), - new FileStoreMonitorAdapter(), new RemoteStoreMonitorAdapter()); + new FileStoreMonitorAdapter(), new RemoteStoreMonitorAdapter(), true); SegmentArchiveManager targetManager = target.createArchiveManager(false, false, new IOMonitorAdapter(), - new FileStoreMonitorAdapter(), new RemoteStoreMonitorAdapter()); + new FileStoreMonitorAdapter(), new RemoteStoreMonitorAdapter(), false); List<String> targetArchives = targetManager.listArchives(); if (appendMode && !targetArchives.isEmpty()) { diff --git a/oak-segment-azure/src/main/java/org/apache/jackrabbit/oak/segment/azure/tool/ToolUtils.java b/oak-segment-azure/src/main/java/org/apache/jackrabbit/oak/segment/azure/tool/ToolUtils.java index fddb0ff68a..48d982dd74 100644 --- a/oak-segment-azure/src/main/java/org/apache/jackrabbit/oak/segment/azure/tool/ToolUtils.java +++ b/oak-segment-azure/src/main/java/org/apache/jackrabbit/oak/segment/azure/tool/ToolUtils.java @@ -135,11 +135,11 @@ public class ToolUtils { return persistence; } - public static SegmentArchiveManager createArchiveManager(SegmentNodeStorePersistence persistence) { + public static SegmentArchiveManager createArchiveManager(SegmentNodeStorePersistence persistence, boolean readOnly) { SegmentArchiveManager archiveManager = null; try { archiveManager = persistence.createArchiveManager(false, false, new IOMonitorAdapter(), - new FileStoreMonitorAdapter(), new RemoteStoreMonitorAdapter()); + new FileStoreMonitorAdapter(), new RemoteStoreMonitorAdapter(), readOnly); } catch (IOException e) { throw new IllegalArgumentException( "Could not access the Azure Storage. 
Please verify the path provided!"); diff --git a/oak-segment-azure/src/main/java/org/apache/jackrabbit/oak/segment/azure/v8/AzureArchiveManagerV8.java b/oak-segment-azure/src/main/java/org/apache/jackrabbit/oak/segment/azure/v8/AzureArchiveManagerV8.java index ea10290000..fbf64c2231 100644 --- a/oak-segment-azure/src/main/java/org/apache/jackrabbit/oak/segment/azure/v8/AzureArchiveManagerV8.java +++ b/oak-segment-azure/src/main/java/org/apache/jackrabbit/oak/segment/azure/v8/AzureArchiveManagerV8.java @@ -55,6 +55,9 @@ public class AzureArchiveManagerV8 implements SegmentArchiveManager { private static final Logger log = LoggerFactory.getLogger(AzureSegmentArchiveReaderV8.class); + private static final String DELETED_ARCHIVE_MARKER = "deleted"; + private static final String CLOSED_ARCHIVE_MARKER = "closed"; + protected final CloudBlobDirectory cloudBlobDirectory; protected final IOMonitor ioMonitor; @@ -62,11 +65,14 @@ public class AzureArchiveManagerV8 implements SegmentArchiveManager { protected final FileStoreMonitor monitor; private WriteAccessController writeAccessController; - public AzureArchiveManagerV8(CloudBlobDirectory segmentstoreDirectory, IOMonitor ioMonitor, FileStoreMonitor fileStoreMonitor, WriteAccessController writeAccessController) { + private final boolean readOnly; + + public AzureArchiveManagerV8(CloudBlobDirectory segmentstoreDirectory, IOMonitor ioMonitor, FileStoreMonitor fileStoreMonitor, WriteAccessController writeAccessController, boolean readOnly) { this.cloudBlobDirectory = segmentstoreDirectory; this.ioMonitor = ioMonitor; this.monitor = fileStoreMonitor; this.writeAccessController = writeAccessController; + this.readOnly = readOnly; } @Override @@ -84,8 +90,10 @@ public class AzureArchiveManagerV8 implements SegmentArchiveManager { Iterator<String> it = archiveNames.iterator(); while (it.hasNext()) { String archiveName = it.next(); - if (isArchiveEmpty(archiveName)) { - delete(archiveName); + if (deleteInProgress(archiveName)) { + if (!readOnly) { + delete(archiveName); + } it.remove(); } } @@ -96,19 +104,26 @@ public class AzureArchiveManagerV8 implements SegmentArchiveManager { } /** - * Check if there's a valid 0000. segment in the archive + * Check if the archive is being deleted. 
+ * * @param archiveName - * @return true if the archive is empty (no 0000.* segment) + * @return true if the "deleted" marker exists */ - private boolean isArchiveEmpty(String archiveName) throws IOException, URISyntaxException, StorageException { - return !getDirectory(archiveName).listBlobs("0000.").iterator().hasNext(); + private boolean deleteInProgress(String archiveName) throws IOException, URISyntaxException, StorageException { + return getDirectory(archiveName).getBlockBlobReference(DELETED_ARCHIVE_MARKER).exists(); + } + + private void checkWriteOperation(String operation) { + if (readOnly) { + throw new UnsupportedOperationException("Operation " + operation + " is not allowed in read-only mode"); + } } @Override public SegmentArchiveReader open(String archiveName) throws IOException { try { CloudBlobDirectory archiveDirectory = getDirectory(archiveName); - if (!archiveDirectory.getBlockBlobReference("closed").exists()) { + if (!archiveDirectory.getBlockBlobReference(CLOSED_ARCHIVE_MARKER).exists()) { return null; } return new AzureSegmentArchiveReaderV8(archiveDirectory, ioMonitor); @@ -125,30 +140,54 @@ public class AzureArchiveManagerV8 implements SegmentArchiveManager { @Override public SegmentArchiveWriter create(String archiveName) throws IOException { + checkWriteOperation("create"); return new AzureSegmentArchiveWriterV8(getDirectory(archiveName), ioMonitor, monitor, writeAccessController); } @Override public boolean delete(String archiveName) { + checkWriteOperation("delete"); try { + uploadDeletedMarker(archiveName); getBlobs(archiveName) .forEach(cloudBlob -> { try { - writeAccessController.checkWritingAllowed(); - cloudBlob.delete(); + String blobName = getName(cloudBlob); + if (!blobName.equals(DELETED_ARCHIVE_MARKER) && !blobName.equals(CLOSED_ARCHIVE_MARKER)) { + writeAccessController.checkWritingAllowed(); + cloudBlob.delete(); + } } catch (StorageException e) { log.error("Can't delete segment {}", cloudBlob.getUri().getPath(), e); } }); + deleteClosedMarker(archiveName); + deleteDeletedMarker(archiveName); return true; - } catch (IOException e) { + } catch (IOException | URISyntaxException | StorageException e) { log.error("Can't delete archive {}", archiveName, e); return false; } } + private void deleteDeletedMarker(String archiveName) throws IOException, URISyntaxException, StorageException { + writeAccessController.checkWritingAllowed(); + getDirectory(archiveName).getBlockBlobReference(DELETED_ARCHIVE_MARKER).deleteIfExists(); + } + + private void deleteClosedMarker(String archiveName) throws IOException, URISyntaxException, StorageException { + writeAccessController.checkWritingAllowed(); + getDirectory(archiveName).getBlockBlobReference(CLOSED_ARCHIVE_MARKER).deleteIfExists(); + } + + private void uploadDeletedMarker(String archiveName) throws IOException, URISyntaxException, StorageException { + writeAccessController.checkWritingAllowed(); + getDirectory(archiveName).getBlockBlobReference(DELETED_ARCHIVE_MARKER).openOutputStream().close(); + } + @Override public boolean renameTo(String from, String to) { + checkWriteOperation("renameTo"); try { CloudBlobDirectory targetDirectory = getDirectory(to); getBlobs(from) @@ -169,6 +208,7 @@ public class AzureArchiveManagerV8 implements SegmentArchiveManager { @Override public void copyFile(String from, String to) throws IOException { + checkWriteOperation("copyFile"); CloudBlobDirectory targetDirectory = getDirectory(to); getBlobs(from) .forEach(cloudBlob -> { @@ -192,6 +232,7 @@ public class 
AzureArchiveManagerV8 implements SegmentArchiveManager { @Override public void recoverEntries(String archiveName, LinkedHashMap<UUID, byte[]> entries) throws IOException { + checkWriteOperation("recoverEntries"); Pattern pattern = Pattern.compile(RemoteUtilities.SEGMENT_FILE_NAME_PATTERN); List<RecoveredEntry> entryList = new ArrayList<>(); @@ -231,7 +272,8 @@ public class AzureArchiveManagerV8 implements SegmentArchiveManager { private void delete(String archiveName, Set<UUID> recoveredEntries) throws IOException { getBlobs(archiveName) .forEach(cloudBlob -> { - if (!recoveredEntries.contains(RemoteUtilities.getSegmentUUID(getName(cloudBlob)))) { + String name = getName(cloudBlob); + if (RemoteUtilities.isSegmentName(name) && !recoveredEntries.contains(RemoteUtilities.getSegmentUUID(name))) { try { cloudBlob.delete(); } catch (StorageException e) { @@ -248,6 +290,7 @@ public class AzureArchiveManagerV8 implements SegmentArchiveManager { */ @Override public void backup(@NotNull String archiveName, @NotNull String backupArchiveName, @NotNull Set<UUID> recoveredEntries) throws IOException { + checkWriteOperation("backup"); copyFile(archiveName, backupArchiveName); delete(archiveName, recoveredEntries); } diff --git a/oak-segment-azure/src/main/java/org/apache/jackrabbit/oak/segment/azure/v8/AzurePersistenceV8.java b/oak-segment-azure/src/main/java/org/apache/jackrabbit/oak/segment/azure/v8/AzurePersistenceV8.java index 0b056f768f..1ece9f4f78 100644 --- a/oak-segment-azure/src/main/java/org/apache/jackrabbit/oak/segment/azure/v8/AzurePersistenceV8.java +++ b/oak-segment-azure/src/main/java/org/apache/jackrabbit/oak/segment/azure/v8/AzurePersistenceV8.java @@ -59,9 +59,9 @@ public class AzurePersistenceV8 implements SegmentNodeStorePersistence { } @Override - public SegmentArchiveManager createArchiveManager(boolean mmap, boolean offHeapAccess, IOMonitor ioMonitor, FileStoreMonitor fileStoreMonitor, RemoteStoreMonitor remoteStoreMonitor) { + public SegmentArchiveManager createArchiveManager(boolean mmap, boolean offHeapAccess, IOMonitor ioMonitor, FileStoreMonitor fileStoreMonitor, RemoteStoreMonitor remoteStoreMonitor, boolean readOnly) { attachRemoteStoreMonitor(remoteStoreMonitor); - return new AzureArchiveManagerV8(segmentstoreDirectory, ioMonitor, fileStoreMonitor, writeAccessController); + return new AzureArchiveManagerV8(segmentstoreDirectory, ioMonitor, fileStoreMonitor, writeAccessController, readOnly); } @Override diff --git a/oak-segment-azure/src/main/java/org/apache/jackrabbit/oak/segment/azure/v8/AzureSegmentArchiveWriterV8.java b/oak-segment-azure/src/main/java/org/apache/jackrabbit/oak/segment/azure/v8/AzureSegmentArchiveWriterV8.java index 89ae33763a..9a3716e4c8 100644 --- a/oak-segment-azure/src/main/java/org/apache/jackrabbit/oak/segment/azure/v8/AzureSegmentArchiveWriterV8.java +++ b/oak-segment-azure/src/main/java/org/apache/jackrabbit/oak/segment/azure/v8/AzureSegmentArchiveWriterV8.java @@ -23,6 +23,7 @@ import static org.apache.jackrabbit.oak.segment.remote.RemoteUtilities.OFF_HEAP; import java.io.File; import java.io.IOException; import java.net.URISyntaxException; +import java.util.NoSuchElementException; import java.util.concurrent.TimeUnit; import com.microsoft.azure.storage.blob.BlobRequestOptions; @@ -40,6 +41,8 @@ import org.apache.jackrabbit.oak.segment.remote.AbstractRemoteSegmentArchiveWrit import org.apache.jackrabbit.oak.segment.remote.RemoteSegmentArchiveEntry; import org.apache.jackrabbit.oak.segment.spi.monitor.FileStoreMonitor; import 
org.apache.jackrabbit.oak.segment.spi.monitor.IOMonitor; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class AzureSegmentArchiveWriterV8 extends AbstractRemoteSegmentArchiveWriter { @@ -52,11 +55,23 @@ public class AzureSegmentArchiveWriterV8 extends AbstractRemoteSegmentArchiveWri private final BlobRequestOptions writeOptimisedBlobRequestOptions; - public AzureSegmentArchiveWriterV8(CloudBlobDirectory archiveDirectory, IOMonitor ioMonitor, FileStoreMonitor monitor, WriteAccessController writeAccessController) { + private static final Logger LOG = LoggerFactory.getLogger(AzureSegmentArchiveWriterV8.class); + + public AzureSegmentArchiveWriterV8(CloudBlobDirectory archiveDirectory, IOMonitor ioMonitor, FileStoreMonitor monitor, WriteAccessController writeAccessController) throws IOException { super(ioMonitor, monitor); this.archiveDirectory = archiveDirectory; this.writeAccessController = writeAccessController; this.writeOptimisedBlobRequestOptions = AzureRequestOptionsV8.optimiseForWriteOperations(archiveDirectory.getServiceClient().getDefaultRequestOptions()); + this.created = hasBlobs(); + } + + private boolean hasBlobs() throws IOException { + try { + return this.archiveDirectory.listBlobs().iterator().hasNext(); + } catch (StorageException | URISyntaxException | NoSuchElementException e) { + LOG.error("Error listing blobs", e); + throw new IOException(e); + } } @Override diff --git a/oak-segment-azure/src/test/java/org/apache/jackrabbit/oak/segment/azure/AzureArchiveManagerIgnoreSamePrefixTest.java b/oak-segment-azure/src/test/java/org/apache/jackrabbit/oak/segment/azure/AzureArchiveManagerIgnoreSamePrefixTest.java index 35cc9b0b8b..57cc366a14 100644 --- a/oak-segment-azure/src/test/java/org/apache/jackrabbit/oak/segment/azure/AzureArchiveManagerIgnoreSamePrefixTest.java +++ b/oak-segment-azure/src/test/java/org/apache/jackrabbit/oak/segment/azure/AzureArchiveManagerIgnoreSamePrefixTest.java @@ -61,6 +61,7 @@ public class AzureArchiveManagerIgnoreSamePrefixTest { private static final String rootPrefix = "oak"; private static final String segmentName = "0004.44b4a246-50e0-470a-abe4-5a37a81c37c1"; + private boolean readOnly = false; @Before public void setup() throws BlobStorageException, InvalidKeyException, URISyntaxException, IOException { @@ -88,7 +89,7 @@ public class AzureArchiveManagerIgnoreSamePrefixTest { writeBlobContainerClient.getBlobClient(rootPrefix + "/" + bakArchiveName + "/" + segmentName) .getBlockBlobClient().upload(BinaryData.fromString("test-data-segment-content")); - SegmentArchiveManager manager = azurePersistence.createArchiveManager(false, false, new IOMonitorAdapter(), new FileStoreMonitorAdapter(), new RemoteStoreMonitorAdapter()); + SegmentArchiveManager manager = azurePersistence.createArchiveManager(false, false, new IOMonitorAdapter(), new FileStoreMonitorAdapter(), new RemoteStoreMonitorAdapter(), readOnly); SegmentArchiveWriter writer = manager.create(archiveName); List<UUID> uuids = new ArrayList<>(); @@ -116,7 +117,7 @@ public class AzureArchiveManagerIgnoreSamePrefixTest { writeBlobContainerClient.getBlobClient(rootPrefix + "/" + bakArchiveName + "/" + segmentName) .getBlockBlobClient().upload(BinaryData.fromString("test-data-segment-content")); - SegmentArchiveManager manager = azurePersistence.createArchiveManager(false, false, new IOMonitorAdapter(), new FileStoreMonitorAdapter(), new RemoteStoreMonitorAdapter()); + SegmentArchiveManager manager = azurePersistence.createArchiveManager(false, false, new IOMonitorAdapter(), new 
FileStoreMonitorAdapter(), new RemoteStoreMonitorAdapter(), readOnly); assertFalse(manager.exists(archiveName)); } @@ -130,7 +131,7 @@ public class AzureArchiveManagerIgnoreSamePrefixTest { writeBlobContainerClient.getBlobClient(rootPrefix + "/" + bakArchiveName + "/" + segmentName) .getBlockBlobClient().upload(BinaryData.fromString("test-data-segment-content")); - SegmentArchiveManager manager = azurePersistence.createArchiveManager(false, false, new IOMonitorAdapter(), new FileStoreMonitorAdapter(), new RemoteStoreMonitorAdapter()); + SegmentArchiveManager manager = azurePersistence.createArchiveManager(false, false, new IOMonitorAdapter(), new FileStoreMonitorAdapter(), new RemoteStoreMonitorAdapter(), readOnly); manager.renameTo(archiveName, targetArchiveName); boolean blobExists = readBlobContainerClient.listBlobs(new ListBlobsOptions().setPrefix(rootPrefix + "/" + targetArchiveName), null) @@ -148,7 +149,7 @@ public class AzureArchiveManagerIgnoreSamePrefixTest { writeBlobContainerClient.getBlobClient(rootPrefix + "/" + bakArchiveName + "/" + segmentName) .getBlockBlobClient().upload(BinaryData.fromString("test-data-segment-content")); - SegmentArchiveManager manager = azurePersistence.createArchiveManager(false, false, new IOMonitorAdapter(), new FileStoreMonitorAdapter(), new RemoteStoreMonitorAdapter()); + SegmentArchiveManager manager = azurePersistence.createArchiveManager(false, false, new IOMonitorAdapter(), new FileStoreMonitorAdapter(), new RemoteStoreMonitorAdapter(), readOnly); manager.copyFile(archiveName, targetArchiveName); boolean blobExistsInTargetArchive = readBlobContainerClient.listBlobs(new ListBlobsOptions().setPrefix(rootPrefix + "/" + targetArchiveName), null) @@ -164,7 +165,7 @@ public class AzureArchiveManagerIgnoreSamePrefixTest { writeBlobContainerClient.getBlobClient(rootPrefix + "/" + bakArchiveName + "/" + segmentName) .getBlockBlobClient().upload(BinaryData.fromString("test-data-segment-content")); - SegmentArchiveManager manager = azurePersistence.createArchiveManager(false, false, new IOMonitorAdapter(), new FileStoreMonitorAdapter(), new RemoteStoreMonitorAdapter()); + SegmentArchiveManager manager = azurePersistence.createArchiveManager(false, false, new IOMonitorAdapter(), new FileStoreMonitorAdapter(), new RemoteStoreMonitorAdapter(), readOnly); manager.delete(archiveName); boolean blobExists = readBlobContainerClient.listBlobs(new ListBlobsOptions().setPrefix(rootPrefix + "/" + bakArchiveName + "/"), null) @@ -183,7 +184,7 @@ public class AzureArchiveManagerIgnoreSamePrefixTest { writeBlobContainerClient.getBlobClient(rootPrefix + "/" + extraBackupArchiveTestName + "/" + segmentName) .getBlockBlobClient().getBlobOutputStream().close(); - SegmentArchiveManager manager = azurePersistence.createArchiveManager(false, false, new IOMonitorAdapter(), new FileStoreMonitorAdapter(), new RemoteStoreMonitorAdapter()); + SegmentArchiveManager manager = azurePersistence.createArchiveManager(false, false, new IOMonitorAdapter(), new FileStoreMonitorAdapter(), new RemoteStoreMonitorAdapter(), readOnly); SegmentArchiveWriter writer = manager.create(archiveTestName); List<UUID> uuids = new ArrayList<>(); diff --git a/oak-segment-azure/src/test/java/org/apache/jackrabbit/oak/segment/azure/AzureArchiveManagerTest.java b/oak-segment-azure/src/test/java/org/apache/jackrabbit/oak/segment/azure/AzureArchiveManagerTest.java index f6b19dffd7..6a3e180a02 100644 --- a/oak-segment-azure/src/test/java/org/apache/jackrabbit/oak/segment/azure/AzureArchiveManagerTest.java +++ 
b/oak-segment-azure/src/test/java/org/apache/jackrabbit/oak/segment/azure/AzureArchiveManagerTest.java @@ -16,6 +16,7 @@ */ package org.apache.jackrabbit.oak.segment.azure; +import com.azure.core.util.BinaryData; import com.azure.storage.blob.BlobContainerClient; import com.azure.storage.blob.models.BlobItem; import com.azure.storage.blob.models.BlobStorageException; @@ -92,6 +93,7 @@ public class AzureArchiveManagerTest { private BlobContainerClient noRetryBlobContainerClient; private AzurePersistence azurePersistence; + private boolean readOnly = false; @Before public void setup() throws BlobStorageException, InvalidKeyException, URISyntaxException { @@ -112,7 +114,7 @@ public class AzureArchiveManagerTest { @Test public void testRecovery() throws BlobStorageException, IOException { - SegmentArchiveManager manager = azurePersistence.createArchiveManager(false, false, new IOMonitorAdapter(), new FileStoreMonitorAdapter(), new RemoteStoreMonitorAdapter()); + SegmentArchiveManager manager = azurePersistence.createArchiveManager(false, false, new IOMonitorAdapter(), new FileStoreMonitorAdapter(), new RemoteStoreMonitorAdapter(), readOnly); SegmentArchiveWriter writer = manager.create("data00000a.tar"); List<UUID> uuids = new ArrayList<>(); @@ -134,7 +136,7 @@ public class AzureArchiveManagerTest { @Test public void testBackupWithRecoveredEntries() throws BlobStorageException, IOException { - SegmentArchiveManager manager = azurePersistence.createArchiveManager(false, false, new IOMonitorAdapter(), new FileStoreMonitorAdapter(), new RemoteStoreMonitorAdapter()); + SegmentArchiveManager manager = azurePersistence.createArchiveManager(false, false, new IOMonitorAdapter(), new FileStoreMonitorAdapter(), new RemoteStoreMonitorAdapter(), readOnly); SegmentArchiveWriter writer = manager.create("data00000a.tar"); List<UUID> uuids = new ArrayList<>(); @@ -285,7 +287,7 @@ public class AzureArchiveManagerTest { @Test public void testExists() throws IOException { - SegmentArchiveManager manager = azurePersistence.createArchiveManager(false, false, new IOMonitorAdapter(), new FileStoreMonitorAdapter(), new RemoteStoreMonitorAdapter()); + SegmentArchiveManager manager = azurePersistence.createArchiveManager(false, false, new IOMonitorAdapter(), new FileStoreMonitorAdapter(), new RemoteStoreMonitorAdapter(), readOnly); SegmentArchiveWriter writer = manager.create("data00000a.tar"); List<UUID> uuids = new ArrayList<>(); @@ -304,7 +306,7 @@ public class AzureArchiveManagerTest { @Test public void testArchiveExistsAfterFlush() throws IOException { - SegmentArchiveManager manager = azurePersistence.createArchiveManager(false, false, new IOMonitorAdapter(), new FileStoreMonitorAdapter(), new RemoteStoreMonitorAdapter()); + SegmentArchiveManager manager = azurePersistence.createArchiveManager(false, false, new IOMonitorAdapter(), new FileStoreMonitorAdapter(), new RemoteStoreMonitorAdapter(), readOnly); SegmentArchiveWriter writer = manager.create("data00000a.tar"); Assert.assertFalse(manager.exists("data00000a.tar")); @@ -316,7 +318,7 @@ public class AzureArchiveManagerTest { @Test(expected = FileNotFoundException.class) public void testSegmentDeletedAfterCreatingReader() throws IOException, BlobStorageException { - SegmentArchiveManager manager = azurePersistence.createArchiveManager(false, false, new IOMonitorAdapter(), new FileStoreMonitorAdapter(), new RemoteStoreMonitorAdapter()); + SegmentArchiveManager manager = azurePersistence.createArchiveManager(false, false, new IOMonitorAdapter(), new 
FileStoreMonitorAdapter(), new RemoteStoreMonitorAdapter(), readOnly); SegmentArchiveWriter writer = manager.create("data00000a.tar"); Assert.assertFalse(manager.exists("data00000a.tar")); @@ -349,7 +351,7 @@ public class AzureArchiveManagerTest { AzurePersistence azurePersistence = new AzurePersistence(readBlobContainerClient, writeBlobContainerClient, noRetryBlobContainerClient, "oak"); FileStore fileStore = FileStoreBuilder.fileStoreBuilder(new File("target")).withCustomPersistence(azurePersistence).build(); - SegmentArchiveManager manager = azurePersistence.createArchiveManager(false, false, new IOMonitorAdapter(), new FileStoreMonitorAdapter(), new RemoteStoreMonitorAdapter()); + SegmentArchiveManager manager = azurePersistence.createArchiveManager(false, false, new IOMonitorAdapter(), new FileStoreMonitorAdapter(), new RemoteStoreMonitorAdapter(), readOnly); SegmentArchiveWriter writer = manager.create("data00000a.tar"); //Assert.assertFalse(manager.exists("data00000a.tar")); @@ -526,7 +528,7 @@ public class AzureArchiveManagerTest { WriteAccessController writeAccessController = new WriteAccessController(); AzureRepositoryLock azureRepositoryLock = new AzureRepositoryLock(blobMocked, blobLeaseMocked, () -> { }, writeAccessController); - AzureArchiveManager azureArchiveManager = new AzureArchiveManager(oakDirectory, writeOakDirectory, "", new IOMonitorAdapter(), new FileStoreMonitorAdapter(), writeAccessController); + AzureArchiveManager azureArchiveManager = new AzureArchiveManager(oakDirectory, writeOakDirectory, "", new IOMonitorAdapter(), new FileStoreMonitorAdapter(), writeAccessController, readOnly); Mockito @@ -535,7 +537,7 @@ public class AzureArchiveManagerTest { Mockito .doReturn(azureArchiveManager) - .when(mockedRwPersistence).createArchiveManager(Mockito.anyBoolean(), Mockito.anyBoolean(), Mockito.any(), Mockito.any(), Mockito.any()); + .when(mockedRwPersistence).createArchiveManager(Mockito.anyBoolean(), Mockito.anyBoolean(), Mockito.any(), Mockito.any(), Mockito.any(), Mockito.eq(readOnly)); Mockito .doReturn(new AzureJournalFile(oakDirectory, writeOakDirectory, "journal.log", writeAccessController)) .when(mockedRwPersistence).getJournalFile(); @@ -607,6 +609,108 @@ public class AzureArchiveManagerTest { }; } + @Test + public void testListArchivesDoesNotReturnDeletedArchive() throws IOException, BlobStorageException { + // The archive manager should not return the archive which has "deleted" marker + SegmentArchiveManager manager = azurePersistence.createArchiveManager(false, false, new IOMonitorAdapter(), new FileStoreMonitorAdapter(), new RemoteStoreMonitorAdapter(), readOnly); + + // Create an archive + SegmentArchiveWriter writer = manager.create("data00000a.tar"); + UUID u = UUID.randomUUID(); + writer.writeSegment(u.getMostSignificantBits(), u.getLeastSignificantBits(), new byte[10], 0, 10, 0, 0, false); + writer.flush(); + writer.close(); + + // Verify the archive is listed + List<String> archives = manager.listArchives(); + assertTrue("Archive should be listed before deletion", archives.contains("data00000a.tar")); + + // Upload deleted marker for the archive + writeBlobContainerClient.getBlobClient("oak/data00000a.tar/deleted").getBlockBlobClient().upload(BinaryData.fromBytes(new byte[0])); + + // Verify the archive is no longer listed after adding deleted marker + archives = manager.listArchives(); + assertFalse("Archive should not be listed after deleted marker is uploaded", archives.contains("data00000a.tar")); + } + + @Test + public void 
testListArchivesInReadOnlyModeWithPartiallyDeletedArchive() throws IOException, BlobStorageException { + // Create a read-write manager first to create an archive + SegmentArchiveManager rwManager = azurePersistence.createArchiveManager(false, false, new IOMonitorAdapter(), new FileStoreMonitorAdapter(), new RemoteStoreMonitorAdapter(), false); + + // Create an archive with some segments + SegmentArchiveWriter writer = rwManager.create("data00000b.tar"); + UUID u1 = UUID.randomUUID(); + UUID u2 = UUID.randomUUID(); + writer.writeSegment(u1.getMostSignificantBits(), u1.getLeastSignificantBits(), new byte[10], 0, 10, 0, 0, false); + writer.writeSegment(u2.getMostSignificantBits(), u2.getLeastSignificantBits(), new byte[10], 0, 10, 0, 0, false); + writer.flush(); + writer.close(); + + // Verify the archive is initially listed + List<String> archives = rwManager.listArchives(); + assertTrue("Archive should be listed initially", archives.contains("data00000b.tar")); + + // Add deleted marker - simulates partially deleted archive + writeBlobContainerClient.getBlobClient("oak/data00000b.tar/deleted").getBlockBlobClient().upload(BinaryData.fromBytes(new byte[0])); + + ListBlobsOptions listOptions = new ListBlobsOptions(); + listOptions.setPrefix("oak/data00000b.tar/"); + assertTrue("Archive directory should still contain blobs", + readBlobContainerClient.listBlobs(listOptions, null).iterator().hasNext()); + + // Create a read-only manager + SegmentArchiveManager roManager = azurePersistence.createArchiveManager(false, false, new IOMonitorAdapter(), new FileStoreMonitorAdapter(), new RemoteStoreMonitorAdapter(), true); + + archives = roManager.listArchives(); + assertFalse("Partially deleted archive should not be listed in read-only mode", archives.contains("data00000b.tar")); + + assertTrue("Archive directory should still contain blobs after read-only listArchives", + readBlobContainerClient.listBlobs(listOptions, null).iterator().hasNext()); + + assertTrue("Deleted marker should still exist", + readBlobContainerClient.getBlobClient("oak/data00000b.tar/deleted").exists()); + } + + @Test + public void testListArchivesInReadWriteModeWithPartiallyDeletedArchive() throws IOException, BlobStorageException { + // Create a read-write manager to create an archive + SegmentArchiveManager rwManager = azurePersistence.createArchiveManager(false, false, new IOMonitorAdapter(), new FileStoreMonitorAdapter(), new RemoteStoreMonitorAdapter(), false); + + // Create an archive with some segments + SegmentArchiveWriter writer = rwManager.create("data00000c.tar"); + UUID u1 = UUID.randomUUID(); + UUID u2 = UUID.randomUUID(); + writer.writeSegment(u1.getMostSignificantBits(), u1.getLeastSignificantBits(), new byte[10], 0, 10, 0, 0, false); + writer.writeSegment(u2.getMostSignificantBits(), u2.getLeastSignificantBits(), new byte[10], 0, 10, 0, 0, false); + writer.flush(); + writer.close(); + + // Verify the archive is initially listed + List<String> archives = rwManager.listArchives(); + assertTrue("Archive should be listed initially", archives.contains("data00000c.tar")); + + // Add deleted marker - simulates partially deleted archive + writeBlobContainerClient.getBlobClient("oak/data00000c.tar/deleted").getBlockBlobClient().upload(BinaryData.fromBytes(new byte[0])); + + ListBlobsOptions listOptions = new ListBlobsOptions(); + listOptions.setPrefix("oak/data00000c.tar/"); + assertTrue("Archive directory should still contain blobs before cleanup", + readBlobContainerClient.listBlobs(listOptions, 
null).iterator().hasNext()); + + assertTrue("Deleted marker should exist before cleanup", + readBlobContainerClient.getBlobClient("oak/data00000c.tar/deleted").exists()); + + archives = rwManager.listArchives(); + assertFalse("Partially deleted archive should not be listed in read-write mode", archives.contains("data00000c.tar")); + + assertFalse("Archive directory should be empty after read-write listArchives cleanup", + readBlobContainerClient.listBlobs(listOptions, null).iterator().hasNext()); + + assertFalse("Deleted marker should be removed after cleanup", + readBlobContainerClient.getBlobClient("oak/data00000c.tar/deleted").exists()); + } + private static void assertDoesNotThrow(Executable executable) { try { executable.execute(); diff --git a/oak-segment-azure/src/test/java/org/apache/jackrabbit/oak/segment/azure/AzureReadSegmentTest.java b/oak-segment-azure/src/test/java/org/apache/jackrabbit/oak/segment/azure/AzureReadSegmentTest.java index cbc3d2cf7c..85a07aeb3b 100644 --- a/oak-segment-azure/src/test/java/org/apache/jackrabbit/oak/segment/azure/AzureReadSegmentTest.java +++ b/oak-segment-azure/src/test/java/org/apache/jackrabbit/oak/segment/azure/AzureReadSegmentTest.java @@ -94,8 +94,8 @@ public class AzureReadSegmentTest { @Override public SegmentArchiveManager createArchiveManager(boolean mmap, boolean offHeapAccess, IOMonitor ioMonitor, - FileStoreMonitor fileStoreMonitor, RemoteStoreMonitor remoteStoreMonitor) { - return new AzureArchiveManager(readBlobContainerClient, writeBlobContainerClient, rootPrefix, ioMonitor, fileStoreMonitor, writeAccessController) { + FileStoreMonitor fileStoreMonitor, RemoteStoreMonitor remoteStoreMonitor, boolean readOnly) { + return new AzureArchiveManager(readBlobContainerClient, writeBlobContainerClient, rootPrefix, ioMonitor, fileStoreMonitor, writeAccessController, readOnly) { @Override public SegmentArchiveReader open(String archiveName) throws IOException { return new AzureSegmentArchiveReader(readBlobContainerClient, rootPrefix, archiveName, ioMonitor) { diff --git a/oak-segment-azure/src/test/java/org/apache/jackrabbit/oak/segment/azure/AzureSegmentArchiveWriterTest.java b/oak-segment-azure/src/test/java/org/apache/jackrabbit/oak/segment/azure/AzureSegmentArchiveWriterTest.java index b2a12f039b..345e3f2aa5 100644 --- a/oak-segment-azure/src/test/java/org/apache/jackrabbit/oak/segment/azure/AzureSegmentArchiveWriterTest.java +++ b/oak-segment-azure/src/test/java/org/apache/jackrabbit/oak/segment/azure/AzureSegmentArchiveWriterTest.java @@ -198,7 +198,7 @@ public class AzureSegmentArchiveWriterTest { writeAccessController.enableWriting(); AzurePersistence azurePersistence = new AzurePersistence(readBlobContainerClient, writeBlobContainerClient, noRetryBlobContainerClient, "oak");/**/ azurePersistence.setWriteAccessController(writeAccessController); - SegmentArchiveManager manager = azurePersistence.createArchiveManager(false, false, new IOMonitorAdapter(), new FileStoreMonitorAdapter(), new RemoteStoreMonitorAdapter()); + SegmentArchiveManager manager = azurePersistence.createArchiveManager(false, false, new IOMonitorAdapter(), new FileStoreMonitorAdapter(), new RemoteStoreMonitorAdapter(), false); SegmentArchiveWriter writer = manager.create("data00000a.tar"); return writer; } @@ -236,11 +236,40 @@ public class AzureSegmentArchiveWriterTest { } private void createContainerMock() { + // Mock container creation (PUT) mockServerClient .when(request() .withMethod("PUT") - .withPath(BASE_PATH)) + .withPath(BASE_PATH) + 
.withQueryStringParameter("restype", "container")) .respond(response().withStatusCode(201).withBody("Container created successfully")); + + // Mock container existence check (HEAD) + mockServerClient + .when(request() + .withMethod("HEAD") + .withPath(BASE_PATH) + .withQueryStringParameter("restype", "container")) + .respond(response().withStatusCode(200)); + + // Mock listBlobs operation for archiveExists() call - return empty list + mockServerClient + .when(request() + .withMethod("GET") + .withPath(BASE_PATH) + .withQueryStringParameter("restype", "container") + .withQueryStringParameter("comp", "list") + .withQueryStringParameter("prefix", "oak/data00000a.tar/") + .withQueryStringParameter("maxresults", "1"), Times.once()) + .respond(response() + .withStatusCode(200) + .withHeader("Content-Type", "application/xml") + .withBody("<?xml version=\"1.0\" encoding=\"utf-8\"?>" + + "<EnumerationResults ServiceEndpoint=\"http://127.0.0.1:10000/devstoreaccount1\" ContainerName=\"oak-test\">" + + "<Prefix>oak/data00000a.tar/</Prefix>" + + "<MaxResults>1</MaxResults>" + + "<Blobs></Blobs>" + + "</EnumerationResults>")); } public BlobContainerClient getCloudStorageAccount(String containerName, RequestRetryOptions retryOptions) { diff --git a/oak-segment-azure/src/test/java/org/apache/jackrabbit/oak/segment/azure/AzureTarFileTest.java b/oak-segment-azure/src/test/java/org/apache/jackrabbit/oak/segment/azure/AzureTarFileTest.java index 9c4aad9aeb..5eaab46302 100644 --- a/oak-segment-azure/src/test/java/org/apache/jackrabbit/oak/segment/azure/AzureTarFileTest.java +++ b/oak-segment-azure/src/test/java/org/apache/jackrabbit/oak/segment/azure/AzureTarFileTest.java @@ -53,7 +53,7 @@ public class AzureTarFileTest extends TarFileTest { WriteAccessController writeAccessController = new WriteAccessController(); writeAccessController.enableWriting(); azurePersistence.setWriteAccessController(writeAccessController); - archiveManager = azurePersistence.createArchiveManager(true, false, new IOMonitorAdapter(), new FileStoreMonitorAdapter(), new RemoteStoreMonitorAdapter()); + archiveManager = azurePersistence.createArchiveManager(true, false, new IOMonitorAdapter(), new FileStoreMonitorAdapter(), new RemoteStoreMonitorAdapter(), false); } catch (BlobStorageException e) { throw new IOException(e); } diff --git a/oak-segment-azure/src/test/java/org/apache/jackrabbit/oak/segment/azure/AzureTarWriterTest.java b/oak-segment-azure/src/test/java/org/apache/jackrabbit/oak/segment/azure/AzureTarWriterTest.java index a3ee015f87..f1f78602ec 100644 --- a/oak-segment-azure/src/test/java/org/apache/jackrabbit/oak/segment/azure/AzureTarWriterTest.java +++ b/oak-segment-azure/src/test/java/org/apache/jackrabbit/oak/segment/azure/AzureTarWriterTest.java @@ -47,7 +47,7 @@ public class AzureTarWriterTest extends TarWriterTest { protected SegmentArchiveManager getSegmentArchiveManager() throws Exception { WriteAccessController writeAccessController = new WriteAccessController(); writeAccessController.enableWriting(); - AzureArchiveManager azureArchiveManager = new AzureArchiveManager(readBlobContainerClient, writeBlobContainerClient, "oak", new IOMonitorAdapter(), monitor, writeAccessController); + AzureArchiveManager azureArchiveManager = new AzureArchiveManager(readBlobContainerClient, writeBlobContainerClient, "oak", new IOMonitorAdapter(), monitor, writeAccessController, false); return azureArchiveManager; } @@ -56,7 +56,7 @@ public class AzureTarWriterTest extends TarWriterTest { protected SegmentArchiveManager 
getFailingSegmentArchiveManager() throws Exception { final WriteAccessController writeAccessController = new WriteAccessController(); writeAccessController.enableWriting(); - return new AzureArchiveManager(readBlobContainerClient, writeBlobContainerClient, "oak", new IOMonitorAdapter(), monitor, writeAccessController) { + return new AzureArchiveManager(readBlobContainerClient, writeBlobContainerClient, "oak", new IOMonitorAdapter(), monitor, writeAccessController, false) { @Override public SegmentArchiveWriter create(String archiveName) throws IOException { return new AzureSegmentArchiveWriter(writeBlobContainerClient, "oak", archiveName, ioMonitor, monitor, writeAccessController) { diff --git a/oak-segment-azure/src/test/java/org/apache/jackrabbit/oak/segment/azure/AzureUtilitiesTest.java b/oak-segment-azure/src/test/java/org/apache/jackrabbit/oak/segment/azure/AzureUtilitiesTest.java new file mode 100644 index 0000000000..5a66b0cc7e --- /dev/null +++ b/oak-segment-azure/src/test/java/org/apache/jackrabbit/oak/segment/azure/AzureUtilitiesTest.java @@ -0,0 +1,79 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.jackrabbit.oak.segment.azure; + +import com.azure.core.util.BinaryData; +import com.azure.storage.blob.BlobContainerClient; +import com.azure.storage.blob.models.BlobStorageException; +import org.apache.jackrabbit.oak.segment.remote.RemoteUtilities; +import org.junit.Before; +import org.junit.ClassRule; +import org.junit.Test; + +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; + +public class AzureUtilitiesTest { + + @ClassRule + public static AzuriteDockerRule azurite = new AzuriteDockerRule(); + + private BlobContainerClient blobContainerClient; + private String archivePrefix = "oak/data00000a.tar/"; + private String archiveName = "data00000a.tar"; + + @Before + public void setup() throws BlobStorageException { + blobContainerClient = azurite.getReadBlobContainerClient("oak-test"); + } + + @Test + public void testArchiveExistsWhenArchiveHasBlobs() { + blobContainerClient.getBlobClient(archivePrefix + RemoteUtilities.getSegmentFileName(0, 0, 0)).getBlockBlobClient() + .upload(BinaryData.fromString("")); + + assertTrue("Archive should exist when it contains segment blob", + AzureUtilities.archiveExists(blobContainerClient, archivePrefix)); + } + + @Test + public void testArchiveExistsWhenArchiveIsEmpty() { + + assertFalse("Archive should not exist when no blobs are present", + AzureUtilities.archiveExists(blobContainerClient, archivePrefix)); + } + + @Test + public void testArchiveExistsWithArchiveMetadata() { + blobContainerClient.getBlobClient(archivePrefix + archiveName + ".brf").getBlockBlobClient() + .upload(BinaryData.fromString("")); + blobContainerClient.getBlobClient(archivePrefix + archiveName + ".gph").getBlockBlobClient() + .upload(BinaryData.fromString("")); + + assertTrue("Archive should exist when it contains metadata", + AzureUtilities.archiveExists(blobContainerClient, archivePrefix)); + } + + @Test + public void testArchiveExistsWithArchiveClosedMarker() { + blobContainerClient.getBlobClient(archivePrefix + "closed").getBlockBlobClient() + .upload(BinaryData.fromString("")); + + assertTrue("Archive should exist when it contains closed marker", + AzureUtilities.archiveExists(blobContainerClient, archivePrefix)); + } +} diff --git a/oak-segment-azure/src/test/java/org/apache/jackrabbit/oak/segment/azure/tool/SegmentCopyTestBase.java b/oak-segment-azure/src/test/java/org/apache/jackrabbit/oak/segment/azure/tool/SegmentCopyTestBase.java index c7f7f07606..45abdc9750 100644 --- a/oak-segment-azure/src/test/java/org/apache/jackrabbit/oak/segment/azure/tool/SegmentCopyTestBase.java +++ b/oak-segment-azure/src/test/java/org/apache/jackrabbit/oak/segment/azure/tool/SegmentCopyTestBase.java @@ -50,7 +50,6 @@ import org.apache.jackrabbit.oak.segment.SegmentCache; import org.apache.jackrabbit.oak.segment.SegmentNodeStore; import org.apache.jackrabbit.oak.segment.SegmentNodeStoreBuilders; import org.apache.jackrabbit.oak.segment.azure.v8.AzurePersistenceV8; -import org.apache.jackrabbit.oak.segment.azure.tool.SegmentCopy; import org.apache.jackrabbit.oak.segment.azure.tool.ToolUtils.SegmentStoreType; import org.apache.jackrabbit.oak.segment.compaction.SegmentGCOptions.CompactorType; import org.apache.jackrabbit.oak.segment.file.FileStore; @@ -113,9 +112,9 @@ public abstract class SegmentCopyTestBase { RemoteStoreMonitor remoteStoreMonitor = new RemoteStoreMonitorAdapter(); FileStoreMonitor fileStoreMonitor = new FileStoreMonitorAdapter(); SegmentArchiveManager srcArchiveManager = srcPersistence.createArchiveManager(false, 
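The new AzureUtilitiesTest only fixes the observable behaviour of archiveExists: an archive counts as existing as soon as any blob sits under its prefix, whether that is a segment, the .brf/.gph metadata or the closed marker. A plausible implementation with that behaviour, shown here purely as a sketch that may differ from the real AzureUtilities method, is a prefix listing capped at a single result:

    import com.azure.storage.blob.BlobContainerClient;
    import com.azure.storage.blob.models.ListBlobsOptions;

    // Sketch of an archiveExists() check with the behaviour the tests assert:
    // the archive "exists" as soon as any blob is found under its prefix.
    final class ArchiveExistsSketch {

        static boolean archiveExists(BlobContainerClient containerClient, String archivePrefix) {
            ListBlobsOptions options = new ListBlobsOptions()
                    .setPrefix(archivePrefix)
                    .setMaxResultsPerPage(1);   // one hit is enough to decide
            return containerClient.listBlobs(options, null).iterator().hasNext();
        }
    }

The maxresults=1 list expectation added to the AzureSegmentArchiveWriterTest mock above matches this shape of request.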
false, ioMonitor, - fileStoreMonitor, remoteStoreMonitor); + fileStoreMonitor, remoteStoreMonitor, true); SegmentArchiveManager destArchiveManager = destPersistence.createArchiveManager(false, false, ioMonitor, - fileStoreMonitor, remoteStoreMonitor); + fileStoreMonitor, remoteStoreMonitor, false); checkArchives(srcArchiveManager, destArchiveManager); checkJournal(srcPersistence, destPersistence); diff --git a/oak-segment-azure/src/test/java/org/apache/jackrabbit/oak/segment/azure/v8/AzureArchiveManagerV8Test.java b/oak-segment-azure/src/test/java/org/apache/jackrabbit/oak/segment/azure/v8/AzureArchiveManagerV8Test.java index 5305018ba3..e89851e036 100644 --- a/oak-segment-azure/src/test/java/org/apache/jackrabbit/oak/segment/azure/v8/AzureArchiveManagerV8Test.java +++ b/oak-segment-azure/src/test/java/org/apache/jackrabbit/oak/segment/azure/v8/AzureArchiveManagerV8Test.java @@ -92,6 +92,7 @@ public class AzureArchiveManagerV8Test { private CloudBlobContainer container; private AzurePersistenceV8 azurePersistenceV8; + private boolean readOnly = false; @Before public void setup() throws StorageException, InvalidKeyException, URISyntaxException { @@ -110,7 +111,7 @@ public class AzureArchiveManagerV8Test { @Test public void testRecovery() throws StorageException, URISyntaxException, IOException { - SegmentArchiveManager manager = azurePersistenceV8.createArchiveManager(false, false, new IOMonitorAdapter(), new FileStoreMonitorAdapter(), new RemoteStoreMonitorAdapter()); + SegmentArchiveManager manager = azurePersistenceV8.createArchiveManager(false, false, new IOMonitorAdapter(), new FileStoreMonitorAdapter(), new RemoteStoreMonitorAdapter(), readOnly); SegmentArchiveWriter writer = manager.create("data00000a.tar"); List<UUID> uuids = new ArrayList<>(); @@ -132,7 +133,7 @@ public class AzureArchiveManagerV8Test { @Test public void testBackupWithRecoveredEntries() throws StorageException, URISyntaxException, IOException { - SegmentArchiveManager manager = azurePersistenceV8.createArchiveManager(false, false, new IOMonitorAdapter(), new FileStoreMonitorAdapter(), new RemoteStoreMonitorAdapter()); + SegmentArchiveManager manager = azurePersistenceV8.createArchiveManager(false, false, new IOMonitorAdapter(), new FileStoreMonitorAdapter(), new RemoteStoreMonitorAdapter(), readOnly); SegmentArchiveWriter writer = manager.create("data00000a.tar"); List<UUID> uuids = new ArrayList<>(); @@ -271,7 +272,7 @@ public class AzureArchiveManagerV8Test { @Test public void testExists() throws IOException, URISyntaxException { - SegmentArchiveManager manager = azurePersistenceV8.createArchiveManager(false, false, new IOMonitorAdapter(), new FileStoreMonitorAdapter(), new RemoteStoreMonitorAdapter()); + SegmentArchiveManager manager = azurePersistenceV8.createArchiveManager(false, false, new IOMonitorAdapter(), new FileStoreMonitorAdapter(), new RemoteStoreMonitorAdapter(), readOnly); SegmentArchiveWriter writer = manager.create("data00000a.tar"); List<UUID> uuids = new ArrayList<>(); @@ -290,7 +291,7 @@ public class AzureArchiveManagerV8Test { @Test public void testArchiveExistsAfterFlush() throws URISyntaxException, IOException { - SegmentArchiveManager manager = azurePersistenceV8.createArchiveManager(false, false, new IOMonitorAdapter(), new FileStoreMonitorAdapter(), new RemoteStoreMonitorAdapter()); + SegmentArchiveManager manager = azurePersistenceV8.createArchiveManager(false, false, new IOMonitorAdapter(), new FileStoreMonitorAdapter(), new RemoteStoreMonitorAdapter(), readOnly); SegmentArchiveWriter 
writer = manager.create("data00000a.tar"); Assert.assertFalse(manager.exists("data00000a.tar")); @@ -302,7 +303,7 @@ public class AzureArchiveManagerV8Test { @Test(expected = FileNotFoundException.class) public void testSegmentDeletedAfterCreatingReader() throws IOException, URISyntaxException, StorageException { - SegmentArchiveManager manager = azurePersistenceV8.createArchiveManager(false, false, new IOMonitorAdapter(), new FileStoreMonitorAdapter(), new RemoteStoreMonitorAdapter()); + SegmentArchiveManager manager = azurePersistenceV8.createArchiveManager(false, false, new IOMonitorAdapter(), new FileStoreMonitorAdapter(), new RemoteStoreMonitorAdapter(), readOnly); SegmentArchiveWriter writer = manager.create("data00000a.tar"); Assert.assertFalse(manager.exists("data00000a.tar")); @@ -333,7 +334,7 @@ public class AzureArchiveManagerV8Test { AzurePersistenceV8 azurePersistenceV8 = new AzurePersistenceV8(container.getDirectoryReference("oak")); FileStore fileStore = FileStoreBuilder.fileStoreBuilder(new File("target")).withCustomPersistence(azurePersistenceV8).build(); - SegmentArchiveManager manager = azurePersistenceV8.createArchiveManager(false, false, new IOMonitorAdapter(), new FileStoreMonitorAdapter(), new RemoteStoreMonitorAdapter()); + SegmentArchiveManager manager = azurePersistenceV8.createArchiveManager(false, false, new IOMonitorAdapter(), new FileStoreMonitorAdapter(), new RemoteStoreMonitorAdapter(), readOnly); SegmentArchiveWriter writer = manager.create("data00000a.tar"); //Assert.assertFalse(manager.exists("data00000a.tar")); @@ -492,7 +493,7 @@ public class AzureArchiveManagerV8Test { AzurePersistenceV8 mockedRwPersistence = Mockito.spy(rwPersistence); WriteAccessController writeAccessController = new WriteAccessController(); AzureRepositoryLockV8 azureRepositoryLockV8 = new AzureRepositoryLockV8(blobMocked, () -> {}, writeAccessController); - AzureArchiveManagerV8 azureArchiveManagerV8 = new AzureArchiveManagerV8(oakDirectory, new IOMonitorAdapter(), new FileStoreMonitorAdapter(), writeAccessController); + AzureArchiveManagerV8 azureArchiveManagerV8 = new AzureArchiveManagerV8(oakDirectory, new IOMonitorAdapter(), new FileStoreMonitorAdapter(), writeAccessController, readOnly); Mockito @@ -501,7 +502,7 @@ public class AzureArchiveManagerV8Test { Mockito .doReturn(azureArchiveManagerV8) - .when(mockedRwPersistence).createArchiveManager(Mockito.anyBoolean(), Mockito.anyBoolean(), Mockito.any(), Mockito.any(), Mockito.any()); + .when(mockedRwPersistence).createArchiveManager(Mockito.anyBoolean(), Mockito.anyBoolean(), Mockito.any(), Mockito.any(), Mockito.any(), Mockito.eq(readOnly)); Mockito .doReturn(new AzureJournalFileV8(oakDirectory, "journal.log", writeAccessController)) .when(mockedRwPersistence).getJournalFile(); @@ -547,6 +548,106 @@ public class AzureArchiveManagerV8Test { rwFileStore2.close(); } + + @Test + public void testListArchivesDoesNotReturnDeletedArchive() throws IOException, URISyntaxException, StorageException { + // The archive manager should not return the archive which has "deleted" marker + SegmentArchiveManager manager = azurePersistenceV8.createArchiveManager(false, false, new IOMonitorAdapter(), new FileStoreMonitorAdapter(), new RemoteStoreMonitorAdapter(), readOnly); + + // Create an archive + SegmentArchiveWriter writer = manager.create("data00000a.tar"); + UUID u = UUID.randomUUID(); + writer.writeSegment(u.getMostSignificantBits(), u.getLeastSignificantBits(), new byte[10], 0, 10, 0, 0, false); + writer.flush(); + writer.close(); + + // 
Verify the archive is listed + List<String> archives = manager.listArchives(); + assertTrue("Archive should be listed before deletion", archives.contains("data00000a.tar")); + + // Upload deleted marker for the archive + CloudBlobDirectory archiveDirectory = container.getDirectoryReference("oak/data00000a.tar"); + archiveDirectory.getBlockBlobReference("deleted").openOutputStream().close(); + + // Verify the archive is no longer listed after adding deleted marker + archives = manager.listArchives(); + assertFalse("Archive should not be listed after deleted marker is uploaded", archives.contains("data00000a.tar")); + } + + @Test + public void testListArchivesInReadOnlyModeWithPartiallyDeletedArchive() throws IOException, URISyntaxException, StorageException { + // Create a read-write manager first to create an archive + SegmentArchiveManager rwManager = azurePersistenceV8.createArchiveManager(false, false, new IOMonitorAdapter(), new FileStoreMonitorAdapter(), new RemoteStoreMonitorAdapter(), false); + + SegmentArchiveWriter writer = rwManager.create("data00000b.tar"); + UUID u1 = UUID.randomUUID(); + UUID u2 = UUID.randomUUID(); + writer.writeSegment(u1.getMostSignificantBits(), u1.getLeastSignificantBits(), new byte[10], 0, 10, 0, 0, false); + writer.writeSegment(u2.getMostSignificantBits(), u2.getLeastSignificantBits(), new byte[10], 0, 10, 0, 0, false); + writer.flush(); + writer.close(); + + // Verify the archive is initially listed + List<String> archives = rwManager.listArchives(); + assertTrue("Archive should be listed initially", archives.contains("data00000b.tar")); + + // Add deleted marker - simulates partially deleted archive + CloudBlobDirectory archiveDirectory = container.getDirectoryReference("oak/data00000b.tar"); + archiveDirectory.getBlockBlobReference("deleted").openOutputStream().close(); + + assertTrue("Archive directory should still contain blobs", + archiveDirectory.listBlobs().iterator().hasNext()); + + // Create a read-only manager + SegmentArchiveManager roManager = azurePersistenceV8.createArchiveManager(false, false, new IOMonitorAdapter(), new FileStoreMonitorAdapter(), new RemoteStoreMonitorAdapter(), true); + + archives = roManager.listArchives(); + assertFalse("Partially deleted archive should not be listed in read-only mode", archives.contains("data00000b.tar")); + + assertTrue("Archive directory should still contain blobs after read-only listArchives", + archiveDirectory.listBlobs().iterator().hasNext()); + + assertTrue("Deleted marker should still exist", + archiveDirectory.getBlockBlobReference("deleted").exists()); + } + + @Test + public void testListArchivesInReadWriteModeWithPartiallyDeletedArchive() throws IOException, URISyntaxException, StorageException { + // Create a read-write manager to create an archive + SegmentArchiveManager rwManager = azurePersistenceV8.createArchiveManager(false, false, new IOMonitorAdapter(), new FileStoreMonitorAdapter(), new RemoteStoreMonitorAdapter(), false); + + // Create an archive with some segments + SegmentArchiveWriter writer = rwManager.create("data00000c.tar"); + UUID u1 = UUID.randomUUID(); + UUID u2 = UUID.randomUUID(); + writer.writeSegment(u1.getMostSignificantBits(), u1.getLeastSignificantBits(), new byte[10], 0, 10, 0, 0, false); + writer.writeSegment(u2.getMostSignificantBits(), u2.getLeastSignificantBits(), new byte[10], 0, 10, 0, 0, false); + writer.flush(); + writer.close(); + + // Verify the archive is initially listed + List<String> archives = rwManager.listArchives(); + assertTrue("Archive 
should be listed initially", archives.contains("data00000c.tar")); + + // Add deleted marker - simulates partially deleted archive + CloudBlobDirectory archiveDirectory = container.getDirectoryReference("oak/data00000c.tar"); + archiveDirectory.getBlockBlobReference("deleted").openOutputStream().close(); + + assertTrue("Archive directory should still contain blobs before cleanup", + archiveDirectory.listBlobs().iterator().hasNext()); + + assertTrue("Deleted marker should exist before cleanup", + archiveDirectory.getBlockBlobReference("deleted").exists()); + + archives = rwManager.listArchives(); + assertFalse("Partially deleted archive should not be listed in read-write mode", archives.contains("data00000c.tar")); + + assertFalse("Archive directory should be empty after read-write listArchives cleanup", + archiveDirectory.listBlobs().iterator().hasNext()); + + assertFalse("Deleted marker should be removed after cleanup", + archiveDirectory.getBlockBlobReference("deleted").exists()); + } private PersistentCache createPersistenceCache() { return new AbstractPersistentCache() { diff --git a/oak-segment-azure/src/test/java/org/apache/jackrabbit/oak/segment/azure/v8/AzureReadSegmentV8Test.java b/oak-segment-azure/src/test/java/org/apache/jackrabbit/oak/segment/azure/v8/AzureReadSegmentV8Test.java index ff3a2d422f..b80bd9b980 100644 --- a/oak-segment-azure/src/test/java/org/apache/jackrabbit/oak/segment/azure/v8/AzureReadSegmentV8Test.java +++ b/oak-segment-azure/src/test/java/org/apache/jackrabbit/oak/segment/azure/v8/AzureReadSegmentV8Test.java @@ -93,8 +93,8 @@ public class AzureReadSegmentV8Test { @Override public SegmentArchiveManager createArchiveManager(boolean mmap, boolean offHeapAccess, IOMonitor ioMonitor, - FileStoreMonitor fileStoreMonitor, RemoteStoreMonitor remoteStoreMonitor) { - return new AzureArchiveManagerV8(segmentstoreDirectory, ioMonitor, fileStoreMonitor, writeAccessController) { + FileStoreMonitor fileStoreMonitor, RemoteStoreMonitor remoteStoreMonitor, boolean readOnly) { + return new AzureArchiveManagerV8(segmentstoreDirectory, ioMonitor, fileStoreMonitor, writeAccessController, readOnly) { @Override public SegmentArchiveReader open(String archiveName) throws IOException { CloudBlobDirectory archiveDirectory = getDirectory(archiveName); diff --git a/oak-segment-azure/src/test/java/org/apache/jackrabbit/oak/segment/azure/v8/AzureSegmentArchiveWriterV8Test.java b/oak-segment-azure/src/test/java/org/apache/jackrabbit/oak/segment/azure/v8/AzureSegmentArchiveWriterV8Test.java index e659701cdb..bc43f0fff7 100644 --- a/oak-segment-azure/src/test/java/org/apache/jackrabbit/oak/segment/azure/v8/AzureSegmentArchiveWriterV8Test.java +++ b/oak-segment-azure/src/test/java/org/apache/jackrabbit/oak/segment/azure/v8/AzureSegmentArchiveWriterV8Test.java @@ -182,11 +182,14 @@ public class AzureSegmentArchiveWriterV8Test { @NotNull private SegmentArchiveWriter createSegmentArchiveWriter() throws URISyntaxException, IOException { + // Mock the list blobs operation that's called during AzureSegmentArchiveWriterV8 initialization + expectListBlobsRequest(); + WriteAccessController writeAccessController = new WriteAccessController(); writeAccessController.enableWriting(); AzurePersistenceV8 azurePersistenceV8 = new AzurePersistenceV8(container.getDirectoryReference("oak"));/**/ azurePersistenceV8.setWriteAccessController(writeAccessController); - SegmentArchiveManager manager = azurePersistenceV8.createArchiveManager(false, false, new IOMonitorAdapter(), new FileStoreMonitorAdapter(), new 
RemoteStoreMonitorAdapter()); + SegmentArchiveManager manager = azurePersistenceV8.createArchiveManager(false, false, new IOMonitorAdapter(), new FileStoreMonitorAdapter(), new RemoteStoreMonitorAdapter(), false); SegmentArchiveWriter writer = manager.create("data00000a.tar"); return writer; } @@ -223,6 +226,23 @@ public class AzureSegmentArchiveWriterV8Test { .withBody(new BinaryBody(new byte[10])); } + private void expectListBlobsRequest() { + mockServerClient + .when(request() + .withMethod("GET") + .withPath(BASE_PATH) + .withQueryStringParameter("comp", "list") + .withQueryStringParameter("prefix", "oak/data00000a.tar/"), Times.once()) + .respond(response() + .withStatusCode(200) + .withHeader("Content-Type", "application/xml") + .withBody("<?xml version=\"1.0\" encoding=\"utf-8\"?>" + + "<EnumerationResults ServiceEndpoint=\"http://127.0.0.1:10000/devstoreaccount1\" ContainerName=\"oak-test\">" + + "<Prefix></Prefix>" + + "<Blobs></Blobs>" + + "</EnumerationResults>")); + } + @NotNull private CloudBlobContainer createCloudBlobContainer() throws URISyntaxException, StorageException { URI uri = new URIBuilder() diff --git a/oak-segment-azure/src/test/java/org/apache/jackrabbit/oak/segment/azure/v8/AzureTarFileV8Test.java b/oak-segment-azure/src/test/java/org/apache/jackrabbit/oak/segment/azure/v8/AzureTarFileV8Test.java index 55d0d270a6..b623ed434f 100644 --- a/oak-segment-azure/src/test/java/org/apache/jackrabbit/oak/segment/azure/v8/AzureTarFileV8Test.java +++ b/oak-segment-azure/src/test/java/org/apache/jackrabbit/oak/segment/azure/v8/AzureTarFileV8Test.java @@ -50,7 +50,7 @@ public class AzureTarFileV8Test extends TarFileTest { WriteAccessController writeAccessController = new WriteAccessController(); writeAccessController.enableWriting(); azurePersistenceV8.setWriteAccessController(writeAccessController); - archiveManager = azurePersistenceV8.createArchiveManager(true, false, new IOMonitorAdapter(), new FileStoreMonitorAdapter(), new RemoteStoreMonitorAdapter()); + archiveManager = azurePersistenceV8.createArchiveManager(true, false, new IOMonitorAdapter(), new FileStoreMonitorAdapter(), new RemoteStoreMonitorAdapter(), false); } catch (StorageException | InvalidKeyException | URISyntaxException e) { throw new IOException(e); } diff --git a/oak-segment-azure/src/test/java/org/apache/jackrabbit/oak/segment/azure/v8/AzureTarWriterV8Test.java b/oak-segment-azure/src/test/java/org/apache/jackrabbit/oak/segment/azure/v8/AzureTarWriterV8Test.java index 18421c74e7..2c2c0c7865 100644 --- a/oak-segment-azure/src/test/java/org/apache/jackrabbit/oak/segment/azure/v8/AzureTarWriterV8Test.java +++ b/oak-segment-azure/src/test/java/org/apache/jackrabbit/oak/segment/azure/v8/AzureTarWriterV8Test.java @@ -47,7 +47,7 @@ public class AzureTarWriterV8Test extends TarWriterTest { protected SegmentArchiveManager getSegmentArchiveManager() throws Exception { WriteAccessController writeAccessController = new WriteAccessController(); writeAccessController.enableWriting(); - AzureArchiveManagerV8 azureArchiveManagerV8 = new AzureArchiveManagerV8(container.getDirectoryReference("oak"), new IOMonitorAdapter(), monitor, writeAccessController); + AzureArchiveManagerV8 azureArchiveManagerV8 = new AzureArchiveManagerV8(container.getDirectoryReference("oak"), new IOMonitorAdapter(), monitor, writeAccessController, false); return azureArchiveManagerV8; } @@ -56,7 +56,7 @@ public class AzureTarWriterV8Test extends TarWriterTest { protected SegmentArchiveManager getFailingSegmentArchiveManager() throws Exception { 
final WriteAccessController writeAccessController = new WriteAccessController(); writeAccessController.enableWriting(); - return new AzureArchiveManagerV8(container.getDirectoryReference("oak"), new IOMonitorAdapter(), monitor, writeAccessController) { + return new AzureArchiveManagerV8(container.getDirectoryReference("oak"), new IOMonitorAdapter(), monitor, writeAccessController, false) { @Override public SegmentArchiveWriter create(String archiveName) throws IOException { return new AzureSegmentArchiveWriterV8(getDirectory(archiveName), ioMonitor, monitor, writeAccessController) { diff --git a/oak-segment-azure/src/test/java/org/apache/jackrabbit/oak/segment/spi/persistence/split/SplitPersistenceTest.java b/oak-segment-azure/src/test/java/org/apache/jackrabbit/oak/segment/spi/persistence/split/SplitPersistenceTest.java index 49c0ac6ae8..23eee8ce8d 100644 --- a/oak-segment-azure/src/test/java/org/apache/jackrabbit/oak/segment/spi/persistence/split/SplitPersistenceTest.java +++ b/oak-segment-azure/src/test/java/org/apache/jackrabbit/oak/segment/spi/persistence/split/SplitPersistenceTest.java @@ -138,7 +138,7 @@ public class SplitPersistenceTest { splitFileStore.close(); splitFileStore = null; - SegmentArchiveManager manager = splitPersistence.createArchiveManager(true, false, new IOMonitorAdapter(), new FileStoreMonitorAdapter(), new RemoteStoreMonitorAdapter()); + SegmentArchiveManager manager = splitPersistence.createArchiveManager(true, false, new IOMonitorAdapter(), new FileStoreMonitorAdapter(), new RemoteStoreMonitorAdapter(), false); for (String archive : manager.listArchives()) { SegmentArchiveReader reader = manager.open(archive); BinaryReferencesIndexLoader.parseBinaryReferencesIndex(reader.getBinaryReferences()); diff --git a/oak-segment-azure/src/test/java/org/apache/jackrabbit/oak/segment/spi/persistence/split/v8/SplitPersistenceV8Test.java b/oak-segment-azure/src/test/java/org/apache/jackrabbit/oak/segment/spi/persistence/split/v8/SplitPersistenceV8Test.java index a5af0abb47..e87d55a04a 100644 --- a/oak-segment-azure/src/test/java/org/apache/jackrabbit/oak/segment/spi/persistence/split/v8/SplitPersistenceV8Test.java +++ b/oak-segment-azure/src/test/java/org/apache/jackrabbit/oak/segment/spi/persistence/split/v8/SplitPersistenceV8Test.java @@ -134,7 +134,7 @@ public class SplitPersistenceV8Test { splitFileStore.close(); splitFileStore = null; - SegmentArchiveManager manager = splitPersistence.createArchiveManager(true, false, new IOMonitorAdapter(), new FileStoreMonitorAdapter(), new RemoteStoreMonitorAdapter()); + SegmentArchiveManager manager = splitPersistence.createArchiveManager(true, false, new IOMonitorAdapter(), new FileStoreMonitorAdapter(), new RemoteStoreMonitorAdapter(), false); for (String archive : manager.listArchives()) { SegmentArchiveReader reader = manager.open(archive); BinaryReferencesIndexLoader.parseBinaryReferencesIndex(reader.getBinaryReferences()); diff --git a/oak-segment-remote/src/main/java/org/apache/jackrabbit/oak/segment/remote/RemoteUtilities.java b/oak-segment-remote/src/main/java/org/apache/jackrabbit/oak/segment/remote/RemoteUtilities.java index 9de3ccb6e7..91fa3b173d 100644 --- a/oak-segment-remote/src/main/java/org/apache/jackrabbit/oak/segment/remote/RemoteUtilities.java +++ b/oak-segment-remote/src/main/java/org/apache/jackrabbit/oak/segment/remote/RemoteUtilities.java @@ -53,6 +53,10 @@ public final class RemoteUtilities { return UUID.fromString(m.group(2)); } + public static boolean isSegmentName(String name) { + return null != name && 
PATTERN.matcher(name).matches(); + } + private static class ArchiveIndexComparator implements Comparator<String> { final static Pattern indexPattern = Pattern.compile("[0-9]+"); diff --git a/oak-segment-remote/src/test/java/org/apache/jackrabbit/oak/segment/remote/RemoteUtilitiesTest.java b/oak-segment-remote/src/test/java/org/apache/jackrabbit/oak/segment/remote/RemoteUtilitiesTest.java index be445659d6..a7b47e13e2 100644 --- a/oak-segment-remote/src/test/java/org/apache/jackrabbit/oak/segment/remote/RemoteUtilitiesTest.java +++ b/oak-segment-remote/src/test/java/org/apache/jackrabbit/oak/segment/remote/RemoteUtilitiesTest.java @@ -26,6 +26,8 @@ import java.util.UUID; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotEquals; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.assertFalse; public class RemoteUtilitiesTest { @Test @@ -66,4 +68,33 @@ public class RemoteUtilitiesTest { public void testSortArchivesLargeIndices() { expectArchiveSortOrder(Arrays.asList("data00003a.tar", "data20000a.tar", "data100000a.tar")); } + + @Test + public void testIsSegmentName_ValidName() { + UUID uuid = UUID.randomUUID(); + String validName = RemoteUtilities.getSegmentFileName(0, uuid.getMostSignificantBits(), uuid.getLeastSignificantBits()); + assertTrue(RemoteUtilities.isSegmentName(validName)); + + String validMaxName = RemoteUtilities.getSegmentFileName( + RemoteUtilities.MAX_ENTRY_COUNT - 1, + uuid.getMostSignificantBits(), + uuid.getLeastSignificantBits() + ); + assertTrue(RemoteUtilities.isSegmentName(validMaxName)); + } + + @Test + public void testIsSegmentName_InvalidNames() { + // closed marker + assertFalse(RemoteUtilities.isSegmentName("closed")); + + // metadata files + assertFalse(RemoteUtilities.isSegmentName("data00000a.tar.brf")); + assertFalse(RemoteUtilities.isSegmentName("data00000a.tar.gph")); + assertFalse(RemoteUtilities.isSegmentName("data00000a.tar.idx")); + + // empty value + assertFalse(RemoteUtilities.isSegmentName("")); + assertFalse(RemoteUtilities.isSegmentName(null)); + } } diff --git a/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/file/FileStoreProcBackend.java b/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/file/FileStoreProcBackend.java index 46901c5704..f257c429ef 100644 --- a/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/file/FileStoreProcBackend.java +++ b/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/file/FileStoreProcBackend.java @@ -56,7 +56,7 @@ class FileStoreProcBackend implements Backend { FileStoreProcBackend(AbstractFileStore fileStore, SegmentNodeStorePersistence persistence) throws IOException { this.fileStore = fileStore; this.persistence = persistence; - this.archiveManager = persistence.createArchiveManager(true, false, new IOMonitorAdapter(), new FileStoreMonitorAdapter(), new RemoteStoreMonitorAdapter()); + this.archiveManager = persistence.createArchiveManager(true, false, new IOMonitorAdapter(), new FileStoreMonitorAdapter(), new RemoteStoreMonitorAdapter(), true); } @Override diff --git a/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/file/tar/TarFiles.java b/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/file/tar/TarFiles.java index e32a5519e2..1a04232753 100644 --- a/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/file/tar/TarFiles.java +++ b/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/file/tar/TarFiles.java @@ -256,7 +256,7 @@ public class 
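The new RemoteUtilities.isSegmentName(String) gives callers a null-safe way to tell segment blobs apart from auxiliary entries such as the closed and deleted markers or the .brf/.gph/.idx metadata that share the archive prefix. A small self-contained usage sketch, relying only on the API visible in this commit:

    import org.apache.jackrabbit.oak.segment.remote.RemoteUtilities;

    import java.util.Arrays;
    import java.util.List;
    import java.util.UUID;
    import java.util.stream.Collectors;

    // Usage sketch for the new RemoteUtilities.isSegmentName() helper.
    final class SegmentNameFilterSketch {

        public static void main(String[] args) {
            UUID uuid = UUID.randomUUID();
            String segment = RemoteUtilities.getSegmentFileName(
                    0, uuid.getMostSignificantBits(), uuid.getLeastSignificantBits());

            List<String> blobNames = Arrays.asList(segment, "closed", "deleted", "data00000a.tar.brf");

            // Keep only the blobs that are actual segments.
            List<String> segments = blobNames.stream()
                    .filter(RemoteUtilities::isSegmentName)
                    .collect(Collectors.toList());

            System.out.println(segments);   // prints only the generated segment name
        }
    }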
TarFiles implements Closeable { } private SegmentArchiveManager buildArchiveManager() throws IOException { - return persistence.createArchiveManager(memoryMapping, offHeapAccess, ioMonitor, readOnly && fileStoreMonitor == null ? new FileStoreMonitorAdapter() : fileStoreMonitor, remoteStoreMonitor); + return persistence.createArchiveManager(memoryMapping, offHeapAccess, ioMonitor, readOnly && fileStoreMonitor == null ? new FileStoreMonitorAdapter() : fileStoreMonitor, remoteStoreMonitor, readOnly); } } diff --git a/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/file/tar/TarPersistence.java b/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/file/tar/TarPersistence.java index 3fcf503311..d0146593cd 100644 --- a/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/file/tar/TarPersistence.java +++ b/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/file/tar/TarPersistence.java @@ -63,7 +63,7 @@ public class TarPersistence implements SegmentNodeStorePersistence { @Override public SegmentArchiveManager createArchiveManager(boolean memoryMapping, boolean offHeapAccess, IOMonitor ioMonitor, - FileStoreMonitor fileStoreMonitor, RemoteStoreMonitor remoteStoreMonitor) { + FileStoreMonitor fileStoreMonitor, RemoteStoreMonitor remoteStoreMonitor, boolean readOnly) { return new SegmentTarManager(directory, fileStoreMonitor, ioMonitor, memoryMapping, offHeapAccess); } diff --git a/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/spi/persistence/SegmentNodeStorePersistence.java b/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/spi/persistence/SegmentNodeStorePersistence.java index b59b2815c6..d4fadf03a3 100644 --- a/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/spi/persistence/SegmentNodeStorePersistence.java +++ b/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/spi/persistence/SegmentNodeStorePersistence.java @@ -34,18 +34,20 @@ public interface SegmentNodeStorePersistence { * Opens a new archive manager. It'll be used to access the archives containing * segments. * - * @param memoryMapping whether the memory mapping should be used (if the given - * persistence supports it) - * @param offHeapAccess whether off heap access for segments should be used - * @param ioMonitor object used to monitor segment-related IO access. The - * implementation should call the appropriate methods when - * accessing segments. + * @param memoryMapping whether the memory mapping should be used (if the given + * persistence supports it) + * @param offHeapAccess whether off heap access for segments should be used + * @param ioMonitor object used to monitor segment-related IO access. The + * implementation should call the appropriate methods when + * accessing segments. * @param fileStoreMonitor object used to monitor the general IO usage. 
+ * @param readOnly whether segment archive manager should be opened in read-only mode + * * @return segment archive manager * @throws IOException */ SegmentArchiveManager createArchiveManager(boolean memoryMapping, boolean offHeapAccess, IOMonitor ioMonitor, - FileStoreMonitor fileStoreMonitor, RemoteStoreMonitor remoteStoreMonitor) throws IOException; + FileStoreMonitor fileStoreMonitor, RemoteStoreMonitor remoteStoreMonitor, boolean readOnly) throws IOException; /** * Check if the segment store already contains any segments diff --git a/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/spi/persistence/persistentcache/CachingPersistence.java b/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/spi/persistence/persistentcache/CachingPersistence.java index 44a174f0f4..889be2b8cc 100644 --- a/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/spi/persistence/persistentcache/CachingPersistence.java +++ b/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/spi/persistence/persistentcache/CachingPersistence.java @@ -41,8 +41,8 @@ public class CachingPersistence implements SegmentNodeStorePersistence { @Override public SegmentArchiveManager createArchiveManager(boolean memoryMapping, boolean offHeapAccess, IOMonitor ioMonitor, - FileStoreMonitor fileStoreMonitor, RemoteStoreMonitor remoteStoreMonitor) throws IOException { - return new CachingArchiveManager(persistentCache, delegate.createArchiveManager(memoryMapping, offHeapAccess, ioMonitor, fileStoreMonitor, remoteStoreMonitor)); + FileStoreMonitor fileStoreMonitor, RemoteStoreMonitor remoteStoreMonitor, boolean readOnly) throws IOException { + return new CachingArchiveManager(persistentCache, delegate.createArchiveManager(memoryMapping, offHeapAccess, ioMonitor, fileStoreMonitor, remoteStoreMonitor, readOnly)); } @Override diff --git a/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/spi/persistence/split/SplitPersistence.java b/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/spi/persistence/split/SplitPersistence.java index 703555daf4..6553f0d9ef 100644 --- a/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/spi/persistence/split/SplitPersistence.java +++ b/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/spi/persistence/split/SplitPersistence.java @@ -74,7 +74,7 @@ public class SplitPersistence implements SegmentNodeStorePersistence { } private Optional<String> getLastArchive() throws IOException { - SegmentArchiveManager manager = roPersistence.createArchiveManager(false, false, new IOMonitorAdapter(), new FileStoreMonitorAdapter(), new RemoteStoreMonitorAdapter()); + SegmentArchiveManager manager = roPersistence.createArchiveManager(false, false, new IOMonitorAdapter(), new FileStoreMonitorAdapter(), new RemoteStoreMonitorAdapter(), true); List<String> archives = manager.listArchives(); if (archives.isEmpty()) { return Optional.empty(); @@ -85,14 +85,14 @@ public class SplitPersistence implements SegmentNodeStorePersistence { } @Override - public SegmentArchiveManager createArchiveManager(boolean memoryMapping, boolean offHeapAccess, IOMonitor ioMonitor, FileStoreMonitor fileStoreMonitor, RemoteStoreMonitor remoteStoreMonitor) throws IOException { + public SegmentArchiveManager createArchiveManager(boolean memoryMapping, boolean offHeapAccess, IOMonitor ioMonitor, FileStoreMonitor fileStoreMonitor, RemoteStoreMonitor remoteStoreMonitor, boolean readOnly) throws IOException { if (lastRoArchive.isPresent()) { return new 
SplitSegmentArchiveManager( - roPersistence.createArchiveManager(memoryMapping, offHeapAccess, ioMonitor, fileStoreMonitor, remoteStoreMonitor), - rwPersistence.createArchiveManager(memoryMapping, offHeapAccess, ioMonitor, fileStoreMonitor, new RemoteStoreMonitorAdapter()), + roPersistence.createArchiveManager(memoryMapping, offHeapAccess, ioMonitor, fileStoreMonitor, remoteStoreMonitor, true), + rwPersistence.createArchiveManager(memoryMapping, offHeapAccess, ioMonitor, fileStoreMonitor, new RemoteStoreMonitorAdapter(), readOnly), lastRoArchive.get()); } else { - return rwPersistence.createArchiveManager(memoryMapping, offHeapAccess, ioMonitor, fileStoreMonitor, new RemoteStoreMonitorAdapter()); + return rwPersistence.createArchiveManager(memoryMapping, offHeapAccess, ioMonitor, fileStoreMonitor, new RemoteStoreMonitorAdapter(), readOnly); } } diff --git a/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/spi/persistence/split/package-info.java b/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/spi/persistence/split/package-info.java index 2e21b7b6b8..ae0e46edba 100644 --- a/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/spi/persistence/split/package-info.java +++ b/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/spi/persistence/split/package-info.java @@ -15,7 +15,7 @@ * limitations under the License. */ @Internal(since = "1.0.0") -@Version("1.0.0") +@Version("2.0.0") package org.apache.jackrabbit.oak.segment.spi.persistence.split; import org.apache.jackrabbit.oak.commons.annotations.Internal; diff --git a/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/FailedFlushTest.java b/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/FailedFlushTest.java index d23a0ba720..d379a1db25 100644 --- a/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/FailedFlushTest.java +++ b/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/FailedFlushTest.java @@ -125,7 +125,7 @@ public class FailedFlushTest { @Override public SegmentArchiveManager createArchiveManager(boolean memoryMapping, boolean offHeapAccess, IOMonitor ioMonitor, FileStoreMonitor fileStoreMonitor, - RemoteStoreMonitor remoteStoreMonitor) { + RemoteStoreMonitor remoteStoreMonitor, boolean readOnly) { return new TestArchiveManager(dir, fileStoreMonitor, ioMonitor, memoryMapping, offHeapAccess); } }).build(); diff --git a/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/file/FileStoreTest.java b/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/file/FileStoreTest.java index d2e6f0d355..9dddf7d5d8 100644 --- a/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/file/FileStoreTest.java +++ b/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/file/FileStoreTest.java @@ -230,7 +230,7 @@ public class FileStoreTest { private static TarPersistence getPersistenceThrowingUnrecoverableExceptionOnClosingArchive(File directory) { return new TarPersistence(directory) { @Override - public SegmentArchiveManager createArchiveManager(boolean memoryMapping, boolean offHeapAccess, IOMonitor ioMonitor, FileStoreMonitor fileStoreMonitor, RemoteStoreMonitor remoteStoreMonitor) { + public SegmentArchiveManager createArchiveManager(boolean memoryMapping, boolean offHeapAccess, IOMonitor ioMonitor, FileStoreMonitor fileStoreMonitor, RemoteStoreMonitor remoteStoreMonitor, boolean readOnly) { return new SegmentTarManager(directory, fileStoreMonitor, ioMonitor, memoryMapping, offHeapAccess) { @Override 
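With the readOnly flag now threaded through SegmentNodeStorePersistence.createArchiveManager and forwarded by TarFiles, CachingPersistence and SplitPersistence, a manager opened read-only has no legitimate write path, so one way for an implementation to protect itself is to fail fast on mutating calls. The guard below only illustrates that pattern with invented names and is not taken from the Azure implementation:

    // Illustrative read-only guard; field, class and method names are assumptions.
    final class ReadOnlyGuardSketch {

        private final boolean readOnly;

        ReadOnlyGuardSketch(boolean readOnly) {
            this.readOnly = readOnly;
        }

        private void checkWritable(String operation) {
            if (readOnly) {
                throw new UnsupportedOperationException(
                        operation + " is not supported in read-only mode");
            }
        }

        // e.g. invoked at the top of a create(archiveName) or delete(archiveName) call
        void createArchive(String archiveName) {
            checkWritable("create");
            // ... proceed with the actual write path ...
        }
    }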
public SegmentArchiveWriter create(String archiveName) { diff --git a/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/file/tar/TarFilesTest.java b/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/file/tar/TarFilesTest.java index bc0dbce986..083f6e4515 100644 --- a/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/file/tar/TarFilesTest.java +++ b/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/file/tar/TarFilesTest.java @@ -469,8 +469,8 @@ public class TarFilesTest { @Override public SegmentArchiveManager createArchiveManager( boolean memoryMapping, boolean offHeapAccess, IOMonitor ioMonitor, - FileStoreMonitor fileStoreMonitor, RemoteStoreMonitor remoteStoreMonitor - ) { + FileStoreMonitor fileStoreMonitor, RemoteStoreMonitor remoteStoreMonitor, + boolean readOnly) { return new SegmentTarManager( segmentStoreDir, fsMonitor, ioMonitor, false, false ) {