This is an automated email from the ASF dual-hosted git repository.

miroslav pushed a commit to branch issue/OAK-9212_2
in repository https://gitbox.apache.org/repos/asf/jackrabbit-oak.git
commit 2289a4149e452d5d3708bcccb8d8f62e3d04a259
Author: Miroslav Smiljanic <miros...@apache.com>
AuthorDate: Wed Aug 27 17:06:36 2025 +0200

    OAK-11884 delete archive only if write access is allowed
---
 .../segment/azure/v8/AzureArchiveManagerV8.java    |  2 +-
 .../oak/segment/azure/v8/AzurePersistenceV8.java   |  6 +++
 .../azure/v8/AzureArchiveManagerV8Test.java        | 55 +++++++++++++++++++---
 3 files changed, 56 insertions(+), 7 deletions(-)

diff --git a/oak-segment-azure/src/main/java/org/apache/jackrabbit/oak/segment/azure/v8/AzureArchiveManagerV8.java b/oak-segment-azure/src/main/java/org/apache/jackrabbit/oak/segment/azure/v8/AzureArchiveManagerV8.java
index a732346809..6d168eb46d 100644
--- a/oak-segment-azure/src/main/java/org/apache/jackrabbit/oak/segment/azure/v8/AzureArchiveManagerV8.java
+++ b/oak-segment-azure/src/main/java/org/apache/jackrabbit/oak/segment/azure/v8/AzureArchiveManagerV8.java
@@ -90,7 +90,7 @@ public class AzureArchiveManagerV8 implements SegmentArchiveManager {
         while (it.hasNext()) {
             String archiveName = it.next();
             if (deleteInProgress(archiveName)) {
-                if (!isReadOnly) {
+                if (writeAccessController.isWritingAllowed()) {
                     delete(archiveName);
                 }
                 it.remove();
diff --git a/oak-segment-azure/src/main/java/org/apache/jackrabbit/oak/segment/azure/v8/AzurePersistenceV8.java b/oak-segment-azure/src/main/java/org/apache/jackrabbit/oak/segment/azure/v8/AzurePersistenceV8.java
index 0b056f768f..dfd72ba2af 100644
--- a/oak-segment-azure/src/main/java/org/apache/jackrabbit/oak/segment/azure/v8/AzurePersistenceV8.java
+++ b/oak-segment-azure/src/main/java/org/apache/jackrabbit/oak/segment/azure/v8/AzurePersistenceV8.java
@@ -42,6 +42,7 @@ import org.apache.jackrabbit.oak.segment.spi.persistence.ManifestFile;
 import org.apache.jackrabbit.oak.segment.spi.persistence.RepositoryLock;
 import org.apache.jackrabbit.oak.segment.spi.persistence.SegmentArchiveManager;
 import org.apache.jackrabbit.oak.segment.spi.persistence.SegmentNodeStorePersistence;
+import org.jetbrains.annotations.TestOnly;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -154,4 +155,9 @@ public class AzurePersistenceV8 implements SegmentNodeStorePersistence {
     public void setWriteAccessController(WriteAccessController writeAccessController) {
         this.writeAccessController = writeAccessController;
     }
+
+    @TestOnly
+    void disableWriting() {
+        writeAccessController.disableWriting();
+    }
 }
diff --git a/oak-segment-azure/src/test/java/org/apache/jackrabbit/oak/segment/azure/v8/AzureArchiveManagerV8Test.java b/oak-segment-azure/src/test/java/org/apache/jackrabbit/oak/segment/azure/v8/AzureArchiveManagerV8Test.java
index 9b326b129c..34d053de05 100644
--- a/oak-segment-azure/src/test/java/org/apache/jackrabbit/oak/segment/azure/v8/AzureArchiveManagerV8Test.java
+++ b/oak-segment-azure/src/test/java/org/apache/jackrabbit/oak/segment/azure/v8/AzureArchiveManagerV8Test.java
@@ -554,11 +554,7 @@ public class AzureArchiveManagerV8Test {
         SegmentArchiveManager manager = azurePersistenceV8.createArchiveManager(false, false, new IOMonitorAdapter(), new FileStoreMonitorAdapter(), new RemoteStoreMonitorAdapter());
 
         // Create an archive
-        SegmentArchiveWriter writer = manager.create("data00000a.tar");
-        UUID u = UUID.randomUUID();
-        writer.writeSegment(u.getMostSignificantBits(), u.getLeastSignificantBits(), new byte[10], 0, 10, 0, 0, false);
-        writer.flush();
-        writer.close();
+        createArchive(manager, "data00000a.tar");
 
         // Verify the archive is listed
         List<String> archives = manager.listArchives();
@@ -572,7 +568,54 @@ public class AzureArchiveManagerV8Test {
         archives = manager.listArchives();
         assertFalse("Archive should not be listed after deleted marker is uploaded", archives.contains("data00000a.tar"));
     }
-
+
+    @Test
+    public void testListArchiveWithDeleteMarkerPresent() throws Exception {
+        SegmentArchiveManager manager = azurePersistenceV8.createArchiveManager(false, false, new IOMonitorAdapter(), new FileStoreMonitorAdapter(), new RemoteStoreMonitorAdapter());
+
+        createArchive(manager, "data00000a.tar");
+
+        // Upload deleted marker for the archive
+        CloudBlobDirectory archiveDirectory = container.getDirectoryReference("oak/data00000a.tar");
+        archiveDirectory.getBlockBlobReference("deleted").openOutputStream().close();
+
+        // Verify the archive is no longer listed after adding the deleted marker
+        List<String> archives = manager.listArchives();
+        assertFalse("Archive should not be listed after deleted marker is uploaded", archives.contains("data00000a.tar"));
+
+        // Verify the archive is deleted
+        assertFalse("Archive should be deleted", container.getDirectoryReference("oak/data00000a.tar").listBlobs().iterator().hasNext());
+    }
+
+
+    @Test
+    public void testListArchiveWithDeleteMarkerPresentAndNoWriteAccess() throws Exception {
+        SegmentArchiveManager manager = azurePersistenceV8.createArchiveManager(false, false, new IOMonitorAdapter(), new FileStoreMonitorAdapter(), new RemoteStoreMonitorAdapter());
+
+        createArchive(manager, "data00000a.tar");
+
+        // Upload deleted marker for the archive
+        CloudBlobDirectory archiveDirectory = container.getDirectoryReference("oak/data00000a.tar");
+        archiveDirectory.getBlockBlobReference("deleted").openOutputStream().close();
+
+        azurePersistenceV8.disableWriting();
+
+        List<String> archives = manager.listArchives();
+        assertFalse("Archive should not be listed after deleted marker is uploaded", archives.contains("data00000a.tar"));
+
+
+        // Verify the archive is not deleted
+        assertTrue("Archive should not be deleted", container.getDirectoryReference("oak/data00000a.tar").listBlobs().iterator().hasNext());
+    }
+
+    private static void createArchive(SegmentArchiveManager manager, String archiveName) throws IOException {
+        SegmentArchiveWriter writer = manager.create(archiveName);
+        UUID u = UUID.randomUUID();
+        writer.writeSegment(u.getMostSignificantBits(), u.getLeastSignificantBits(), new byte[10], 0, 10, 0, 0, false);
+        writer.flush();
+        writer.close();
+    }
+
     private PersistentCache createPersistenceCache() {
         return new AbstractPersistentCache() {
             @Override
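
For context, the change above gates archive cleanup while listing archives on the shared WriteAccessController rather than on the archive manager's isReadOnly flag: an archive carrying a "deleted" marker is always hidden from the listing, but its blobs are only removed when writing is allowed. A minimal sketch of that gate, assuming the controller's no-arg constructor and an enableWriting() counterpart to the disableWriting() call used in the diff (both assumptions, not shown here):

    // sketch only; method names other than isWritingAllowed()/disableWriting() are assumed
    WriteAccessController controller = new WriteAccessController();
    controller.enableWriting();                  // e.g. once the repository lock is held (assumed API)
    if (controller.isWritingAllowed()) {
        // safe to delete archives that carry a "deleted" marker
    }
    controller.disableWriting();                 // e.g. read-only mode or lock lost
    // isWritingAllowed() now returns false, so listing only hides the marked archive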