This is an automated email from the ASF dual-hosted git repository.

joerghoh pushed a commit to branch OAK-11560
in repository https://gitbox.apache.org/repos/asf/jackrabbit-oak.git
commit 15b46e2280598bf3d7ea79c40b9a21b445644c9b
Author: Joerg Hoh <[email protected]>
AuthorDate: Thu Mar 6 14:44:32 2025 +0100

    OAK-11560 do not delete already present segments
---
 .../persistentcache/PersistentDiskCache.java | 35 +++++++++++++++++-----
 1 file changed, 27 insertions(+), 8 deletions(-)

diff --git a/oak-segment-remote/src/main/java/org/apache/jackrabbit/oak/segment/remote/persistentcache/PersistentDiskCache.java b/oak-segment-remote/src/main/java/org/apache/jackrabbit/oak/segment/remote/persistentcache/PersistentDiskCache.java
index 1a1713f87e..cae72a8b48 100644
--- a/oak-segment-remote/src/main/java/org/apache/jackrabbit/oak/segment/remote/persistentcache/PersistentDiskCache.java
+++ b/oak-segment-remote/src/main/java/org/apache/jackrabbit/oak/segment/remote/persistentcache/PersistentDiskCache.java
@@ -37,6 +37,7 @@ import java.io.FileOutputStream;
 import java.io.IOException;
 import java.nio.channels.FileChannel;
 import java.nio.file.AtomicMoveNotSupportedException;
+import java.nio.file.FileAlreadyExistsException;
 import java.nio.file.Files;
 import java.nio.file.Path;
 import java.nio.file.StandardCopyOption;
@@ -59,6 +60,8 @@ public class PersistentDiskCache extends AbstractPersistentCache {
     public static final long DEFAULT_TEMP_FILES_CLEANUP_WAIT_TIME_MS = 60000;
     private static final String TEMP_FILE_SUFFIX = ".part";
 
+    public static boolean DELETE_SEGMENT_ON_REFETCH = Boolean.getBoolean("oak.segment.cache.delete_segment_on_refetch");
+
     private final File directory;
     private final long maxCacheSizeBytes;
     private final DiskCacheIOMonitor diskCacheIOMonitor;
@@ -121,7 +124,7 @@ public class PersistentDiskCache extends AbstractPersistentCache {
 
             return buffer;
         } catch (FileNotFoundException e) {
-            logger.info("Segment {} deleted from file system!", segmentId);
+            logger.info("Segment {} no longer available, refetching", segmentId);
         } catch (IOException e) {
             logger.error("Error loading segment {} from cache:", segmentId, e);
         }
@@ -160,14 +163,20 @@ public class PersistentDiskCache extends AbstractPersistentCache {
                 }
                 long cacheSizeAfter = cacheSize.addAndGet(fileSize);
                 diskCacheIOMonitor.updateCacheSize(cacheSizeAfter, fileSize);
-            } catch (Exception e) {
-                logger.error("Error writing segment {} to cache", segmentId, e);
-                try {
-                    Files.deleteIfExists(segmentFile.toPath());
-                    Files.deleteIfExists(tempSegmentFile.toPath());
-                } catch (IOException i) {
-                    logger.error("Error while deleting corrupted segment file {}", segmentId, i);
+            } catch (FileAlreadyExistsException faee) {
+                if (DELETE_SEGMENT_ON_REFETCH) {
+                    writeSegmentExceptionHandler(segmentId, segmentFile, tempSegmentFile, faee);
+                } else {
+                    // just delete the temp file, as the target segment file is already there and valid
+                    try {
+                        logger.debug("Skipping already existing file {}", segmentId);
+                        Files.deleteIfExists(tempSegmentFile.toPath());
+                    } catch (IOException e) {
+                        logger.debug("Cannot delete temporary file {}", tempSegmentFile.toPath(), e);
+                    }
                 }
+            } catch (Exception e) {
+                writeSegmentExceptionHandler(segmentId, segmentFile, tempSegmentFile, e);
             } finally {
                 writesPending.remove(segmentId);
             }
@@ -178,6 +187,16 @@ public class PersistentDiskCache extends AbstractPersistentCache {
         executor.execute(task);
     }
 
+    private void writeSegmentExceptionHandler(String segmentId, File segmentFile, File tempSegmentFile, Exception e) {
+        logger.error("Error writing segment {} to cache", segmentId, e);
+        try {
+            Files.deleteIfExists(segmentFile.toPath());
+            Files.deleteIfExists(tempSegmentFile.toPath());
+        } catch (IOException i) {
+            logger.error("Error while deleting corrupted segment file {}", segmentId, i);
+        }
+    }
+
     private boolean isCacheFull() {
         return cacheSize.get() >= maxCacheSizeBytes;
     }
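Note for readers of this change (not part of the commit): a minimal, self-contained sketch of the situation the new FileAlreadyExistsException branch covers. It relies only on standard java.nio behaviour (Files.move() without StandardCopyOption.REPLACE_EXISTING fails when the target exists); the class name and file names below are made up for illustration.

    import java.nio.file.FileAlreadyExistsException;
    import java.nio.file.Files;
    import java.nio.file.Path;

    public class FileAlreadyExistsDemo {
        public static void main(String[] args) throws Exception {
            Path dir = Files.createTempDirectory("segment-cache-demo");
            Path segmentFile = dir.resolve("0123456789abcdef");          // hypothetical cached segment
            Path tempSegmentFile = dir.resolve("0123456789abcdef.part"); // temp copy from a second fetch

            Files.write(segmentFile, new byte[] {1, 2, 3});     // segment is already present in the cache
            Files.write(tempSegmentFile, new byte[] {1, 2, 3}); // a concurrent refetch wrote a temp file

            try {
                // Without REPLACE_EXISTING the move fails because the target exists; before this
                // change the generic error handler then deleted both files, evicting a valid segment.
                Files.move(tempSegmentFile, segmentFile);
            } catch (FileAlreadyExistsException e) {
                // New default (DELETE_SEGMENT_ON_REFETCH == false): keep the segment, drop the temp file.
                Files.deleteIfExists(tempSegmentFile);
            }
        }
    }

Setting -Doak.segment.cache.delete_segment_on_refetch=true restores the previous handling (delete both files) for this case.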
