This is an automated email from the ASF dual-hosted git repository.

reschke pushed a commit to branch OAK-11500
in repository https://gitbox.apache.org/repos/asf/jackrabbit-oak.git

commit 923ad2c84c531ed72ed8bacb8bf8b4fc678e5be4
Author: Julian Reschke <[email protected]>
AuthorDate: Mon Mar 3 16:45:58 2025 +0100

    OAK-11500: Remove usage of Guava io.Closeables
---
 .../blob/AbstractSharedCachingDataStore.java       |  3 +--
 .../jackrabbit/oak/plugins/blob/FileCache.java     |  9 +-------
 .../plugins/blob/MarkSweepGarbageCollector.java    | 25 ++++++++++++++--------
 .../plugins/blob/datastore/DataStoreBlobStore.java | 24 +++++++++------------
 .../oak/plugins/blob/datastore/FSBackend.java      | 10 ++++-----
 .../plugins/blob/datastore/OakFileDataStore.java   | 10 ++++-----
 .../oak/plugins/blob/UploadStagingCacheTest.java   | 11 +++-------
 .../datastore/BlobIdTrackerClusterSharedTest.java  |  5 ++---
 8 files changed, 43 insertions(+), 54 deletions(-)

diff --git 
a/oak-blob-plugins/src/main/java/org/apache/jackrabbit/oak/plugins/blob/AbstractSharedCachingDataStore.java
 
b/oak-blob-plugins/src/main/java/org/apache/jackrabbit/oak/plugins/blob/AbstractSharedCachingDataStore.java
index 5bb8a907ca..dcd57525b4 100644
--- 
a/oak-blob-plugins/src/main/java/org/apache/jackrabbit/oak/plugins/blob/AbstractSharedCachingDataStore.java
+++ 
b/oak-blob-plugins/src/main/java/org/apache/jackrabbit/oak/plugins/blob/AbstractSharedCachingDataStore.java
@@ -58,7 +58,6 @@ import org.slf4j.LoggerFactory;
 
 import org.apache.jackrabbit.guava.common.base.Stopwatch;
 import org.apache.jackrabbit.guava.common.collect.Iterators;
-import org.apache.jackrabbit.guava.common.io.Closeables;
 import 
org.apache.jackrabbit.guava.common.util.concurrent.ListeningExecutorService;
 
 /**
@@ -335,7 +334,7 @@ public abstract class AbstractSharedCachingDataStore 
extends AbstractDataStore
                         copyInputStreamToFile(in, tmpFile);
                         return new LazyFileInputStream(tmpFile);
                     } finally {
-                        Closeables.close(in, false);
+                        in.close();
                     }
                 } else {
                     return new FileInputStream(cached);
diff --git 
a/oak-blob-plugins/src/main/java/org/apache/jackrabbit/oak/plugins/blob/FileCache.java
 
b/oak-blob-plugins/src/main/java/org/apache/jackrabbit/oak/plugins/blob/FileCache.java
index 88db2ebd5f..847d41d0f1 100644
--- 
a/oak-blob-plugins/src/main/java/org/apache/jackrabbit/oak/plugins/blob/FileCache.java
+++ 
b/oak-blob-plugins/src/main/java/org/apache/jackrabbit/oak/plugins/blob/FileCache.java
@@ -49,7 +49,6 @@ import org.slf4j.LoggerFactory;
 
 import org.apache.jackrabbit.guava.common.base.Stopwatch;
 import org.apache.jackrabbit.guava.common.cache.AbstractCache;
-import org.apache.jackrabbit.guava.common.io.Closeables;
 
 /**
  */
@@ -110,18 +109,12 @@ public class FileCache extends AbstractCache<String, 
File> implements Closeable
                 if (cachedFile.exists()) {
                     return cachedFile;
                 } else {
-                    InputStream is = null;
-                    boolean threw = true;
                     long startNanos = System.nanoTime();
-                    try {
-                        is = loader.load(key);
+                    try (InputStream is = loader.load(key))  {
                         copyInputStreamToFile(is, cachedFile);
-                        threw = false;
                     } catch (Exception e) {
                         LOG.warn("Error reading object for id [{}] from 
backend", key, e);
                         throw e;
-                    } finally {
-                        Closeables.close(is, threw);
                     }
                     if (LOG.isDebugEnabled()) {
                         LOG.debug("Loaded file: {} in {}", key, 
(System.nanoTime() - startNanos) / 1_000_000);
diff --git 
a/oak-blob-plugins/src/main/java/org/apache/jackrabbit/oak/plugins/blob/MarkSweepGarbageCollector.java
 
b/oak-blob-plugins/src/main/java/org/apache/jackrabbit/oak/plugins/blob/MarkSweepGarbageCollector.java
index 578377ded5..5fe7f6bea0 100644
--- 
a/oak-blob-plugins/src/main/java/org/apache/jackrabbit/oak/plugins/blob/MarkSweepGarbageCollector.java
+++ 
b/oak-blob-plugins/src/main/java/org/apache/jackrabbit/oak/plugins/blob/MarkSweepGarbageCollector.java
@@ -58,7 +58,6 @@ import org.apache.commons.collections4.ListValuedMap;
 import org.apache.commons.collections4.multimap.ArrayListValuedHashMap;
 import org.apache.jackrabbit.guava.common.base.Stopwatch;
 import org.apache.jackrabbit.guava.common.collect.Iterators;
-import org.apache.jackrabbit.guava.common.io.Closeables;
 import org.apache.jackrabbit.guava.common.util.concurrent.ListenableFutureTask;
 import org.apache.commons.io.FileUtils;
 import org.apache.commons.io.LineIterator;
@@ -303,14 +302,10 @@ public class MarkSweepGarbageCollector implements 
BlobGarbageCollector {
                             
stat.setStartTime(markers.get(uniqueSessionId).getLastModified());
                         }
 
-                        LineNumberReader reader = null;
-                        try {
-                            reader = new LineNumberReader(new 
InputStreamReader(refRec.getStream()));
+                        try (LineNumberReader reader = new 
LineNumberReader(new InputStreamReader(refRec.getStream()))) {
                             while (reader.readLine() != null) {
                             }
                             stat.setNumLines(reader.getLineNumber());
-                        } finally {
-                            Closeables.close(reader, true);
                         }
                     }
                 }
@@ -380,7 +375,13 @@ public class MarkSweepGarbageCollector implements 
BlobGarbageCollector {
         } finally {
             statsCollector.updateDuration(sw.elapsed(TimeUnit.MILLISECONDS), 
TimeUnit.MILLISECONDS);
             if (!LOG.isTraceEnabled() && !traceOutput) {
-                Closeables.close(fs, threw);
+                try {
+                    fs.close();
+                } catch (IOException ioe) {
+                    if (!threw) {
+                        throw ioe;
+                    }
+                }
             }
         }
     }
@@ -769,7 +770,13 @@ public class MarkSweepGarbageCollector implements 
BlobGarbageCollector {
             }
         } finally {
             if (!traceOutput && (!LOG.isTraceEnabled() && candidates == 0)) {
-                Closeables.close(fs, threw);
+                try {
+                    fs.close();
+                } catch (IOException ioe) {
+                    if (!threw) {
+                        throw ioe;
+                    }
+                }
             }
             sw.stop();
             
consistencyStatsCollector.updateDuration(sw.elapsed(TimeUnit.MILLISECONDS), 
TimeUnit.MILLISECONDS);
@@ -1091,7 +1098,7 @@ public class MarkSweepGarbageCollector implements 
BlobGarbageCollector {
             } finally {
                 if (idsIter instanceof Closeable) {
                     try {
-                        Closeables.close((Closeable) idsIter, false);
+                        ((Closeable)idsIter).close();
                     } catch (Exception e) {
                         LOG.debug("Error closing iterator");
                     }
diff --git 
a/oak-blob-plugins/src/main/java/org/apache/jackrabbit/oak/plugins/blob/datastore/DataStoreBlobStore.java
 
b/oak-blob-plugins/src/main/java/org/apache/jackrabbit/oak/plugins/blob/datastore/DataStoreBlobStore.java
index 240e519ca6..24aa6b0b65 100644
--- 
a/oak-blob-plugins/src/main/java/org/apache/jackrabbit/oak/plugins/blob/datastore/DataStoreBlobStore.java
+++ 
b/oak-blob-plugins/src/main/java/org/apache/jackrabbit/oak/plugins/blob/datastore/DataStoreBlobStore.java
@@ -79,7 +79,6 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import org.apache.jackrabbit.guava.common.collect.Iterators;
-import org.apache.jackrabbit.guava.common.io.Closeables;
 
 /**
  * BlobStore wrapper for DataStore. Wraps Jackrabbit 2 DataStore and expose 
them as BlobStores
@@ -341,9 +340,15 @@ public class DataStoreBlobStore
             stats.uploadFailed();
             throw new IOException(e);
         } finally {
-            //DataStore does not closes the stream internally
+            //DataStore does not close the stream internally
             //So close the stream explicitly
-            Closeables.close(stream, threw);
+            try {
+                stream.close();
+            } catch (IOException ioe) {
+                if (!threw) {
+                    throw ioe;
+                }
+            }
         }
     }
 
@@ -364,15 +369,10 @@ public class DataStoreBlobStore
         //This is inefficient as repeated calls for same blobId would involve 
opening new Stream
         //instead clients should directly access the stream from DataRecord by 
special casing for
         //BlobStore which implements DataStore
-        InputStream stream = getInputStream(encodedBlobId);
-        boolean threw = true;
-        try {
+        try (InputStream stream = getInputStream(encodedBlobId)) {
             IOUtils.skipFully(stream, pos);
             int readCount = stream.read(buff, off, length);
-            threw = false;
             return readCount;
-        } finally {
-            Closeables.close(stream, threw);
         }
     }
 
@@ -439,13 +439,9 @@ public class DataStoreBlobStore
                     @Override
                     public byte[] call() throws Exception {
                         boolean threw = true;
-                        InputStream stream = getStream(blobId.blobId);
-                        try {
+                        try (InputStream stream = getStream(blobId.blobId)) {
                             byte[] result = IOUtils.toByteArray(stream);
-                            threw = false;
                             return result;
-                        } finally {
-                            Closeables.close(stream, threw);
                         }
                     }
                 });
diff --git 
a/oak-blob-plugins/src/main/java/org/apache/jackrabbit/oak/plugins/blob/datastore/FSBackend.java
 
b/oak-blob-plugins/src/main/java/org/apache/jackrabbit/oak/plugins/blob/datastore/FSBackend.java
index 7822b115b4..6ebb1fecd5 100644
--- 
a/oak-blob-plugins/src/main/java/org/apache/jackrabbit/oak/plugins/blob/datastore/FSBackend.java
+++ 
b/oak-blob-plugins/src/main/java/org/apache/jackrabbit/oak/plugins/blob/datastore/FSBackend.java
@@ -30,7 +30,6 @@ import java.util.List;
 import java.util.Properties;
 
 import org.apache.commons.lang3.StringUtils;
-import org.apache.jackrabbit.guava.common.io.Closeables;
 import org.apache.commons.io.FileUtils;
 import org.apache.commons.io.IOUtils;
 import org.apache.commons.io.filefilter.FileFilterUtils;
@@ -189,12 +188,13 @@ public class FSBackend extends AbstractSharedBackend {
 
         try {
             File file = new File(fsPathDir, name);
-            FileOutputStream os = new FileOutputStream(file);
-            try {
+            try (FileOutputStream os = new FileOutputStream(file)) {
                 IOUtils.copyLarge(input, os);
             } finally {
-                Closeables.close(os, true);
-                Closeables.close(input, true);
+                try {
+                    input.close();
+                } catch (IOException swallowed) {
+                }
             }
         } catch (IOException e) {
             LOG.error("Exception while adding metadata record with name {}, 
{}",
diff --git 
a/oak-blob-plugins/src/main/java/org/apache/jackrabbit/oak/plugins/blob/datastore/OakFileDataStore.java
 
b/oak-blob-plugins/src/main/java/org/apache/jackrabbit/oak/plugins/blob/datastore/OakFileDataStore.java
index 7ad09d04c1..1cddd3713c 100644
--- 
a/oak-blob-plugins/src/main/java/org/apache/jackrabbit/oak/plugins/blob/datastore/OakFileDataStore.java
+++ 
b/oak-blob-plugins/src/main/java/org/apache/jackrabbit/oak/plugins/blob/datastore/OakFileDataStore.java
@@ -33,7 +33,6 @@ import java.util.List;
 import java.util.Set;
 
 import org.apache.commons.lang3.StringUtils;
-import org.apache.jackrabbit.guava.common.io.Closeables;
 import org.apache.commons.io.FileUtils;
 import org.apache.commons.io.IOUtils;
 import org.apache.commons.io.filefilter.FileFilterUtils;
@@ -139,12 +138,13 @@ public class OakFileDataStore extends FileDataStore 
implements SharedDataStore {
 
         try {
             File file = new File(getPath(), name);
-            FileOutputStream os = new FileOutputStream(file);
-            try {
+            try (FileOutputStream os = new FileOutputStream(file)) {
                 IOUtils.copyLarge(input, os);
             } finally {
-                Closeables.close(os, true);
-                Closeables.close(input, true);
+                try {
+                    input.close();
+                } catch (IOException swallowed) {
+                }
             }
         } catch (IOException e) {
             LOG.error("Exception while adding metadata record with name {}, 
{}",
diff --git 
a/oak-blob-plugins/src/test/java/org/apache/jackrabbit/oak/plugins/blob/UploadStagingCacheTest.java
 
b/oak-blob-plugins/src/test/java/org/apache/jackrabbit/oak/plugins/blob/UploadStagingCacheTest.java
index 77ffbf5b21..82748f1e81 100644
--- 
a/oak-blob-plugins/src/test/java/org/apache/jackrabbit/oak/plugins/blob/UploadStagingCacheTest.java
+++ 
b/oak-blob-plugins/src/test/java/org/apache/jackrabbit/oak/plugins/blob/UploadStagingCacheTest.java
@@ -42,7 +42,6 @@ import java.util.concurrent.atomic.AtomicInteger;
 import ch.qos.logback.classic.Level;
 
 import org.apache.jackrabbit.guava.common.collect.Iterators;
-import org.apache.jackrabbit.guava.common.io.Closeables;
 import org.apache.jackrabbit.guava.common.io.Closer;
 import org.apache.jackrabbit.guava.common.util.concurrent.Futures;
 import org.apache.jackrabbit.guava.common.util.concurrent.ListenableFuture;
@@ -693,7 +692,7 @@ public class UploadStagingCacheTest extends 
AbstractDataStoreCacheTest {
         // Create pre-upgrade load
         File home = folder.newFolder();
         File pendingUploadsFile = new File(home, 
DataStoreCacheUpgradeUtils.UPLOAD_MAP);
-        createGibberishLoad(home, pendingUploadsFile);
+        createGibberishLoad(pendingUploadsFile);
 
         LogCustomizer lc = 
LogCustomizer.forLogger(DataStoreCacheUpgradeUtils.class.getName())
             .filter(Level.WARN)
@@ -717,13 +716,9 @@ public class UploadStagingCacheTest extends 
AbstractDataStoreCacheTest {
     }
 
 
-    private void createGibberishLoad(File home, File pendingUploadFile) throws 
IOException {
-        BufferedWriter writer = null;
-        try {
-            writer = new BufferedWriter(new FileWriter(pendingUploadFile, 
StandardCharsets.UTF_8));
+    private void createGibberishLoad(File pendingUploadFile) throws 
IOException {
+        try (BufferedWriter writer = new BufferedWriter(new 
FileWriter(pendingUploadFile, StandardCharsets.UTF_8))) {
             FileIOUtils.writeAsLine(writer, 
"jerhgiuheirghoeoorqehgsjlwjpfkkwpkf", false);
-        } finally {
-            Closeables.close(writer, true);
         }
     }
 
diff --git 
a/oak-blob-plugins/src/test/java/org/apache/jackrabbit/oak/plugins/blob/datastore/BlobIdTrackerClusterSharedTest.java
 
b/oak-blob-plugins/src/test/java/org/apache/jackrabbit/oak/plugins/blob/datastore/BlobIdTrackerClusterSharedTest.java
index 28bc17d3af..4ac3dac862 100644
--- 
a/oak-blob-plugins/src/test/java/org/apache/jackrabbit/oak/plugins/blob/datastore/BlobIdTrackerClusterSharedTest.java
+++ 
b/oak-blob-plugins/src/test/java/org/apache/jackrabbit/oak/plugins/blob/datastore/BlobIdTrackerClusterSharedTest.java
@@ -41,7 +41,6 @@ import org.junit.rules.TemporaryFolder;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import static org.apache.jackrabbit.guava.common.io.Closeables.close;
 import static java.lang.String.valueOf;
 import static java.util.UUID.randomUUID;
 import static java.util.concurrent.Executors.newSingleThreadScheduledExecutor;
@@ -224,11 +223,11 @@ public class BlobIdTrackerClusterSharedTest {
         Set<String> retrieved = new HashSet<>();
         Iterator<String> iter = tracker.get();
         log.info("retrieving blob ids");
-        while(iter.hasNext()) {
+        while (iter.hasNext()) {
             retrieved.add(iter.next());
         }
         if (iter instanceof Closeable) {
-            close((Closeable)iter, true);
+            ((Closeable)iter).close();
         }
         return retrieved;
     }

Reply via email to