This is an automated email from the ASF dual-hosted git repository.

reschke pushed a commit to branch OAK-11500
in repository https://gitbox.apache.org/repos/asf/jackrabbit-oak.git

commit 78fdcc09387668895c7549e84ff9a64d663ad428
Author: Julian Reschke <[email protected]>
AuthorDate: Tue Feb 18 17:02:44 2025 +0100

    OAK-11500: Remove usage of Guava io.Closeable
---
 .../apache/jackrabbit/oak/commons/FileIOUtils.java | 25 +++-----------
 .../ActiveDeletedBlobCollectorFactory.java         |  7 ++--
 .../jackrabbit/oak/run/DataStoreCheckCommand.java  | 17 ++--------
 .../jackrabbit/oak/run/DataStoreCommand.java       |  9 +++--
 .../oak/segment/DefaultSegmentWriter.java          | 10 ++++--
 .../plugins/document/mongo/MongoDocumentStore.java |  3 +-
 .../oak/plugins/document/MongoBlobGCTest.java      | 39 +++++++---------------
 7 files changed, 39 insertions(+), 71 deletions(-)
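
For readers not familiar with the idiom being removed: the hunks below swap the Guava helper org.apache.jackrabbit.guava.common.io.Closeables.close(Closeable, boolean) for try-with-resources or a hand-rolled close() block. The sketch below is illustrative only and is not part of the patch (CloseablesMigrationSketch and the readFirstLine* methods are made-up names); it shows the general before/after shape under that assumption:

    import java.io.BufferedReader;
    import java.io.File;
    import java.io.FileReader;
    import java.io.IOException;

    class CloseablesMigrationSketch {

        // Before: Guava-era idiom. The "threw" flag told Closeables.close()
        // whether a failure from close() should be swallowed (the body already
        // failed) or propagated (close() is the only failure).
        static String readFirstLineOld(File f) throws IOException {
            BufferedReader reader = null;
            boolean threw = true;
            try {
                reader = new BufferedReader(new FileReader(f));
                String line = reader.readLine();
                threw = false;
                return line;
            } finally {
                // was: Closeables.close(reader, threw);
                if (reader != null) {
                    try {
                        reader.close();
                    } catch (IOException e) {
                        if (!threw) {
                            throw e;  // close() failed although the body succeeded
                        }
                        // body already threw: suppress the close() failure
                    }
                }
            }
        }

        // After: try-with-resources. A close() failure either propagates (body
        // succeeded) or is recorded as a suppressed exception (body failed),
        // so no flag is needed.
        static String readFirstLineNew(File f) throws IOException {
            try (BufferedReader reader = new BufferedReader(new FileReader(f))) {
                return reader.readLine();
            }
        }
    }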

diff --git a/oak-commons/src/main/java/org/apache/jackrabbit/oak/commons/FileIOUtils.java b/oak-commons/src/main/java/org/apache/jackrabbit/oak/commons/FileIOUtils.java
index b4a9e6a3c0..6f9a120d5a 100644
--- a/oak-commons/src/main/java/org/apache/jackrabbit/oak/commons/FileIOUtils.java
+++ b/oak-commons/src/main/java/org/apache/jackrabbit/oak/commons/FileIOUtils.java
@@ -20,7 +20,6 @@ import static java.io.File.createTempFile;
 import static java.nio.charset.StandardCharsets.UTF_8;
 import static org.apache.commons.io.FileUtils.forceDelete;
 import static org.apache.commons.io.IOUtils.copyLarge;
-import static org.apache.jackrabbit.guava.common.io.Closeables.close;
 import static org.apache.jackrabbit.oak.commons.sort.EscapeUtils.escapeLineBreak;
 import static org.apache.jackrabbit.oak.commons.sort.EscapeUtils.unescapeLineBreaks;
 import static org.apache.jackrabbit.oak.commons.sort.ExternalSort.mergeSortedFiles;
@@ -140,25 +139,20 @@ public final class FileIOUtils {
      * @throws IOException
      */
     public static void append(List<File> files, File appendTo, boolean delete) throws IOException {
-        OutputStream appendStream = null;
-        boolean threw = true;
 
-        try {
-            appendStream = new BufferedOutputStream(new FileOutputStream(appendTo, true));
+        try (OutputStream appendStream = new BufferedOutputStream(new FileOutputStream(appendTo, true))) {
 
             for (File f : files) {
                 try (InputStream iStream = new FileInputStream(f)) {
                     copyLarge(iStream, appendStream);
                 }
             }
-            threw = false;
         } finally {
             if (delete) {
                 for (File f : files) {
                     f.delete();
                 }
             }
-            close(appendStream, threw);
         }
     }
 
@@ -227,11 +221,9 @@ public final class FileIOUtils {
      */
     public static int writeStrings(Iterator<String> iterator, File f, boolean escape,
         @NotNull Function<String, String> transformer, @Nullable Logger logger, @Nullable String message) throws IOException {
-        BufferedWriter writer = new BufferedWriter(new FileWriter(f, UTF_8));
-        boolean threw = true;
 
         int count = 0;
-        try {
+        try (BufferedWriter writer = new BufferedWriter(new FileWriter(f, UTF_8))) {
             while (iterator.hasNext()) {
                 writeAsLine(writer, transformer.apply(iterator.next()), escape);
                 count++;
@@ -241,9 +233,6 @@ public final class FileIOUtils {
                     }
                 }
             }
-            threw = false;
-        } finally {
-            close(writer, threw);
         }
         return count;
     }
@@ -257,13 +246,10 @@ public final class FileIOUtils {
      * @throws IOException
      */
     public static Set<String> readStringsAsSet(InputStream stream, boolean unescape) throws IOException {
-        BufferedReader reader = null;
         Set<String> set = new HashSet<>();
-        boolean threw = true;
 
-        try {
-            reader = new BufferedReader(new InputStreamReader(stream, UTF_8));
-            String line  = null;
+        try (BufferedReader reader = new BufferedReader(new InputStreamReader(stream, UTF_8))) {
+            String line;
             while ((line = reader.readLine()) != null) {
                 if (unescape) {
                     set.add(unescapeLineBreaks(line));
@@ -271,9 +257,6 @@ public final class FileIOUtils {
                     set.add(line);
                 }
             }
-            threw = false;
-        } finally {
-            close(reader, threw);
         }
         return set;
     }
diff --git a/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/directory/ActiveDeletedBlobCollectorFactory.java b/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/directory/ActiveDeletedBlobCollectorFactory.java
index 92ee7e34cc..323c2056aa 100644
--- a/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/directory/ActiveDeletedBlobCollectorFactory.java
+++ b/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/directory/ActiveDeletedBlobCollectorFactory.java
@@ -38,7 +38,6 @@ import java.util.concurrent.LinkedBlockingQueue;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicBoolean;
 
-import org.apache.jackrabbit.guava.common.io.Closeables;
 import org.apache.commons.io.FileUtils;
 import org.apache.commons.io.LineIterator;
 import org.apache.commons.io.filefilter.IOFileFilter;
@@ -324,7 +323,11 @@ public class ActiveDeletedBlobCollectorFactory {
             long startBlobTrackerSyncTime = clock.getTime();
             // Synchronize deleted blob ids with the blob id tracker
             try {
-                Closeables.close(idTempDeleteWriter, true);
+                try {
+                    idTempDeleteWriter.close();
+                } catch (IOException ex) {
+                    LOG.warn("IOException thrown while closing 
idTempDeleteWriter", ex);
+                }
 
                 if (blobIdsTracked && numBlobsDeleted > 0) {
                     BlobTracker tracker = ((BlobTrackingStore) blobStore).getTracker();
diff --git a/oak-run/src/main/java/org/apache/jackrabbit/oak/run/DataStoreCheckCommand.java b/oak-run/src/main/java/org/apache/jackrabbit/oak/run/DataStoreCheckCommand.java
index 389b9bb351..1d55c0c768 100644
--- a/oak-run/src/main/java/org/apache/jackrabbit/oak/run/DataStoreCheckCommand.java
+++ b/oak-run/src/main/java/org/apache/jackrabbit/oak/run/DataStoreCheckCommand.java
@@ -17,7 +17,6 @@
 package org.apache.jackrabbit.oak.run;
 
 import static org.apache.jackrabbit.guava.common.base.Stopwatch.createStarted;
-import static org.apache.jackrabbit.guava.common.io.Closeables.close;
 import static java.io.File.createTempFile;
 import static java.util.Arrays.asList;
 import static org.apache.commons.io.FileUtils.forceDelete;
@@ -47,7 +46,6 @@ import java.util.concurrent.atomic.AtomicInteger;
 
 import org.apache.jackrabbit.guava.common.base.Splitter;
 import org.apache.jackrabbit.guava.common.base.Stopwatch;
-import org.apache.jackrabbit.guava.common.io.Closeables;
 import org.apache.jackrabbit.guava.common.io.Closer;
 import com.mongodb.MongoClient;
 import com.mongodb.MongoClientURI;
@@ -426,10 +424,8 @@ public class DataStoreCheckCommand implements Command {
 
     private static void retrieveBlobReferences(GarbageCollectableBlobStore blobStore, BlobReferenceRetriever marker,
         File marked, String dsType, boolean isVerbose) throws IOException {
-        final BufferedWriter writer = new BufferedWriter(new FileWriter(marked, StandardCharsets.UTF_8));
         final AtomicInteger count = new AtomicInteger();
-        boolean threw = true;
-        try {
+        try (BufferedWriter writer = new BufferedWriter(new FileWriter(marked, StandardCharsets.UTF_8))) {
             final GarbageCollectableBlobStore finalBlobStore = blobStore;
 
             System.out.println("Starting dump of blob references");
@@ -464,9 +460,6 @@ public class DataStoreCheckCommand implements Command {
 
             System.out.println(count.get() + " blob references found");
             System.out.println("Finished in " + 
watch.elapsed(TimeUnit.SECONDS) + " seconds");
-            threw = false;
-        } finally {
-            close(writer, threw);
         }
     }
 
@@ -543,14 +536,11 @@ public class DataStoreCheckCommand implements Command {
         }
 
         public void traverse(String ... paths) throws IOException {
-            BufferedWriter writer = null;
             final AtomicInteger count = new AtomicInteger();
-            boolean threw = true;
             System.out.println("Starting dump of blob references by 
traversing");
             Stopwatch watch = createStarted();
 
-            try {
-                writer = new BufferedWriter(new FileWriter(references, StandardCharsets.UTF_8));
+            try (BufferedWriter writer = new BufferedWriter(new FileWriter(references, StandardCharsets.UTF_8))) {
                 if (paths.length == 0) {
                     traverseChildren(nodeStore.getRoot(), "/", writer, count);
                 } else {
@@ -570,9 +560,6 @@ public class DataStoreCheckCommand implements Command {
 
                 System.out.println(count.get() + " blob references found");
                 System.out.println("Finished in " + 
watch.elapsed(TimeUnit.SECONDS) + " seconds");
-                threw = false;
-            } finally {
-                Closeables.close(writer, threw);
             }
         }
 
diff --git a/oak-run/src/main/java/org/apache/jackrabbit/oak/run/DataStoreCommand.java b/oak-run/src/main/java/org/apache/jackrabbit/oak/run/DataStoreCommand.java
index 1ead8f7cc7..225e6589a5 100644
--- a/oak-run/src/main/java/org/apache/jackrabbit/oak/run/DataStoreCommand.java
+++ b/oak-run/src/main/java/org/apache/jackrabbit/oak/run/DataStoreCommand.java
@@ -41,7 +41,6 @@ import java.util.stream.StreamSupport;
 
 import org.apache.jackrabbit.guava.common.base.Splitter;
 import org.apache.jackrabbit.guava.common.base.Stopwatch;
-import org.apache.jackrabbit.guava.common.io.Closeables;
 import org.apache.jackrabbit.guava.common.io.Closer;
 import joptsimple.OptionParser;
 import org.apache.commons.io.FileUtils;
@@ -239,7 +238,13 @@ public class DataStoreCommand implements Command {
 
                     FileUtils.copyFile(referencesTemp, references);
                 } finally {
-                    Closeables.close(writer, threw);
+                    try {
+                        writer.close();
+                    } catch (IOException ex) {
+                        if (!threw) {
+                            throw ex;
+                        }
+                    }
                 }
             } else if (dataStoreOpts.dumpIds()) {
                 log.info("Initiating dump of data store IDs");
diff --git a/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/DefaultSegmentWriter.java b/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/DefaultSegmentWriter.java
index 259e16dcf1..0b070810ae 100644
--- a/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/DefaultSegmentWriter.java
+++ b/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/DefaultSegmentWriter.java
@@ -55,7 +55,6 @@ import java.util.Map;
 import javax.jcr.PropertyType;
 
 import org.apache.commons.io.IOUtils;
-import org.apache.jackrabbit.guava.common.io.Closeables;
 import org.apache.jackrabbit.oak.api.Blob;
 import org.apache.jackrabbit.oak.api.PropertyState;
 import org.apache.jackrabbit.oak.api.Type;
@@ -645,7 +644,14 @@ public class DefaultSegmentWriter implements SegmentWriter {
                 threw = false;
                 return id;
             } finally {
-                Closeables.close(stream, threw);
+                try {
+                    stream.close();
+                } catch (IOException ex) {
+                    if (!threw) {
+                        throw ex;
+                    }
+                    LOG.warn("IOException thrown while closing stream", ex);
+                }
             }
         }
 
diff --git a/oak-store-document/src/main/java/org/apache/jackrabbit/oak/plugins/document/mongo/MongoDocumentStore.java b/oak-store-document/src/main/java/org/apache/jackrabbit/oak/plugins/document/mongo/MongoDocumentStore.java
index 2e3bf96102..d6316c3fa2 100644
--- a/oak-store-document/src/main/java/org/apache/jackrabbit/oak/plugins/document/mongo/MongoDocumentStore.java
+++ b/oak-store-document/src/main/java/org/apache/jackrabbit/oak/plugins/document/mongo/MongoDocumentStore.java
@@ -44,7 +44,6 @@ import java.util.stream.StreamSupport;
 import org.apache.jackrabbit.guava.common.base.Stopwatch;
 import org.apache.jackrabbit.guava.common.collect.Iterables;
 import org.apache.jackrabbit.guava.common.collect.Iterators;
-import org.apache.jackrabbit.guava.common.io.Closeables;
 import org.apache.jackrabbit.guava.common.util.concurrent.AtomicDouble;
 import com.mongodb.Block;
 import com.mongodb.DBObject;
@@ -2030,7 +2029,7 @@ public class MongoDocumentStore implements DocumentStore {
             clusterNodesConnection.close();
         }
         try {
-            Closeables.close(throttlingMetricsUpdater, false);
+            throttlingMetricsUpdater.close();
         } catch (IOException e) {
             LOG.warn("Error occurred while closing throttlingMetricsUpdater", 
e);
         }
diff --git a/oak-store-document/src/test/java/org/apache/jackrabbit/oak/plugins/document/MongoBlobGCTest.java b/oak-store-document/src/test/java/org/apache/jackrabbit/oak/plugins/document/MongoBlobGCTest.java
index 8c5b97018e..aa4a58ff6f 100644
--- a/oak-store-document/src/test/java/org/apache/jackrabbit/oak/plugins/document/MongoBlobGCTest.java
+++ b/oak-store-document/src/test/java/org/apache/jackrabbit/oak/plugins/document/MongoBlobGCTest.java
@@ -36,7 +36,6 @@ import ch.qos.logback.classic.Level;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.jackrabbit.guava.common.base.Splitter;
 import org.apache.jackrabbit.guava.common.base.Stopwatch;
-import org.apache.jackrabbit.guava.common.io.Closeables;
 import com.mongodb.BasicDBObject;
 import com.mongodb.ReadPreference;
 import com.mongodb.client.MongoCollection;
@@ -398,26 +397,18 @@ public class MongoBlobGCTest extends AbstractMongoConnectionTest {
     }
 
     private static void assertBlobReferences(Set<String> expected, String rootFolder) throws IOException {
-        InputStream is = null;
-        try {
-            is = new FileInputStream(getMarkedFile(rootFolder));
+        try (InputStream is = new FileInputStream(getMarkedFile(rootFolder))) {
             Set<String> records = FileIOUtils.readStringsAsSet(is, true);
             assertEquals(expected, records);
-        } finally {
-            Closeables.close(is, false);
         }
     }
 
     private static void assertBlobReferenceRecords(int expected, String rootFolder) throws IOException {
-        InputStream is = null;
-        try {
-            is = new FileInputStream(getMarkedFile(rootFolder));
+        try (InputStream is = new FileInputStream(getMarkedFile(rootFolder))) {
             Set<String> records = FileIOUtils.readStringsAsSet(is, true);
             for (String rec : records) {
                 assertEquals(expected, Splitter.on(",").omitEmptyStrings().splitToList(rec).size());
             }
-        } finally {
-            Closeables.close(is, false);
         }
     }
 
@@ -509,42 +500,36 @@ public class MongoBlobGCTest extends AbstractMongoConnectionTest {
             this.maxLastModifiedInterval = maxLastModifiedInterval;
             this.additionalBlobs = new HashSet<>();
         }
-        
+
         @Override
         protected void markAndSweep(boolean markOnly, boolean forceBlobRetrieve) throws Exception {
-            boolean threw = true;
-            GarbageCollectorFileState fs = new GarbageCollectorFileState(root);
-            try {
+
+            try (GarbageCollectorFileState fs = new GarbageCollectorFileState(root)) {
                 Stopwatch sw = Stopwatch.createStarted();
                 LOG.info("Starting Test Blob garbage collection");
-                
+
                 // Sleep a little more than the max interval to get over the interval for valid blobs
                 Thread.sleep(maxLastModifiedInterval + 1000);
                 LOG.info("Slept {} to make blobs old", maxLastModifiedInterval 
+ 1000);
-                
+
                 long markStart = System.currentTimeMillis();
                 mark(fs);
                 LOG.info("Mark finished");
-                
+
                 additionalBlobs = createAdditional();
-    
+
                 if (!markOnly) {
                     Thread.sleep(maxLastModifiedInterval + 100);
                     LOG.info("Slept {} to make additional blobs old", 
maxLastModifiedInterval + 100);
-    
+
                     long deleteCount = sweep(fs, markStart, forceBlobRetrieve);
-                    threw = false;
-            
+
                     LOG.info("Blob garbage collection completed in {}. Number 
of blobs deleted [{}]", sw.toString(),
                         deleteCount, maxLastModifiedInterval);
                 }
-            } finally {
-                if (!LOG.isTraceEnabled()) {
-                    Closeables.close(fs, threw);
-                }
             }
         }
-    
+
         public HashSet<String> createAdditional() throws Exception {
             HashSet<String> blobSet = new HashSet<String>();
             DocumentNodeStore s = mk.getNodeStore();
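
For reference, the contract of the removed helper is roughly the following sketch (an approximation of its documented behaviour, not the actual Guava source; ClosePolicySketch is a made-up name). The hand-rolled close blocks in this patch mirror it per call site: close(x, true) logs and swallows, close(x, false) propagates, and close(x, threw) propagates only when the guarded body completed normally.

    import java.io.Closeable;
    import java.io.IOException;

    final class ClosePolicySketch {
        // Approximation of Closeables.close(Closeable, boolean) as documented;
        // Guava logs the swallowed exception rather than silently dropping it.
        static void close(Closeable closeable, boolean swallowIOException) throws IOException {
            if (closeable == null) {
                return;
            }
            try {
                closeable.close();
            } catch (IOException e) {
                if (!swallowIOException) {
                    throw e;
                }
                // swallowIOException == true: log at WARN and continue
            }
        }
    }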
