This is an automated email from the ASF dual-hosted git repository.
reschke pushed a commit to branch trunk
in repository https://gitbox.apache.org/repos/asf/jackrabbit-oak.git
The following commit(s) were added to refs/heads/trunk by this push:
new 94a1cbc61d Revert " OAK-11500: Remove usage of Guava io.Closeable (#2129)"
94a1cbc61d is described below
commit 94a1cbc61df7a3653e724d251665f2cbb6700b28
Author: Julian Reschke <[email protected]>
AuthorDate: Tue Mar 4 10:27:32 2025 +0100
Revert " OAK-11500: Remove usage of Guava io.Closeable (#2129)"
This reverts commit 58bc057cfb8fee66535e19485b69fe4515e28cef.
---
.../blob/AbstractSharedCachingDataStore.java | 3 +-
.../jackrabbit/oak/plugins/blob/FileCache.java | 9 ++++-
.../plugins/blob/MarkSweepGarbageCollector.java | 25 +++++---------
.../plugins/blob/datastore/DataStoreBlobStore.java | 24 +++++++------
.../oak/plugins/blob/datastore/FSBackend.java | 10 +++---
.../plugins/blob/datastore/OakFileDataStore.java | 10 +++---
.../oak/plugins/blob/UploadStagingCacheTest.java | 11 ++++--
.../datastore/BlobIdTrackerClusterSharedTest.java | 5 +--
.../apache/jackrabbit/oak/commons/FileIOUtils.java | 25 +++++++++++---
.../ActiveDeletedBlobCollectorFactory.java | 7 ++--
.../jackrabbit/oak/run/DataStoreCheckCommand.java | 17 ++++++++--
.../jackrabbit/oak/run/DataStoreCommand.java | 9 ++---
.../oak/segment/DefaultSegmentWriter.java | 10 ++----
.../plugins/document/mongo/MongoDocumentStore.java | 3 +-
.../oak/plugins/document/MongoBlobGCTest.java | 39 +++++++++++++++-------
15 files changed, 125 insertions(+), 82 deletions(-)
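
For context while reading the hunks below: the revert replaces plain stream.close() and try-with-resources calls with Guava's Closeables.close(closeable, swallowIOException), which closes the resource in a finally block and swallows an IOException from close() only when the body already threw, so the original exception is not masked. A minimal sketch of that idiom (the CloseablesSketch class and readAll helper are hypothetical, not part of this commit):

    import java.io.IOException;
    import java.io.InputStream;

    import org.apache.jackrabbit.guava.common.io.Closeables;

    // Hypothetical helper illustrating the Closeables.close idiom restored by this revert.
    final class CloseablesSketch {
        static byte[] readAll(InputStream stream) throws IOException {
            boolean threw = true;
            try {
                byte[] data = stream.readAllBytes();
                threw = false;
                return data;
            } finally {
                // Swallow a failure from close() only if the try block is already
                // propagating an exception, so the original error is not masked.
                Closeables.close(stream, threw);
            }
        }
    }

Passing true unconditionally, as several hunks below do for secondary streams, logs and suppresses any IOException raised while closing.
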
diff --git a/oak-blob-plugins/src/main/java/org/apache/jackrabbit/oak/plugins/blob/AbstractSharedCachingDataStore.java b/oak-blob-plugins/src/main/java/org/apache/jackrabbit/oak/plugins/blob/AbstractSharedCachingDataStore.java
index dcd57525b4..5bb8a907ca 100644
--- a/oak-blob-plugins/src/main/java/org/apache/jackrabbit/oak/plugins/blob/AbstractSharedCachingDataStore.java
+++ b/oak-blob-plugins/src/main/java/org/apache/jackrabbit/oak/plugins/blob/AbstractSharedCachingDataStore.java
@@ -58,6 +58,7 @@ import org.slf4j.LoggerFactory;
import org.apache.jackrabbit.guava.common.base.Stopwatch;
import org.apache.jackrabbit.guava.common.collect.Iterators;
+import org.apache.jackrabbit.guava.common.io.Closeables;
import org.apache.jackrabbit.guava.common.util.concurrent.ListeningExecutorService;
/**
@@ -334,7 +335,7 @@ public abstract class AbstractSharedCachingDataStore extends AbstractDataStore
copyInputStreamToFile(in, tmpFile);
return new LazyFileInputStream(tmpFile);
} finally {
- in.close();
+ Closeables.close(in, false);
}
} else {
return new FileInputStream(cached);
diff --git a/oak-blob-plugins/src/main/java/org/apache/jackrabbit/oak/plugins/blob/FileCache.java b/oak-blob-plugins/src/main/java/org/apache/jackrabbit/oak/plugins/blob/FileCache.java
index 847d41d0f1..88db2ebd5f 100644
--- a/oak-blob-plugins/src/main/java/org/apache/jackrabbit/oak/plugins/blob/FileCache.java
+++ b/oak-blob-plugins/src/main/java/org/apache/jackrabbit/oak/plugins/blob/FileCache.java
@@ -49,6 +49,7 @@ import org.slf4j.LoggerFactory;
import org.apache.jackrabbit.guava.common.base.Stopwatch;
import org.apache.jackrabbit.guava.common.cache.AbstractCache;
+import org.apache.jackrabbit.guava.common.io.Closeables;
/**
*/
@@ -109,12 +110,18 @@ public class FileCache extends AbstractCache<String, File> implements Closeable
if (cachedFile.exists()) {
return cachedFile;
} else {
+ InputStream is = null;
+ boolean threw = true;
long startNanos = System.nanoTime();
- try (InputStream is = loader.load(key)) {
+ try {
+ is = loader.load(key);
copyInputStreamToFile(is, cachedFile);
+ threw = false;
} catch (Exception e) {
LOG.warn("Error reading object for id [{}] from
backend", key, e);
throw e;
+ } finally {
+ Closeables.close(is, threw);
}
if (LOG.isDebugEnabled()) {
LOG.debug("Loaded file: {} in {}", key,
(System.nanoTime() - startNanos) / 1_000_000);
diff --git a/oak-blob-plugins/src/main/java/org/apache/jackrabbit/oak/plugins/blob/MarkSweepGarbageCollector.java b/oak-blob-plugins/src/main/java/org/apache/jackrabbit/oak/plugins/blob/MarkSweepGarbageCollector.java
index 5fe7f6bea0..578377ded5 100644
--- a/oak-blob-plugins/src/main/java/org/apache/jackrabbit/oak/plugins/blob/MarkSweepGarbageCollector.java
+++ b/oak-blob-plugins/src/main/java/org/apache/jackrabbit/oak/plugins/blob/MarkSweepGarbageCollector.java
@@ -58,6 +58,7 @@ import org.apache.commons.collections4.ListValuedMap;
import org.apache.commons.collections4.multimap.ArrayListValuedHashMap;
import org.apache.jackrabbit.guava.common.base.Stopwatch;
import org.apache.jackrabbit.guava.common.collect.Iterators;
+import org.apache.jackrabbit.guava.common.io.Closeables;
import org.apache.jackrabbit.guava.common.util.concurrent.ListenableFutureTask;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.LineIterator;
@@ -302,10 +303,14 @@ public class MarkSweepGarbageCollector implements BlobGarbageCollector {
stat.setStartTime(markers.get(uniqueSessionId).getLastModified());
}
- try (LineNumberReader reader = new LineNumberReader(new InputStreamReader(refRec.getStream()))) {
+ LineNumberReader reader = null;
+ try {
+ reader = new LineNumberReader(new InputStreamReader(refRec.getStream()));
while (reader.readLine() != null) {
}
stat.setNumLines(reader.getLineNumber());
+ } finally {
+ Closeables.close(reader, true);
}
}
}
@@ -375,13 +380,7 @@ public class MarkSweepGarbageCollector implements BlobGarbageCollector {
} finally {
statsCollector.updateDuration(sw.elapsed(TimeUnit.MILLISECONDS), TimeUnit.MILLISECONDS);
if (!LOG.isTraceEnabled() && !traceOutput) {
- try {
- fs.close();
- } catch (IOException ioe) {
- if (!threw) {
- throw ioe;
- }
- }
+ Closeables.close(fs, threw);
}
}
}
@@ -770,13 +769,7 @@ public class MarkSweepGarbageCollector implements BlobGarbageCollector {
}
} finally {
if (!traceOutput && (!LOG.isTraceEnabled() && candidates == 0)) {
- try {
- fs.close();
- } catch (IOException ioe) {
- if (!threw) {
- throw ioe;
- }
- }
+ Closeables.close(fs, threw);
}
sw.stop();
consistencyStatsCollector.updateDuration(sw.elapsed(TimeUnit.MILLISECONDS), TimeUnit.MILLISECONDS);
@@ -1098,7 +1091,7 @@ public class MarkSweepGarbageCollector implements BlobGarbageCollector {
} finally {
if (idsIter instanceof Closeable) {
try {
- ((Closeable)idsIter).close();
+ Closeables.close((Closeable) idsIter, false);
} catch (Exception e) {
LOG.debug("Error closing iterator");
}
diff --git a/oak-blob-plugins/src/main/java/org/apache/jackrabbit/oak/plugins/blob/datastore/DataStoreBlobStore.java b/oak-blob-plugins/src/main/java/org/apache/jackrabbit/oak/plugins/blob/datastore/DataStoreBlobStore.java
index 24aa6b0b65..240e519ca6 100644
--- a/oak-blob-plugins/src/main/java/org/apache/jackrabbit/oak/plugins/blob/datastore/DataStoreBlobStore.java
+++ b/oak-blob-plugins/src/main/java/org/apache/jackrabbit/oak/plugins/blob/datastore/DataStoreBlobStore.java
@@ -79,6 +79,7 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.jackrabbit.guava.common.collect.Iterators;
+import org.apache.jackrabbit.guava.common.io.Closeables;
/**
* BlobStore wrapper for DataStore. Wraps Jackrabbit 2 DataStore and expose them as BlobStores
@@ -340,15 +341,9 @@ public class DataStoreBlobStore
stats.uploadFailed();
throw new IOException(e);
} finally {
- //DataStore does not close the stream internally
+ //DataStore does not closes the stream internally
//So close the stream explicitly
- try {
- stream.close();
- } catch (IOException ioe) {
- if (!threw) {
- throw ioe;
- }
- }
+ Closeables.close(stream, threw);
}
}
@@ -369,10 +364,15 @@ public class DataStoreBlobStore
//This is inefficient as repeated calls for same blobId would involve opening new Stream
//instead clients should directly access the stream from DataRecord by special casing for
//BlobStore which implements DataStore
- try (InputStream stream = getInputStream(encodedBlobId)) {
+ InputStream stream = getInputStream(encodedBlobId);
+ boolean threw = true;
+ try {
IOUtils.skipFully(stream, pos);
int readCount = stream.read(buff, off, length);
+ threw = false;
return readCount;
+ } finally {
+ Closeables.close(stream, threw);
}
}
@@ -439,9 +439,13 @@ public class DataStoreBlobStore
@Override
public byte[] call() throws Exception {
boolean threw = true;
- try (InputStream stream = getStream(blobId.blobId)) {
+ InputStream stream = getStream(blobId.blobId);
+ try {
byte[] result = IOUtils.toByteArray(stream);
+ threw = false;
return result;
+ } finally {
+ Closeables.close(stream, threw);
}
}
});
diff --git a/oak-blob-plugins/src/main/java/org/apache/jackrabbit/oak/plugins/blob/datastore/FSBackend.java b/oak-blob-plugins/src/main/java/org/apache/jackrabbit/oak/plugins/blob/datastore/FSBackend.java
index 6ebb1fecd5..7822b115b4 100644
--- a/oak-blob-plugins/src/main/java/org/apache/jackrabbit/oak/plugins/blob/datastore/FSBackend.java
+++ b/oak-blob-plugins/src/main/java/org/apache/jackrabbit/oak/plugins/blob/datastore/FSBackend.java
@@ -30,6 +30,7 @@ import java.util.List;
import java.util.Properties;
import org.apache.commons.lang3.StringUtils;
+import org.apache.jackrabbit.guava.common.io.Closeables;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.apache.commons.io.filefilter.FileFilterUtils;
@@ -188,13 +189,12 @@ public class FSBackend extends AbstractSharedBackend {
try {
File file = new File(fsPathDir, name);
- try (FileOutputStream os = new FileOutputStream(file)) {
+ FileOutputStream os = new FileOutputStream(file);
+ try {
IOUtils.copyLarge(input, os);
} finally {
- try {
- input.close();
- } catch (IOException swallowed) {
- }
+ Closeables.close(os, true);
+ Closeables.close(input, true);
}
} catch (IOException e) {
LOG.error("Exception while adding metadata record with name {},
{}",
diff --git a/oak-blob-plugins/src/main/java/org/apache/jackrabbit/oak/plugins/blob/datastore/OakFileDataStore.java b/oak-blob-plugins/src/main/java/org/apache/jackrabbit/oak/plugins/blob/datastore/OakFileDataStore.java
index 1cddd3713c..7ad09d04c1 100644
--- a/oak-blob-plugins/src/main/java/org/apache/jackrabbit/oak/plugins/blob/datastore/OakFileDataStore.java
+++ b/oak-blob-plugins/src/main/java/org/apache/jackrabbit/oak/plugins/blob/datastore/OakFileDataStore.java
@@ -33,6 +33,7 @@ import java.util.List;
import java.util.Set;
import org.apache.commons.lang3.StringUtils;
+import org.apache.jackrabbit.guava.common.io.Closeables;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.apache.commons.io.filefilter.FileFilterUtils;
@@ -138,13 +139,12 @@ public class OakFileDataStore extends FileDataStore implements SharedDataStore {
try {
File file = new File(getPath(), name);
- try (FileOutputStream os = new FileOutputStream(file)) {
+ FileOutputStream os = new FileOutputStream(file);
+ try {
IOUtils.copyLarge(input, os);
} finally {
- try {
- input.close();
- } catch (IOException swallowed) {
- }
+ Closeables.close(os, true);
+ Closeables.close(input, true);
}
} catch (IOException e) {
LOG.error("Exception while adding metadata record with name {},
{}",
diff --git a/oak-blob-plugins/src/test/java/org/apache/jackrabbit/oak/plugins/blob/UploadStagingCacheTest.java b/oak-blob-plugins/src/test/java/org/apache/jackrabbit/oak/plugins/blob/UploadStagingCacheTest.java
index 82748f1e81..77ffbf5b21 100644
--- a/oak-blob-plugins/src/test/java/org/apache/jackrabbit/oak/plugins/blob/UploadStagingCacheTest.java
+++ b/oak-blob-plugins/src/test/java/org/apache/jackrabbit/oak/plugins/blob/UploadStagingCacheTest.java
@@ -42,6 +42,7 @@ import java.util.concurrent.atomic.AtomicInteger;
import ch.qos.logback.classic.Level;
import org.apache.jackrabbit.guava.common.collect.Iterators;
+import org.apache.jackrabbit.guava.common.io.Closeables;
import org.apache.jackrabbit.guava.common.io.Closer;
import org.apache.jackrabbit.guava.common.util.concurrent.Futures;
import org.apache.jackrabbit.guava.common.util.concurrent.ListenableFuture;
@@ -692,7 +693,7 @@ public class UploadStagingCacheTest extends AbstractDataStoreCacheTest {
// Create pre-upgrade load
File home = folder.newFolder();
File pendingUploadsFile = new File(home, DataStoreCacheUpgradeUtils.UPLOAD_MAP);
- createGibberishLoad(pendingUploadsFile);
+ createGibberishLoad(home, pendingUploadsFile);
LogCustomizer lc = LogCustomizer.forLogger(DataStoreCacheUpgradeUtils.class.getName())
.filter(Level.WARN)
@@ -716,9 +717,13 @@ public class UploadStagingCacheTest extends AbstractDataStoreCacheTest {
}
- private void createGibberishLoad(File pendingUploadFile) throws IOException {
- try (BufferedWriter writer = new BufferedWriter(new FileWriter(pendingUploadFile, StandardCharsets.UTF_8))) {
+ private void createGibberishLoad(File home, File pendingUploadFile) throws IOException {
+ BufferedWriter writer = null;
+ try {
+ writer = new BufferedWriter(new FileWriter(pendingUploadFile, StandardCharsets.UTF_8));
FileIOUtils.writeAsLine(writer, "jerhgiuheirghoeoorqehgsjlwjpfkkwpkf", false);
+ } finally {
+ Closeables.close(writer, true);
}
}
diff --git a/oak-blob-plugins/src/test/java/org/apache/jackrabbit/oak/plugins/blob/datastore/BlobIdTrackerClusterSharedTest.java b/oak-blob-plugins/src/test/java/org/apache/jackrabbit/oak/plugins/blob/datastore/BlobIdTrackerClusterSharedTest.java
index 4ac3dac862..28bc17d3af 100644
--- a/oak-blob-plugins/src/test/java/org/apache/jackrabbit/oak/plugins/blob/datastore/BlobIdTrackerClusterSharedTest.java
+++ b/oak-blob-plugins/src/test/java/org/apache/jackrabbit/oak/plugins/blob/datastore/BlobIdTrackerClusterSharedTest.java
@@ -41,6 +41,7 @@ import org.junit.rules.TemporaryFolder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import static org.apache.jackrabbit.guava.common.io.Closeables.close;
import static java.lang.String.valueOf;
import static java.util.UUID.randomUUID;
import static java.util.concurrent.Executors.newSingleThreadScheduledExecutor;
@@ -223,11 +224,11 @@ public class BlobIdTrackerClusterSharedTest {
Set<String> retrieved = new HashSet<>();
Iterator<String> iter = tracker.get();
log.info("retrieving blob ids");
- while (iter.hasNext()) {
+ while(iter.hasNext()) {
retrieved.add(iter.next());
}
if (iter instanceof Closeable) {
- ((Closeable)iter).close();
+ close((Closeable)iter, true);
}
return retrieved;
}
diff --git a/oak-commons/src/main/java/org/apache/jackrabbit/oak/commons/FileIOUtils.java b/oak-commons/src/main/java/org/apache/jackrabbit/oak/commons/FileIOUtils.java
index 6f9a120d5a..b4a9e6a3c0 100644
--- a/oak-commons/src/main/java/org/apache/jackrabbit/oak/commons/FileIOUtils.java
+++ b/oak-commons/src/main/java/org/apache/jackrabbit/oak/commons/FileIOUtils.java
@@ -20,6 +20,7 @@ import static java.io.File.createTempFile;
import static java.nio.charset.StandardCharsets.UTF_8;
import static org.apache.commons.io.FileUtils.forceDelete;
import static org.apache.commons.io.IOUtils.copyLarge;
+import static org.apache.jackrabbit.guava.common.io.Closeables.close;
import static org.apache.jackrabbit.oak.commons.sort.EscapeUtils.escapeLineBreak;
import static org.apache.jackrabbit.oak.commons.sort.EscapeUtils.unescapeLineBreaks;
import static org.apache.jackrabbit.oak.commons.sort.ExternalSort.mergeSortedFiles;
@@ -139,20 +140,25 @@ public final class FileIOUtils {
* @throws IOException
*/
public static void append(List<File> files, File appendTo, boolean delete) throws IOException {
+ OutputStream appendStream = null;
+ boolean threw = true;
- try (OutputStream appendStream = new BufferedOutputStream(new FileOutputStream(appendTo, true))) {
+ try {
+ appendStream = new BufferedOutputStream(new FileOutputStream(appendTo, true));
for (File f : files) {
try (InputStream iStream = new FileInputStream(f)) {
copyLarge(iStream, appendStream);
}
}
+ threw = false;
} finally {
if (delete) {
for (File f : files) {
f.delete();
}
}
+ close(appendStream, threw);
}
}
@@ -221,9 +227,11 @@ public final class FileIOUtils {
*/
public static int writeStrings(Iterator<String> iterator, File f, boolean escape,
@NotNull Function<String, String> transformer, @Nullable Logger logger, @Nullable String message) throws IOException {
+ BufferedWriter writer = new BufferedWriter(new FileWriter(f, UTF_8));
+ boolean threw = true;
int count = 0;
- try (BufferedWriter writer = new BufferedWriter(new FileWriter(f, UTF_8))) {
+ try {
while (iterator.hasNext()) {
writeAsLine(writer, transformer.apply(iterator.next()), escape);
count++;
@@ -233,6 +241,9 @@ public final class FileIOUtils {
}
}
}
+ threw = false;
+ } finally {
+ close(writer, threw);
}
return count;
}
@@ -246,10 +257,13 @@ public final class FileIOUtils {
* @throws IOException
*/
public static Set<String> readStringsAsSet(InputStream stream, boolean unescape) throws IOException {
+ BufferedReader reader = null;
Set<String> set = new HashSet<>();
+ boolean threw = true;
- try (BufferedReader reader = new BufferedReader(new InputStreamReader(stream, UTF_8))) {
- String line;
+ try {
+ reader = new BufferedReader(new InputStreamReader(stream, UTF_8));
+ String line = null;
while ((line = reader.readLine()) != null) {
if (unescape) {
set.add(unescapeLineBreaks(line));
@@ -257,6 +271,9 @@ public final class FileIOUtils {
set.add(line);
}
}
+ threw = false;
+ } finally {
+ close(reader, threw);
}
return set;
}
diff --git a/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/directory/ActiveDeletedBlobCollectorFactory.java b/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/directory/ActiveDeletedBlobCollectorFactory.java
index 323c2056aa..92ee7e34cc 100644
--- a/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/directory/ActiveDeletedBlobCollectorFactory.java
+++ b/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/directory/ActiveDeletedBlobCollectorFactory.java
@@ -38,6 +38,7 @@ import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
+import org.apache.jackrabbit.guava.common.io.Closeables;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.LineIterator;
import org.apache.commons.io.filefilter.IOFileFilter;
@@ -323,11 +324,7 @@ public class ActiveDeletedBlobCollectorFactory {
long startBlobTrackerSyncTime = clock.getTime();
// Synchronize deleted blob ids with the blob id tracker
try {
- try {
- idTempDeleteWriter.close();
- } catch (IOException ex) {
- LOG.warn("IOException thrown while closing
idTempDeleteWriter", ex);
- }
+ Closeables.close(idTempDeleteWriter, true);
if (blobIdsTracked && numBlobsDeleted > 0) {
BlobTracker tracker = ((BlobTrackingStore) blobStore).getTracker();
diff --git a/oak-run/src/main/java/org/apache/jackrabbit/oak/run/DataStoreCheckCommand.java b/oak-run/src/main/java/org/apache/jackrabbit/oak/run/DataStoreCheckCommand.java
index 1d55c0c768..389b9bb351 100644
--- a/oak-run/src/main/java/org/apache/jackrabbit/oak/run/DataStoreCheckCommand.java
+++ b/oak-run/src/main/java/org/apache/jackrabbit/oak/run/DataStoreCheckCommand.java
@@ -17,6 +17,7 @@
package org.apache.jackrabbit.oak.run;
import static org.apache.jackrabbit.guava.common.base.Stopwatch.createStarted;
+import static org.apache.jackrabbit.guava.common.io.Closeables.close;
import static java.io.File.createTempFile;
import static java.util.Arrays.asList;
import static org.apache.commons.io.FileUtils.forceDelete;
@@ -46,6 +47,7 @@ import java.util.concurrent.atomic.AtomicInteger;
import org.apache.jackrabbit.guava.common.base.Splitter;
import org.apache.jackrabbit.guava.common.base.Stopwatch;
+import org.apache.jackrabbit.guava.common.io.Closeables;
import org.apache.jackrabbit.guava.common.io.Closer;
import com.mongodb.MongoClient;
import com.mongodb.MongoClientURI;
@@ -424,8 +426,10 @@ public class DataStoreCheckCommand implements Command {
private static void retrieveBlobReferences(GarbageCollectableBlobStore blobStore, BlobReferenceRetriever marker,
File marked, String dsType, boolean isVerbose) throws IOException {
+ final BufferedWriter writer = new BufferedWriter(new FileWriter(marked, StandardCharsets.UTF_8));
final AtomicInteger count = new AtomicInteger();
- try (BufferedWriter writer = new BufferedWriter(new FileWriter(marked, StandardCharsets.UTF_8))) {
+ boolean threw = true;
+ try {
final GarbageCollectableBlobStore finalBlobStore = blobStore;
System.out.println("Starting dump of blob references");
@@ -460,6 +464,9 @@ public class DataStoreCheckCommand implements Command {
System.out.println(count.get() + " blob references found");
System.out.println("Finished in " +
watch.elapsed(TimeUnit.SECONDS) + " seconds");
+ threw = false;
+ } finally {
+ close(writer, threw);
}
}
@@ -536,11 +543,14 @@ public class DataStoreCheckCommand implements Command {
}
public void traverse(String ... paths) throws IOException {
+ BufferedWriter writer = null;
final AtomicInteger count = new AtomicInteger();
+ boolean threw = true;
System.out.println("Starting dump of blob references by
traversing");
Stopwatch watch = createStarted();
- try (BufferedWriter writer = new BufferedWriter(new FileWriter(references, StandardCharsets.UTF_8))) {
+ try {
+ writer = new BufferedWriter(new FileWriter(references, StandardCharsets.UTF_8));
if (paths.length == 0) {
traverseChildren(nodeStore.getRoot(), "/", writer, count);
} else {
@@ -560,6 +570,9 @@ public class DataStoreCheckCommand implements Command {
System.out.println(count.get() + " blob references found");
System.out.println("Finished in " +
watch.elapsed(TimeUnit.SECONDS) + " seconds");
+ threw = false;
+ } finally {
+ Closeables.close(writer, threw);
}
}
diff --git a/oak-run/src/main/java/org/apache/jackrabbit/oak/run/DataStoreCommand.java b/oak-run/src/main/java/org/apache/jackrabbit/oak/run/DataStoreCommand.java
index 225e6589a5..1ead8f7cc7 100644
--- a/oak-run/src/main/java/org/apache/jackrabbit/oak/run/DataStoreCommand.java
+++ b/oak-run/src/main/java/org/apache/jackrabbit/oak/run/DataStoreCommand.java
@@ -41,6 +41,7 @@ import java.util.stream.StreamSupport;
import org.apache.jackrabbit.guava.common.base.Splitter;
import org.apache.jackrabbit.guava.common.base.Stopwatch;
+import org.apache.jackrabbit.guava.common.io.Closeables;
import org.apache.jackrabbit.guava.common.io.Closer;
import joptsimple.OptionParser;
import org.apache.commons.io.FileUtils;
@@ -238,13 +239,7 @@ public class DataStoreCommand implements Command {
FileUtils.copyFile(referencesTemp, references);
} finally {
- try {
- writer.close();
- } catch (IOException ex) {
- if (!threw) {
- throw ex;
- }
- }
+ Closeables.close(writer, threw);
}
} else if (dataStoreOpts.dumpIds()) {
log.info("Initiating dump of data store IDs");
diff --git a/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/DefaultSegmentWriter.java b/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/DefaultSegmentWriter.java
index 0b070810ae..259e16dcf1 100644
--- a/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/DefaultSegmentWriter.java
+++ b/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/DefaultSegmentWriter.java
@@ -55,6 +55,7 @@ import java.util.Map;
import javax.jcr.PropertyType;
import org.apache.commons.io.IOUtils;
+import org.apache.jackrabbit.guava.common.io.Closeables;
import org.apache.jackrabbit.oak.api.Blob;
import org.apache.jackrabbit.oak.api.PropertyState;
import org.apache.jackrabbit.oak.api.Type;
@@ -644,14 +645,7 @@ public class DefaultSegmentWriter implements SegmentWriter {
threw = false;
return id;
} finally {
- try {
- stream.close();
- } catch (IOException ex) {
- if (!threw) {
- throw ex;
- }
- LOG.warn("IOException thrown while closing stream", ex);
- }
+ Closeables.close(stream, threw);
}
}
diff --git a/oak-store-document/src/main/java/org/apache/jackrabbit/oak/plugins/document/mongo/MongoDocumentStore.java b/oak-store-document/src/main/java/org/apache/jackrabbit/oak/plugins/document/mongo/MongoDocumentStore.java
index 8bd3a48e12..a91470d0ed 100644
--- a/oak-store-document/src/main/java/org/apache/jackrabbit/oak/plugins/document/mongo/MongoDocumentStore.java
+++ b/oak-store-document/src/main/java/org/apache/jackrabbit/oak/plugins/document/mongo/MongoDocumentStore.java
@@ -44,6 +44,7 @@ import java.util.stream.StreamSupport;
import org.apache.jackrabbit.guava.common.base.Stopwatch;
import org.apache.jackrabbit.guava.common.collect.Iterables;
import org.apache.jackrabbit.guava.common.collect.Iterators;
+import org.apache.jackrabbit.guava.common.io.Closeables;
import org.apache.jackrabbit.guava.common.util.concurrent.AtomicDouble;
import com.mongodb.Block;
import com.mongodb.DBObject;
@@ -2029,7 +2030,7 @@ public class MongoDocumentStore implements DocumentStore {
clusterNodesConnection.close();
}
try {
- throttlingMetricsUpdater.close();
+ Closeables.close(throttlingMetricsUpdater, false);
} catch (IOException e) {
LOG.warn("Error occurred while closing throttlingMetricsUpdater",
e);
}
diff --git a/oak-store-document/src/test/java/org/apache/jackrabbit/oak/plugins/document/MongoBlobGCTest.java b/oak-store-document/src/test/java/org/apache/jackrabbit/oak/plugins/document/MongoBlobGCTest.java
index aa4a58ff6f..8c5b97018e 100644
--- a/oak-store-document/src/test/java/org/apache/jackrabbit/oak/plugins/document/MongoBlobGCTest.java
+++ b/oak-store-document/src/test/java/org/apache/jackrabbit/oak/plugins/document/MongoBlobGCTest.java
@@ -36,6 +36,7 @@ import ch.qos.logback.classic.Level;
import org.apache.commons.lang3.StringUtils;
import org.apache.jackrabbit.guava.common.base.Splitter;
import org.apache.jackrabbit.guava.common.base.Stopwatch;
+import org.apache.jackrabbit.guava.common.io.Closeables;
import com.mongodb.BasicDBObject;
import com.mongodb.ReadPreference;
import com.mongodb.client.MongoCollection;
@@ -397,18 +398,26 @@ public class MongoBlobGCTest extends AbstractMongoConnectionTest {
}
private static void assertBlobReferences(Set<String> expected, String rootFolder) throws IOException {
- try (InputStream is = new FileInputStream(getMarkedFile(rootFolder))) {
+ InputStream is = null;
+ try {
+ is = new FileInputStream(getMarkedFile(rootFolder));
Set<String> records = FileIOUtils.readStringsAsSet(is, true);
assertEquals(expected, records);
+ } finally {
+ Closeables.close(is, false);
}
}
private static void assertBlobReferenceRecords(int expected, String rootFolder) throws IOException {
- try (InputStream is = new FileInputStream(getMarkedFile(rootFolder))) {
+ InputStream is = null;
+ try {
+ is = new FileInputStream(getMarkedFile(rootFolder));
Set<String> records = FileIOUtils.readStringsAsSet(is, true);
for (String rec : records) {
assertEquals(expected, Splitter.on(",").omitEmptyStrings().splitToList(rec).size());
}
+ } finally {
+ Closeables.close(is, false);
}
}
@@ -500,36 +509,42 @@ public class MongoBlobGCTest extends AbstractMongoConnectionTest {
this.maxLastModifiedInterval = maxLastModifiedInterval;
this.additionalBlobs = new HashSet<>();
}
-
+
@Override
protected void markAndSweep(boolean markOnly, boolean forceBlobRetrieve) throws Exception {
-
- try (GarbageCollectorFileState fs = new GarbageCollectorFileState(root)) {
+ boolean threw = true;
+ GarbageCollectorFileState fs = new GarbageCollectorFileState(root);
+ try {
Stopwatch sw = Stopwatch.createStarted();
LOG.info("Starting Test Blob garbage collection");
-
+
// Sleep a little more than the max interval to get over the interval for valid blobs
Thread.sleep(maxLastModifiedInterval + 1000);
LOG.info("Slept {} to make blobs old", maxLastModifiedInterval
+ 1000);
-
+
long markStart = System.currentTimeMillis();
mark(fs);
LOG.info("Mark finished");
-
+
additionalBlobs = createAdditional();
-
+
if (!markOnly) {
Thread.sleep(maxLastModifiedInterval + 100);
LOG.info("Slept {} to make additional blobs old",
maxLastModifiedInterval + 100);
-
+
long deleteCount = sweep(fs, markStart, forceBlobRetrieve);
-
+ threw = false;
+
LOG.info("Blob garbage collection completed in {}. Number
of blobs deleted [{}]", sw.toString(),
deleteCount, maxLastModifiedInterval);
}
+ } finally {
+ if (!LOG.isTraceEnabled()) {
+ Closeables.close(fs, threw);
+ }
}
}
-
+
public HashSet<String> createAdditional() throws Exception {
HashSet<String> blobSet = new HashSet<String>();
DocumentNodeStore s = mk.getNodeStore();