This is an automated email from the ASF dual-hosted git repository.
reschke pushed a commit to branch trunk
in repository https://gitbox.apache.org/repos/asf/jackrabbit-oak.git
The following commit(s) were added to refs/heads/trunk by this push:
new a159cfa979 OAK-10691: remove use of Guava Charsets class (#1538)
a159cfa979 is described below
commit a159cfa979be8e8fd6441eaa68af3159f5999f21
Author: Julian Reschke <[email protected]>
AuthorDate: Mon Jun 17 16:54:01 2024 +0200
OAK-10691: remove use of Guava Charsets class (#1538)
---
.../jackrabbit/oak/scalability/ScalabilityRunner.java | 4 ++--
.../cloud/azure/blobstorage/AzureBlobStoreBackend.java | 8 ++++----
.../oak/plugins/blob/MarkSweepGarbageCollector.java | 8 ++++----
.../oak/plugins/blob/datastore/BlobIdTracker.java | 4 ++--
.../oak/plugins/blob/datastore/OakFileDataStore.java | 4 ++--
.../directaccess/DataRecordDownloadOptions.java | 3 +--
.../oak/plugins/blob/ConsolidatedDataStoreStatsTest.java | 4 ++--
.../oak/plugins/blob/UploadStagingCacheTest.java | 4 ++--
.../AbstractDataRecordAccessProviderTest.java | 6 +++---
.../directaccess/DataRecordDownloadOptionsTest.java | 6 +++---
.../plugins/blob/serializer/FSBlobSerializerTest.java | 4 ++--
.../jackrabbit/oak/spi/blob/AbstractBlobStore.java | 4 ++--
.../apache/jackrabbit/oak/spi/blob/split/BlobIdSet.java | 4 ++--
.../oak/plugins/index/datastore/DataStoreTextWriter.java | 10 +++++-----
.../plugins/index/importer/IndexDefinitionUpdater.java | 4 ++--
.../oak/plugins/index/property/PropertyIndexUtil.java | 4 ++--
.../oak/plugins/nodetype/write/NodeTypeRegistry.java | 4 ++--
.../oak/plugins/index/importer/IndexImporterTest.java | 6 +++---
.../apache/jackrabbit/oak/http/HtmlRepresentation.java | 4 ++--
.../org/apache/jackrabbit/oak/jcr/TestContentLoader.java | 4 ++--
.../directory/ActiveDeletedBlobCollectorFactory.java | 4 ++--
.../index/lucene/directory/IndexRootDirectory.java | 4 ++--
.../plugins/index/lucene/LucenePropertyIndexTest.java | 6 +++---
.../apache/jackrabbit/oak/run/osgi/ConfigTracker.java | 4 ++--
.../indexer/document/flatfile/NodeStateEntrySorter.java | 4 ++--
.../indexer/document/flatfile/StateInBytesHolder.java | 6 +++---
.../document/flatfile/TraverseWithSortStrategy.java | 4 ++--
.../jackrabbit/oak/exporter/NodeStateSerializer.java | 6 +++---
.../oak/index/IndexConsistencyCheckPrinter.java | 4 ++--
.../oak/plugins/tika/CSVFileBinaryResourceProvider.java | 4 ++--
.../jackrabbit/oak/plugins/tika/CSVFileGenerator.java | 4 ++--
.../jackrabbit/oak/plugins/tika/TextPopulator.java | 4 ++--
.../apache/jackrabbit/oak/run/DataStoreCheckCommand.java | 8 ++++----
.../org/apache/jackrabbit/oak/run/DataStoreCommand.java | 8 ++++----
.../jackrabbit/oak/exporter/NodeStateSerializerTest.java | 6 +++---
.../java/org/apache/jackrabbit/oak/index/ReindexIT.java | 4 ++--
.../plugins/tika/CSVFileBinaryResourceProviderTest.java | 6 +++---
.../jackrabbit/oak/plugins/tika/TextPopulatorTest.java | 4 ++--
.../jackrabbit/oak/plugins/tika/TikaHelperTest.java | 6 +++---
.../apache/jackrabbit/oak/run/DataStoreCheckTest.java | 6 +++---
.../jackrabbit/oak/segment/azure/AzureGCJournalFile.java | 4 ++--
.../jackrabbit/oak/segment/DefaultSegmentWriter.java | 6 +++---
.../java/org/apache/jackrabbit/oak/segment/Segment.java | 4 ++--
.../org/apache/jackrabbit/oak/segment/SegmentBlob.java | 4 ++--
.../jackrabbit/oak/segment/SegmentBufferWriter.java | 4 ++--
.../org/apache/jackrabbit/oak/segment/SegmentDump.java | 4 ++--
.../org/apache/jackrabbit/oak/segment/SegmentParser.java | 6 +++---
.../org/apache/jackrabbit/oak/segment/SegmentStream.java | 6 +++---
.../jackrabbit/oak/segment/data/SegmentDataV12.java | 6 +++---
.../jackrabbit/oak/segment/file/LocalGCJournalFile.java | 6 +++---
.../oak/segment/file/tar/SegmentTarManager.java | 6 +++---
.../oak/segment/file/tar/SegmentTarWriter.java | 16 ++++++++--------
.../file/tar/binaries/BinaryReferencesIndexLoaderV1.java | 4 ++--
.../file/tar/binaries/BinaryReferencesIndexLoaderV2.java | 4 ++--
.../file/tar/binaries/BinaryReferencesIndexWriter.java | 6 +++---
.../standby/codec/GetReferencesResponseEncoder.java | 5 +++--
.../oak/segment/standby/codec/ResponseDecoder.java | 8 ++++----
.../jackrabbit/oak/segment/DefaultSegmentWriterTest.java | 10 +++++-----
.../jackrabbit/oak/segment/file/tar/TarFileTest.java | 8 ++++----
.../jackrabbit/oak/segment/file/tar/TarWriterTest.java | 4 ++--
.../tar/binaries/BinaryReferencesIndexLoaderTest.java | 4 ++--
.../tar/binaries/BinaryReferencesIndexLoaderV1Test.java | 4 ++--
.../tar/binaries/BinaryReferencesIndexLoaderV2Test.java | 4 ++--
.../jackrabbit/oak/segment/standby/StandbyTestUtils.java | 4 ++--
.../standby/codec/GetHeadResponseEncoderTest.java | 5 +++--
.../standby/codec/GetReferencesResponseEncoderTest.java | 5 +++--
.../oak/segment/standby/codec/ResponseDecoderTest.java | 14 +++++++-------
.../oak/segment/tool/CheckRepositoryTestBase.java | 4 ++--
.../jackrabbit/oak/plugins/memory/StringBasedBlob.java | 7 +++----
.../apache/jackrabbit/oak/plugins/value/Conversions.java | 4 ++--
.../apache/jackrabbit/oak/upgrade/RepositoryUpgrade.java | 6 +++---
.../oak/upgrade/blob/LengthCachingDataStore.java | 6 +++---
.../oak/upgrade/nodestate/NameFilteringNodeState.java | 6 +++---
73 files changed, 198 insertions(+), 197 deletions(-)
diff --git
a/oak-benchmarks/src/main/java/org/apache/jackrabbit/oak/scalability/ScalabilityRunner.java
b/oak-benchmarks/src/main/java/org/apache/jackrabbit/oak/scalability/ScalabilityRunner.java
index 478843837e..9d2728b1ee 100644
---
a/oak-benchmarks/src/main/java/org/apache/jackrabbit/oak/scalability/ScalabilityRunner.java
+++
b/oak-benchmarks/src/main/java/org/apache/jackrabbit/oak/scalability/ScalabilityRunner.java
@@ -20,6 +20,7 @@ package org.apache.jackrabbit.oak.scalability;
import java.io.IOException;
import java.io.PrintStream;
+import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
@@ -27,7 +28,6 @@ import java.util.List;
import java.util.Map;
import java.util.Set;
-import org.apache.jackrabbit.guava.common.base.Charsets;
import org.apache.jackrabbit.guava.common.base.Splitter;
import org.apache.jackrabbit.guava.common.collect.Lists;
import org.apache.jackrabbit.guava.common.collect.Maps;
@@ -179,7 +179,7 @@ public class ScalabilityRunner {
if (options.has(scalabilityOptions.getCsvFile())) {
out =
new
PrintStream(FileUtils.openOutputStream(scalabilityOptions.getCsvFile().value(options),
true), false,
- Charsets.UTF_8.name());
+ StandardCharsets.UTF_8);
}
for (ScalabilitySuite suite : suites) {
if (suite instanceof CSVResultGenerator) {
diff --git
a/oak-blob-cloud-azure/src/main/java/org/apache/jackrabbit/oak/blob/cloud/azure/blobstorage/AzureBlobStoreBackend.java
b/oak-blob-cloud-azure/src/main/java/org/apache/jackrabbit/oak/blob/cloud/azure/blobstorage/AzureBlobStoreBackend.java
index dd01df74ec..b7338594a3 100644
---
a/oak-blob-cloud-azure/src/main/java/org/apache/jackrabbit/oak/blob/cloud/azure/blobstorage/AzureBlobStoreBackend.java
+++
b/oak-blob-cloud-azure/src/main/java/org/apache/jackrabbit/oak/blob/cloud/azure/blobstorage/AzureBlobStoreBackend.java
@@ -33,6 +33,7 @@ import java.io.UnsupportedEncodingException;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URLEncoder;
+import java.nio.charset.StandardCharsets;
import java.security.InvalidKeyException;
import java.time.Instant;
import java.util.Collection;
@@ -46,7 +47,6 @@ import java.util.Queue;
import java.util.UUID;
import java.util.concurrent.TimeUnit;
-import org.apache.jackrabbit.guava.common.base.Charsets;
import org.apache.jackrabbit.guava.common.base.Function;
import org.apache.jackrabbit.guava.common.base.Strings;
import org.apache.jackrabbit.guava.common.cache.Cache;
@@ -1179,9 +1179,9 @@ public class AzureBlobStoreBackend extends
AbstractSharedBackend {
StringBuilder builder = new StringBuilder();
for (Map.Entry<String, String> e :
additionalQueryParams.entrySet()) {
builder.append("&");
- builder.append(URLEncoder.encode(e.getKey(),
Charsets.UTF_8.name()));
+ builder.append(URLEncoder.encode(e.getKey(),
StandardCharsets.UTF_8));
builder.append("=");
- builder.append(URLEncoder.encode(e.getValue(),
Charsets.UTF_8.name()));
+ builder.append(URLEncoder.encode(e.getValue(),
StandardCharsets.UTF_8));
}
uriString += builder.toString();
}
@@ -1191,7 +1191,7 @@ public class AzureBlobStoreBackend extends
AbstractSharedBackend {
catch (DataStoreException e) {
LOG.error("No connection to Azure Blob Storage", e);
}
- catch (URISyntaxException | InvalidKeyException |
UnsupportedEncodingException e) {
+ catch (URISyntaxException | InvalidKeyException e) {
LOG.error("Can't generate a presigned URI for key {}", key, e);
}
catch (StorageException e) {
diff --git
a/oak-blob-plugins/src/main/java/org/apache/jackrabbit/oak/plugins/blob/MarkSweepGarbageCollector.java
b/oak-blob-plugins/src/main/java/org/apache/jackrabbit/oak/plugins/blob/MarkSweepGarbageCollector.java
index 078f058801..9f016f161f 100644
---
a/oak-blob-plugins/src/main/java/org/apache/jackrabbit/oak/plugins/blob/MarkSweepGarbageCollector.java
+++
b/oak-blob-plugins/src/main/java/org/apache/jackrabbit/oak/plugins/blob/MarkSweepGarbageCollector.java
@@ -35,6 +35,7 @@ import java.io.FileReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.LineNumberReader;
+import java.nio.charset.StandardCharsets;
import java.sql.Timestamp;
import java.util.ArrayDeque;
import java.util.Collections;
@@ -50,7 +51,6 @@ import java.util.concurrent.Executor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
-import org.apache.jackrabbit.guava.common.base.Charsets;
import org.apache.jackrabbit.guava.common.base.Function;
import org.apache.jackrabbit.guava.common.base.Joiner;
import org.apache.jackrabbit.guava.common.base.StandardSystemProperty;
@@ -519,10 +519,10 @@ public class MarkSweepGarbageCollector implements
BlobGarbageCollector {
long deletedSize = 0;
int numDeletedSizeAvailable = 0;
try {
- removesWriter = Files.newWriter(fs.getGarbage(), Charsets.UTF_8);
+ removesWriter = Files.newWriter(fs.getGarbage(),
StandardCharsets.UTF_8);
ArrayDeque<String> removesQueue = new ArrayDeque<String>();
iterator =
- FileUtils.lineIterator(fs.getGcCandidates(),
Charsets.UTF_8.name());
+ FileUtils.lineIterator(fs.getGcCandidates(),
StandardCharsets.UTF_8.name());
Iterator<List<String>> partitions = Iterators.partition(iterator,
getBatchCount());
while (partitions.hasNext()) {
@@ -627,7 +627,7 @@ public class MarkSweepGarbageCollector implements
BlobGarbageCollector {
* @param logPath whether to log path in the file or not
*/
protected void iterateNodeTree(GarbageCollectorFileState fs, final boolean
logPath) throws IOException {
- final BufferedWriter writer = Files.newWriter(fs.getMarkedRefs(),
Charsets.UTF_8);
+ final BufferedWriter writer = Files.newWriter(fs.getMarkedRefs(),
StandardCharsets.UTF_8);
final AtomicInteger count = new AtomicInteger();
try {
marker.collectReferences(
diff --git
a/oak-blob-plugins/src/main/java/org/apache/jackrabbit/oak/plugins/blob/datastore/BlobIdTracker.java
b/oak-blob-plugins/src/main/java/org/apache/jackrabbit/oak/plugins/blob/datastore/BlobIdTracker.java
index 926f450b7b..ad4f653c57 100644
---
a/oak-blob-plugins/src/main/java/org/apache/jackrabbit/oak/plugins/blob/datastore/BlobIdTracker.java
+++
b/oak-blob-plugins/src/main/java/org/apache/jackrabbit/oak/plugins/blob/datastore/BlobIdTracker.java
@@ -23,6 +23,7 @@ import java.io.Closeable;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
+import java.nio.charset.StandardCharsets;
import java.nio.file.Path;
import java.util.Iterator;
import java.util.List;
@@ -46,7 +47,6 @@ import org.jetbrains.annotations.Nullable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import static org.apache.jackrabbit.guava.common.base.Charsets.UTF_8;
import static org.apache.jackrabbit.guava.common.base.Predicates.alwaysTrue;
import static org.apache.jackrabbit.guava.common.collect.Iterables.transform;
import static org.apache.jackrabbit.guava.common.collect.Lists.newArrayList;
@@ -678,7 +678,7 @@ public class BlobIdTracker implements Closeable,
BlobTracker {
close();
processFile = new File(rootDir, prefix +
IN_PROCESS.getFileNameSuffix());
- writer = newWriter(processFile, UTF_8);
+ writer = newWriter(processFile, StandardCharsets.UTF_8);
LOG.info("Created new process file and writer over {} ",
processFile.getAbsolutePath());
}
diff --git
a/oak-blob-plugins/src/main/java/org/apache/jackrabbit/oak/plugins/blob/datastore/OakFileDataStore.java
b/oak-blob-plugins/src/main/java/org/apache/jackrabbit/oak/plugins/blob/datastore/OakFileDataStore.java
index 9e16f39a8b..057f92bfd9 100644
---
a/oak-blob-plugins/src/main/java/org/apache/jackrabbit/oak/plugins/blob/datastore/OakFileDataStore.java
+++
b/oak-blob-plugins/src/main/java/org/apache/jackrabbit/oak/plugins/blob/datastore/OakFileDataStore.java
@@ -24,6 +24,7 @@ import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.lang.ref.WeakReference;
+import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
@@ -34,7 +35,6 @@ import java.util.Iterator;
import java.util.List;
import java.util.Set;
-import org.apache.jackrabbit.guava.common.base.Charsets;
import org.apache.jackrabbit.guava.common.base.Strings;
import org.apache.jackrabbit.guava.common.io.BaseEncoding;
import org.apache.jackrabbit.guava.common.io.Closeables;
@@ -111,7 +111,7 @@ public class OakFileDataStore extends FileDataStore
implements SharedDataStore {
* @see org.apache.jackrabbit.oak.commons.PropertiesUtil#populate(Object,
java.util.Map, boolean)
*/
public void setReferenceKeyPlainText(String textKey) {
- this.referenceKey = textKey.getBytes(Charsets.UTF_8);
+ this.referenceKey = textKey.getBytes(StandardCharsets.UTF_8);
}
public void setReferenceKey(byte[] referenceKey) {
diff --git
a/oak-blob-plugins/src/main/java/org/apache/jackrabbit/oak/plugins/blob/datastore/directaccess/DataRecordDownloadOptions.java
b/oak-blob-plugins/src/main/java/org/apache/jackrabbit/oak/plugins/blob/datastore/directaccess/DataRecordDownloadOptions.java
index 05d37328b5..5963cf2e6e 100644
---
a/oak-blob-plugins/src/main/java/org/apache/jackrabbit/oak/plugins/blob/datastore/directaccess/DataRecordDownloadOptions.java
+++
b/oak-blob-plugins/src/main/java/org/apache/jackrabbit/oak/plugins/blob/datastore/directaccess/DataRecordDownloadOptions.java
@@ -27,7 +27,6 @@ import org.apache.jackrabbit.oak.api.blob.BlobDownloadOptions;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
-import org.apache.jackrabbit.guava.common.base.Charsets;
import org.apache.jackrabbit.guava.common.base.Joiner;
import org.apache.jackrabbit.guava.common.base.Strings;
import org.apache.jackrabbit.guava.common.collect.Sets;
@@ -158,7 +157,7 @@ public class DataRecordDownloadOptions {
private String formatContentDispositionHeader(@NotNull final String
dispositionType,
@NotNull final String
fileName,
@Nullable final String
rfc8187EncodedFileName) {
- Charset ISO_8859_1 = Charsets.ISO_8859_1;
+ Charset ISO_8859_1 = StandardCharsets.ISO_8859_1;
String iso_8859_1_fileName = new String(
ISO_8859_1.encode(fileName).array(),
ISO_8859_1
diff --git
a/oak-blob-plugins/src/test/java/org/apache/jackrabbit/oak/plugins/blob/ConsolidatedDataStoreStatsTest.java
b/oak-blob-plugins/src/test/java/org/apache/jackrabbit/oak/plugins/blob/ConsolidatedDataStoreStatsTest.java
index fd3688217a..0b37a8b191 100644
---
a/oak-blob-plugins/src/test/java/org/apache/jackrabbit/oak/plugins/blob/ConsolidatedDataStoreStatsTest.java
+++
b/oak-blob-plugins/src/test/java/org/apache/jackrabbit/oak/plugins/blob/ConsolidatedDataStoreStatsTest.java
@@ -21,6 +21,7 @@ import java.io.File;
import java.io.FileInputStream;
import java.io.InputStream;
import java.io.OutputStream;
+import java.nio.charset.StandardCharsets;
import java.security.DigestOutputStream;
import java.security.MessageDigest;
import java.util.List;
@@ -30,7 +31,6 @@ import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;
-import org.apache.jackrabbit.guava.common.base.Charsets;
import org.apache.jackrabbit.guava.common.base.Optional;
import org.apache.jackrabbit.guava.common.collect.Lists;
import org.apache.jackrabbit.guava.common.io.Closer;
@@ -501,6 +501,6 @@ public class ConsolidatedDataStoreStatsTest extends
AbstractDataStoreCacheTest {
}
private static InputStream getStream(String str) {
- return new ByteArrayInputStream(str.getBytes(Charsets.UTF_8));
+ return new ByteArrayInputStream(str.getBytes(StandardCharsets.UTF_8));
}
}
diff --git
a/oak-blob-plugins/src/test/java/org/apache/jackrabbit/oak/plugins/blob/UploadStagingCacheTest.java
b/oak-blob-plugins/src/test/java/org/apache/jackrabbit/oak/plugins/blob/UploadStagingCacheTest.java
index 67cc08d73d..5a0a42a905 100644
---
a/oak-blob-plugins/src/test/java/org/apache/jackrabbit/oak/plugins/blob/UploadStagingCacheTest.java
+++
b/oak-blob-plugins/src/test/java/org/apache/jackrabbit/oak/plugins/blob/UploadStagingCacheTest.java
@@ -22,6 +22,7 @@ import java.io.BufferedWriter;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
+import java.nio.charset.StandardCharsets;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
@@ -34,7 +35,6 @@ import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import ch.qos.logback.classic.Level;
-import org.apache.jackrabbit.guava.common.base.Charsets;
import org.apache.jackrabbit.guava.common.base.Optional;
import org.apache.jackrabbit.guava.common.collect.Iterators;
import org.apache.jackrabbit.guava.common.collect.Lists;
@@ -718,7 +718,7 @@ public class UploadStagingCacheTest extends
AbstractDataStoreCacheTest {
private void createGibberishLoad(File home, File pendingUploadFile) throws
IOException {
BufferedWriter writer = null;
try {
- writer = Files.newWriter(pendingUploadFile, Charsets.UTF_8);
+ writer = Files.newWriter(pendingUploadFile,
StandardCharsets.UTF_8);
FileIOUtils.writeAsLine(writer,
"jerhgiuheirghoeoorqehgsjlwjpfkkwpkf", false);
} finally {
Closeables.close(writer, true);
diff --git
a/oak-blob-plugins/src/test/java/org/apache/jackrabbit/oak/plugins/blob/datastore/directaccess/AbstractDataRecordAccessProviderTest.java
b/oak-blob-plugins/src/test/java/org/apache/jackrabbit/oak/plugins/blob/datastore/directaccess/AbstractDataRecordAccessProviderTest.java
index 517fac1482..da6d040af4 100644
---
a/oak-blob-plugins/src/test/java/org/apache/jackrabbit/oak/plugins/blob/datastore/directaccess/AbstractDataRecordAccessProviderTest.java
+++
b/oak-blob-plugins/src/test/java/org/apache/jackrabbit/oak/plugins/blob/datastore/directaccess/AbstractDataRecordAccessProviderTest.java
@@ -25,6 +25,7 @@ import java.io.UnsupportedEncodingException;
import java.net.URI;
import java.net.URLDecoder;
import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
@@ -44,7 +45,6 @@ import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import org.apache.jackrabbit.guava.common.base.Charsets;
import org.apache.jackrabbit.guava.common.base.Strings;
import org.apache.jackrabbit.guava.common.collect.Lists;
import org.apache.jackrabbit.guava.common.collect.Maps;
@@ -179,8 +179,8 @@ public abstract class AbstractDataRecordAccessProviderTest {
public void testGetDownloadURIWithCustomHeadersIT() throws
DataStoreException, IOException {
String umlautFilename = "Uml\u00e4utfile.png";
String umlautFilename_ISO_8859_1 = new String(
- Charsets.ISO_8859_1.encode(umlautFilename).array(),
- Charsets.ISO_8859_1
+ StandardCharsets.ISO_8859_1.encode(umlautFilename).array(),
+ StandardCharsets.ISO_8859_1
);
List<String> fileNames = Lists.newArrayList(
"image.png",
diff --git
a/oak-blob-plugins/src/test/java/org/apache/jackrabbit/oak/plugins/blob/datastore/directaccess/DataRecordDownloadOptionsTest.java
b/oak-blob-plugins/src/test/java/org/apache/jackrabbit/oak/plugins/blob/datastore/directaccess/DataRecordDownloadOptionsTest.java
index de0f9bb87b..de0e9f590f 100644
---
a/oak-blob-plugins/src/test/java/org/apache/jackrabbit/oak/plugins/blob/datastore/directaccess/DataRecordDownloadOptionsTest.java
+++
b/oak-blob-plugins/src/test/java/org/apache/jackrabbit/oak/plugins/blob/datastore/directaccess/DataRecordDownloadOptionsTest.java
@@ -19,12 +19,12 @@
package org.apache.jackrabbit.oak.plugins.blob.datastore.directaccess;
+import java.nio.charset.StandardCharsets;
import java.util.List;
import org.apache.jackrabbit.oak.api.blob.BlobDownloadOptions;
import org.junit.Test;
-import org.apache.jackrabbit.guava.common.base.Charsets;
import org.apache.jackrabbit.guava.common.base.Joiner;
import org.apache.jackrabbit.guava.common.base.Strings;
import org.apache.jackrabbit.guava.common.collect.Lists;
@@ -264,8 +264,8 @@ public class DataRecordDownloadOptionsTest {
public void testGetContentDispositionWithSpecialCharacterFilenames() {
String umlautFilename = "Uml\u00e4utfile.jpg";
String umlautFilename_ISO_8859_1 = new String(
- Charsets.ISO_8859_1.encode(umlautFilename).array(),
- Charsets.ISO_8859_1
+ StandardCharsets.ISO_8859_1.encode(umlautFilename).array(),
+ StandardCharsets.ISO_8859_1
);
List<String> filenames = Lists.newArrayList(
"image.png",
diff --git
a/oak-blob-plugins/src/test/java/org/apache/jackrabbit/oak/plugins/blob/serializer/FSBlobSerializerTest.java
b/oak-blob-plugins/src/test/java/org/apache/jackrabbit/oak/plugins/blob/serializer/FSBlobSerializerTest.java
index 3b9bcb1e0a..a7a1050fe4 100644
---
a/oak-blob-plugins/src/test/java/org/apache/jackrabbit/oak/plugins/blob/serializer/FSBlobSerializerTest.java
+++
b/oak-blob-plugins/src/test/java/org/apache/jackrabbit/oak/plugins/blob/serializer/FSBlobSerializerTest.java
@@ -21,6 +21,7 @@ package org.apache.jackrabbit.oak.plugins.blob.serializer;
import java.io.File;
import java.io.InputStream;
+import java.nio.charset.StandardCharsets;
import org.apache.jackrabbit.guava.common.base.Strings;
import org.apache.jackrabbit.oak.api.Blob;
@@ -32,7 +33,6 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
-import static org.apache.jackrabbit.guava.common.base.Charsets.UTF_8;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
@@ -48,7 +48,7 @@ public class FSBlobSerializerTest {
FSBlobSerializer serializer = new FSBlobSerializer(folder.getRoot(),
maxInlineSize);
String data = Strings.repeat("x", maxInlineSize * 10);
- Blob b = new ArrayBasedBlob(data.getBytes(UTF_8));
+ Blob b = new ArrayBasedBlob(data.getBytes(StandardCharsets.UTF_8));
String id = serializer.serialize(b);
Blob b2 = serializer.deserialize(id);
diff --git
a/oak-blob/src/main/java/org/apache/jackrabbit/oak/spi/blob/AbstractBlobStore.java
b/oak-blob/src/main/java/org/apache/jackrabbit/oak/spi/blob/AbstractBlobStore.java
index 58ce19e529..a338eb4ce6 100644
---
a/oak-blob/src/main/java/org/apache/jackrabbit/oak/spi/blob/AbstractBlobStore.java
+++
b/oak-blob/src/main/java/org/apache/jackrabbit/oak/spi/blob/AbstractBlobStore.java
@@ -24,6 +24,7 @@ import java.io.IOException;
import java.io.InputStream;
import java.io.UnsupportedEncodingException;
import java.lang.ref.WeakReference;
+import java.nio.charset.StandardCharsets;
import java.security.InvalidKeyException;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
@@ -43,7 +44,6 @@ import java.util.concurrent.atomic.AtomicReference;
import javax.crypto.Mac;
import javax.crypto.spec.SecretKeySpec;
-import org.apache.jackrabbit.guava.common.base.Charsets;
import org.apache.jackrabbit.guava.common.io.BaseEncoding;
import org.apache.commons.io.FileUtils;
import org.apache.jackrabbit.oak.commons.cache.Cache;
@@ -325,7 +325,7 @@ public abstract class AbstractBlobStore implements
GarbageCollectableBlobStore,
* java.util.Map, boolean)
*/
public void setReferenceKeyPlainText(String textKey) {
- setReferenceKey(textKey.getBytes(Charsets.UTF_8));
+ setReferenceKey(textKey.getBytes(StandardCharsets.UTF_8));
}
protected void usesBlobId(String blobId) {
diff --git
a/oak-blob/src/main/java/org/apache/jackrabbit/oak/spi/blob/split/BlobIdSet.java
b/oak-blob/src/main/java/org/apache/jackrabbit/oak/spi/blob/split/BlobIdSet.java
index b70209ca8b..e62084fcfe 100644
---
a/oak-blob/src/main/java/org/apache/jackrabbit/oak/spi/blob/split/BlobIdSet.java
+++
b/oak-blob/src/main/java/org/apache/jackrabbit/oak/spi/blob/split/BlobIdSet.java
@@ -25,12 +25,12 @@ import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
+import java.nio.charset.StandardCharsets;
import org.apache.commons.io.IOUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import org.apache.jackrabbit.guava.common.base.Charsets;
import org.apache.jackrabbit.guava.common.cache.Cache;
import org.apache.jackrabbit.guava.common.cache.CacheBuilder;
import org.apache.jackrabbit.guava.common.hash.BloomFilter;
@@ -48,7 +48,7 @@ class BlobIdSet {
BlobIdSet(String repositoryDir, String filename) {
store = new File(new File(repositoryDir), filename);
- bloomFilter = BloomFilter.create(Funnels.stringFunnel(Charsets.UTF_8),
9000000); // about 8MB
+ bloomFilter =
BloomFilter.create(Funnels.stringFunnel(StandardCharsets.UTF_8), 9000000); //
about 8MB
cache = CacheBuilder.newBuilder().maximumSize(1000).build();
fillBloomFilter();
}
diff --git
a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/index/datastore/DataStoreTextWriter.java
b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/index/datastore/DataStoreTextWriter.java
index ea30279bdf..d6c5de5467 100644
---
a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/index/datastore/DataStoreTextWriter.java
+++
b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/index/datastore/DataStoreTextWriter.java
@@ -24,10 +24,10 @@ import java.io.Closeable;
import java.io.File;
import java.io.IOException;
import java.lang.ref.SoftReference;
+import java.nio.charset.StandardCharsets;
import java.util.Set;
import java.util.concurrent.Callable;
-import org.apache.jackrabbit.guava.common.base.Charsets;
import org.apache.jackrabbit.guava.common.collect.Sets;
import org.apache.jackrabbit.guava.common.io.Files;
import org.apache.commons.io.FileUtils;
@@ -109,7 +109,7 @@ public class DataStoreTextWriter implements TextWriter,
Closeable, PreExtractedT
} else {
File textFile = getFile(blobId);
if (textFile.exists()) {
- String text = Files.toString(textFile, Charsets.UTF_8);
+ String text = Files.toString(textFile, StandardCharsets.UTF_8);
result = new ExtractedText(ExtractionResult.SUCCESS, text);
}
}
@@ -130,7 +130,7 @@ public class DataStoreTextWriter implements TextWriter,
Closeable, PreExtractedT
File textFile = getFile(stripLength(blobId));
ensureParentExists(textFile);
//TODO should we compress
- Files.write(text, textFile, Charsets.UTF_8);
+ Files.write(text, textFile, StandardCharsets.UTF_8);
}
@Override
@@ -232,7 +232,7 @@ public class DataStoreTextWriter implements TextWriter,
Closeable, PreExtractedT
private Set<String> loadFromFile(File file) throws IOException {
Set<String> result = Sets.newHashSet();
if (file.exists()) {
- result.addAll(Files.readLines(file, Charsets.UTF_8));
+ result.addAll(Files.readLines(file, StandardCharsets.UTF_8));
}
return result;
}
@@ -242,7 +242,7 @@ public class DataStoreTextWriter implements TextWriter,
Closeable, PreExtractedT
return;
}
File file = new File(directory, fileName);
- BufferedWriter bw = Files.newWriter(file, Charsets.UTF_8);
+ BufferedWriter bw = Files.newWriter(file, StandardCharsets.UTF_8);
for (String id : blobIds) {
bw.write(id);
bw.newLine();
diff --git
a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/index/importer/IndexDefinitionUpdater.java
b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/index/importer/IndexDefinitionUpdater.java
index 4c12d34af8..ccaa3636fc 100644
---
a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/index/importer/IndexDefinitionUpdater.java
+++
b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/index/importer/IndexDefinitionUpdater.java
@@ -21,10 +21,10 @@ package org.apache.jackrabbit.oak.plugins.index.importer;
import java.io.File;
import java.io.IOException;
+import java.nio.charset.StandardCharsets;
import java.util.Map;
import java.util.Set;
-import org.apache.jackrabbit.guava.common.base.Charsets;
import org.apache.jackrabbit.guava.common.collect.Maps;
import org.apache.commons.io.FileUtils;
import org.apache.jackrabbit.oak.api.CommitFailedException;
@@ -58,7 +58,7 @@ public class IndexDefinitionUpdater {
public IndexDefinitionUpdater(File file) throws IOException {
checkArgument(file.exists() && file.canRead(), "File [%s] cannot be
read", file);
- this.indexNodeStates =
getIndexDefnStates(FileUtils.readFileToString(file, Charsets.UTF_8));
+ this.indexNodeStates =
getIndexDefnStates(FileUtils.readFileToString(file, StandardCharsets.UTF_8));
}
public IndexDefinitionUpdater(String json) throws IOException {
diff --git
a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/index/property/PropertyIndexUtil.java
b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/index/property/PropertyIndexUtil.java
index 3467118066..fa84a97524 100644
---
a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/index/property/PropertyIndexUtil.java
+++
b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/index/property/PropertyIndexUtil.java
@@ -21,10 +21,10 @@ package org.apache.jackrabbit.oak.plugins.index.property;
import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
+import java.nio.charset.StandardCharsets;
import java.util.HashSet;
import java.util.Set;
-import org.apache.jackrabbit.guava.common.base.Charsets;
import org.apache.jackrabbit.oak.api.PropertyValue;
public class PropertyIndexUtil {
@@ -53,7 +53,7 @@ public class PropertyIndexUtil {
if (v.isEmpty()) {
v = EMPTY_TOKEN;
} else {
- v = URLEncoder.encode(v, Charsets.UTF_8.name());
+ v = URLEncoder.encode(v, StandardCharsets.UTF_8.name());
}
values.add(v);
}
diff --git
a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/nodetype/write/NodeTypeRegistry.java
b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/nodetype/write/NodeTypeRegistry.java
index 80477dc893..a6a79b1438 100644
---
a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/nodetype/write/NodeTypeRegistry.java
+++
b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/nodetype/write/NodeTypeRegistry.java
@@ -22,13 +22,13 @@ import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Reader;
import java.io.StringReader;
+import java.nio.charset.StandardCharsets;
import javax.jcr.NamespaceRegistry;
import javax.jcr.RepositoryException;
import javax.jcr.ValueFactory;
import javax.jcr.nodetype.NodeTypeManager;
-import org.apache.jackrabbit.guava.common.base.Charsets;
import org.apache.jackrabbit.commons.cnd.CndImporter;
import org.apache.jackrabbit.commons.cnd.ParseException;
import org.apache.jackrabbit.oak.api.Root;
@@ -100,7 +100,7 @@ public final class NodeTypeRegistry {
private void registerNodeTypes(InputStream stream, String systemId) {
try {
- Reader reader = new InputStreamReader(stream, Charsets.UTF_8);
+ Reader reader = new InputStreamReader(stream,
StandardCharsets.UTF_8);
// OAK-9134: nt:frozenNode is not implementing mix:referenceable
from JCR 2.0.
// This system property allows to add it back when initializing a
repository.
// PS: To keep supporting tests in fiddling this setting, the
SystemPropertySupplier
diff --git
a/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/index/importer/IndexImporterTest.java
b/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/index/importer/IndexImporterTest.java
index c3abb429a1..680b50f2be 100644
---
a/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/index/importer/IndexImporterTest.java
+++
b/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/index/importer/IndexImporterTest.java
@@ -23,6 +23,7 @@ import java.io.File;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.StringWriter;
+import java.nio.charset.StandardCharsets;
import java.text.MessageFormat;
import java.util.Properties;
import java.util.Set;
@@ -68,7 +69,6 @@ import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import org.slf4j.event.Level;
-import static org.apache.jackrabbit.guava.common.base.Charsets.UTF_8;
import static org.apache.jackrabbit.guava.common.collect.ImmutableSet.of;
import static java.util.Arrays.asList;
import static org.apache.jackrabbit.JcrConstants.NT_BASE;
@@ -311,7 +311,7 @@ public class IndexImporterTest {
info.save();
//Create index definitions json
- Files.write(json, new File(indexFolder, INDEX_DEFINITIONS_JSON), UTF_8);
+ Files.write(json, new File(indexFolder, INDEX_DEFINITIONS_JSON), StandardCharsets.UTF_8);
createIndexFolder(indexFolder, "/oak:index/fooIndex");
@@ -555,7 +555,7 @@ public class IndexImporterTest {
StringWriter sw = new StringWriter();
PrintWriter pw = new PrintWriter(sw);
printer.print(pw, Format.JSON, false);
- Files.write(sw.toString(), file, UTF_8);
+ Files.write(sw.toString(), file, StandardCharsets.UTF_8);
}
private String importDataIncrementalUpdateBeforeSetupMethod() throws
IOException, CommitFailedException {
diff --git
a/oak-http/src/main/java/org/apache/jackrabbit/oak/http/HtmlRepresentation.java
b/oak-http/src/main/java/org/apache/jackrabbit/oak/http/HtmlRepresentation.java
index 8e9aed2a0d..cc82230d88 100644
---
a/oak-http/src/main/java/org/apache/jackrabbit/oak/http/HtmlRepresentation.java
+++
b/oak-http/src/main/java/org/apache/jackrabbit/oak/http/HtmlRepresentation.java
@@ -18,6 +18,7 @@ package org.apache.jackrabbit.oak.http;
import java.io.IOException;
import java.net.URLEncoder;
+import java.nio.charset.StandardCharsets;
import javax.servlet.http.HttpServletResponse;
import javax.xml.transform.OutputKeys;
@@ -35,7 +36,6 @@ import org.apache.tika.mime.MediaType;
import org.apache.tika.sax.XHTMLContentHandler;
import org.xml.sax.SAXException;
-import org.apache.jackrabbit.guava.common.base.Charsets;
import static org.apache.jackrabbit.oak.api.Type.STRING;
import static org.apache.jackrabbit.oak.api.Type.STRINGS;
@@ -74,7 +74,7 @@ class HtmlRepresentation implements Representation {
xhtml.element("dt", name);
xhtml.startElement("dd");
xhtml.startElement("a", "href", response.encodeRedirectURL(
- URLEncoder.encode(name, Charsets.UTF_8.name()) + "/"));
URLEncoder.encode(name, StandardCharsets.UTF_8) + "/"));
xhtml.characters(child.getPath());
xhtml.endElement("a");
xhtml.endElement("dd");
diff --git
a/oak-jcr/src/test/java/org/apache/jackrabbit/oak/jcr/TestContentLoader.java
b/oak-jcr/src/test/java/org/apache/jackrabbit/oak/jcr/TestContentLoader.java
index 3474a54582..607bc7d816 100644
--- a/oak-jcr/src/test/java/org/apache/jackrabbit/oak/jcr/TestContentLoader.java
+++ b/oak-jcr/src/test/java/org/apache/jackrabbit/oak/jcr/TestContentLoader.java
@@ -20,6 +20,7 @@ import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
+import java.nio.charset.StandardCharsets;
import java.util.Calendar;
import javax.jcr.Node;
import javax.jcr.PathNotFoundException;
@@ -33,7 +34,6 @@ import javax.jcr.nodetype.NodeType;
import javax.jcr.retention.RetentionPolicy;
import javax.jcr.security.Privilege;
-import org.apache.jackrabbit.guava.common.base.Charsets;
import org.apache.jackrabbit.commons.JcrUtils;
import org.apache.jackrabbit.commons.cnd.CndImporter;
import org.apache.jackrabbit.commons.cnd.ParseException;
@@ -77,7 +77,7 @@ public class TestContentLoader {
InputStream stream =
TestContentLoader.class.getResourceAsStream("test_nodetypes.cnd");
try {
CndImporter.registerNodeTypes(
- new InputStreamReader(stream, Charsets.UTF_8), session);
+ new InputStreamReader(stream, StandardCharsets.UTF_8), session);
} finally {
stream.close();
}
diff --git
a/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/directory/ActiveDeletedBlobCollectorFactory.java
b/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/directory/ActiveDeletedBlobCollectorFactory.java
index c354544b7b..ca965aa0c3 100644
---
a/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/directory/ActiveDeletedBlobCollectorFactory.java
+++
b/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/directory/ActiveDeletedBlobCollectorFactory.java
@@ -25,6 +25,7 @@ import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
+import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Collection;
import java.util.LinkedList;
@@ -36,7 +37,6 @@ import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
-import org.apache.jackrabbit.guava.common.base.Charsets;
import org.apache.jackrabbit.guava.common.base.Joiner;
import org.apache.jackrabbit.guava.common.collect.Lists;
import org.apache.jackrabbit.guava.common.io.Closeables;
@@ -218,7 +218,7 @@ public class ActiveDeletedBlobCollectorFactory {
if (blobIdsTracked) {
try {
idTempDeleteFile = File.createTempFile("idTempDelete",
null, rootDirectory);
- idTempDeleteWriter = Files.newWriter(idTempDeleteFile, Charsets.UTF_8);
+ idTempDeleteWriter = Files.newWriter(idTempDeleteFile, StandardCharsets.UTF_8);
} catch (Exception e) {
LOG.warn("Unable to open a writer to a temp file, will
ignore tracker sync");
blobIdsTracked = false;
diff --git
a/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/directory/IndexRootDirectory.java
b/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/directory/IndexRootDirectory.java
index cc07744746..a86939da22 100644
---
a/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/directory/IndexRootDirectory.java
+++
b/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/directory/IndexRootDirectory.java
@@ -23,13 +23,13 @@ import java.io.File;
import java.io.FileFilter;
import java.io.FilenameFilter;
import java.io.IOException;
+import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
-import org.apache.jackrabbit.guava.common.base.Charsets;
import org.apache.jackrabbit.guava.common.base.Joiner;
import org.apache.jackrabbit.guava.common.collect.ArrayListMultimap;
import org.apache.jackrabbit.guava.common.collect.Iterables;
@@ -213,7 +213,7 @@ public class IndexRootDirectory {
}
static String getPathHash(String indexPath) {
- return Hashing.sha256().hashString(indexPath, Charsets.UTF_8).toString();
+ return Hashing.sha256().hashString(indexPath, StandardCharsets.UTF_8).toString();
}
/**
diff --git
a/oak-lucene/src/test/java/org/apache/jackrabbit/oak/plugins/index/lucene/LucenePropertyIndexTest.java
b/oak-lucene/src/test/java/org/apache/jackrabbit/oak/plugins/index/lucene/LucenePropertyIndexTest.java
index a45fb2b210..a4d17cb768 100644
---
a/oak-lucene/src/test/java/org/apache/jackrabbit/oak/plugins/index/lucene/LucenePropertyIndexTest.java
+++
b/oak-lucene/src/test/java/org/apache/jackrabbit/oak/plugins/index/lucene/LucenePropertyIndexTest.java
@@ -25,6 +25,7 @@ import java.io.IOException;
import java.io.InputStream;
import java.net.URI;
import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
import java.text.ParseException;
import java.util.Calendar;
import java.util.Collection;
@@ -40,7 +41,6 @@ import java.util.concurrent.Executors;
import javax.jcr.PropertyType;
-import org.apache.jackrabbit.guava.common.base.Charsets;
import org.apache.jackrabbit.guava.common.collect.ComparisonChain;
import org.apache.jackrabbit.guava.common.collect.ImmutableList;
import org.apache.jackrabbit.guava.common.collect.ImmutableSet;
@@ -3411,11 +3411,11 @@ public class LucenePropertyIndexTest extends
AbstractQueryTest {
}
public AccessStateProvidingBlob(String content) {
- this(content.getBytes(Charsets.UTF_8));
+ this(content.getBytes(StandardCharsets.UTF_8));
}
public AccessStateProvidingBlob(String content, String id) {
- this(content.getBytes(Charsets.UTF_8));
+ this(content.getBytes(StandardCharsets.UTF_8));
this.id = id;
}
diff --git
a/oak-pojosr/src/main/java/org/apache/jackrabbit/oak/run/osgi/ConfigTracker.java
b/oak-pojosr/src/main/java/org/apache/jackrabbit/oak/run/osgi/ConfigTracker.java
index 8838d49fe5..d5be634b50 100644
---
a/oak-pojosr/src/main/java/org/apache/jackrabbit/oak/run/osgi/ConfigTracker.java
+++
b/oak-pojosr/src/main/java/org/apache/jackrabbit/oak/run/osgi/ConfigTracker.java
@@ -21,11 +21,11 @@ package org.apache.jackrabbit.oak.run.osgi;
import java.io.File;
import java.io.IOException;
+import java.nio.charset.StandardCharsets;
import java.util.List;
import java.util.Map;
import java.util.Set;
-import org.apache.jackrabbit.guava.common.base.Charsets;
import org.apache.jackrabbit.guava.common.base.Splitter;
import org.apache.jackrabbit.guava.common.collect.Maps;
import org.apache.jackrabbit.guava.common.collect.Sets;
@@ -113,7 +113,7 @@ class ConfigTracker extends
ServiceTracker<ConfigurationAdmin, ConfigurationAdmi
continue;
}
- String content = Files.toString(jsonFile, Charsets.UTF_8);
+ String content = Files.toString(jsonFile, StandardCharsets.UTF_8);
JSONObject json = (JSONObject) JSONValue.parse(content);
configs.putAll(json);
}
diff --git
a/oak-run-commons/src/main/java/org/apache/jackrabbit/oak/index/indexer/document/flatfile/NodeStateEntrySorter.java
b/oak-run-commons/src/main/java/org/apache/jackrabbit/oak/index/indexer/document/flatfile/NodeStateEntrySorter.java
index f5460ffc66..3211149925 100644
---
a/oak-run-commons/src/main/java/org/apache/jackrabbit/oak/index/indexer/document/flatfile/NodeStateEntrySorter.java
+++
b/oak-run-commons/src/main/java/org/apache/jackrabbit/oak/index/indexer/document/flatfile/NodeStateEntrySorter.java
@@ -32,11 +32,11 @@ import java.io.BufferedWriter;
import java.io.File;
import java.io.IOException;
import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
import java.util.Comparator;
import java.util.List;
import java.util.function.Function;
-import static org.apache.jackrabbit.guava.common.base.Charsets.UTF_8;
import static org.apache.commons.io.FileUtils.ONE_GB;
import static org.apache.jackrabbit.oak.commons.IOUtils.humanReadableByteCount;
import static
org.apache.jackrabbit.oak.index.indexer.document.flatfile.FlatFileStoreUtils.createReader;
@@ -48,7 +48,7 @@ public class NodeStateEntrySorter {
private static final int DEFAULTMAXTEMPFILES = 1024;
private final File nodeStateFile;
private final File workDir;
- private final Charset charset = UTF_8;
+ private final Charset charset = StandardCharsets.UTF_8;
private final Comparator<Iterable<String>> pathComparator;
private File sortedFile;
private boolean deleteOriginal;
diff --git
a/oak-run-commons/src/main/java/org/apache/jackrabbit/oak/index/indexer/document/flatfile/StateInBytesHolder.java
b/oak-run-commons/src/main/java/org/apache/jackrabbit/oak/index/indexer/document/flatfile/StateInBytesHolder.java
index 5466290eeb..3b46e85ba8 100644
---
a/oak-run-commons/src/main/java/org/apache/jackrabbit/oak/index/indexer/document/flatfile/StateInBytesHolder.java
+++
b/oak-run-commons/src/main/java/org/apache/jackrabbit/oak/index/indexer/document/flatfile/StateInBytesHolder.java
@@ -19,11 +19,11 @@
package org.apache.jackrabbit.oak.index.indexer.document.flatfile;
+import java.nio.charset.StandardCharsets;
import java.util.List;
import org.apache.jackrabbit.oak.commons.StringUtils;
-import static org.apache.jackrabbit.guava.common.base.Charsets.UTF_8;
import static org.apache.jackrabbit.guava.common.collect.ImmutableList.copyOf;
import static org.apache.jackrabbit.oak.commons.PathUtils.elements;
@@ -33,7 +33,7 @@ class StateInBytesHolder implements NodeStateHolder {
public StateInBytesHolder(String path, String line) {
this.pathElements = copyOf(elements(path));
- this.content = line.getBytes(UTF_8);
+ this.content = line.getBytes(StandardCharsets.UTF_8);
}
@Override
@@ -46,7 +46,7 @@ class StateInBytesHolder implements NodeStateHolder {
*/
@Override
public String getLine() {
- return new String(content, UTF_8);
+ return new String(content, StandardCharsets.UTF_8);
}
@Override
diff --git
a/oak-run-commons/src/main/java/org/apache/jackrabbit/oak/index/indexer/document/flatfile/TraverseWithSortStrategy.java
b/oak-run-commons/src/main/java/org/apache/jackrabbit/oak/index/indexer/document/flatfile/TraverseWithSortStrategy.java
index 4a920d8b19..47ff8d149b 100644
---
a/oak-run-commons/src/main/java/org/apache/jackrabbit/oak/index/indexer/document/flatfile/TraverseWithSortStrategy.java
+++
b/oak-run-commons/src/main/java/org/apache/jackrabbit/oak/index/indexer/document/flatfile/TraverseWithSortStrategy.java
@@ -46,6 +46,7 @@ import java.lang.management.MemoryNotificationInfo;
import java.lang.management.MemoryPoolMXBean;
import java.lang.management.MemoryUsage;
import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;
@@ -58,7 +59,6 @@ import static
java.lang.management.ManagementFactory.getMemoryMXBean;
import static java.lang.management.ManagementFactory.getMemoryPoolMXBeans;
import static java.lang.management.MemoryType.HEAP;
import static org.apache.commons.io.FileUtils.ONE_GB;
-import static org.apache.jackrabbit.guava.common.base.Charsets.UTF_8;
import static org.apache.jackrabbit.oak.commons.IOUtils.humanReadableByteCount;
import static
org.apache.jackrabbit.oak.index.indexer.document.flatfile.FlatFileNodeStoreBuilder.OAK_INDEXER_MAX_SORT_MEMORY_IN_GB;
import static
org.apache.jackrabbit.oak.index.indexer.document.flatfile.FlatFileNodeStoreBuilder.OAK_INDEXER_MAX_SORT_MEMORY_IN_GB_DEFAULT;
@@ -72,7 +72,7 @@ class TraverseWithSortStrategy extends
IndexStoreSortStrategyBase {
private final AtomicBoolean sufficientMemory = new AtomicBoolean(true);
private final NodeStateEntryTraverserFactory nodeStatesFactory;
private final NodeStateEntryWriter entryWriter;
- private final Charset charset = UTF_8;
+ private final Charset charset = StandardCharsets.UTF_8;
private final Comparator<NodeStateHolder> comparator;
private NotificationEmitter emitter;
private MemoryListener listener;
diff --git
a/oak-run/src/main/java/org/apache/jackrabbit/oak/exporter/NodeStateSerializer.java
b/oak-run/src/main/java/org/apache/jackrabbit/oak/exporter/NodeStateSerializer.java
index d89970cdac..aea802403c 100644
---
a/oak-run/src/main/java/org/apache/jackrabbit/oak/exporter/NodeStateSerializer.java
+++
b/oak-run/src/main/java/org/apache/jackrabbit/oak/exporter/NodeStateSerializer.java
@@ -25,6 +25,7 @@ import java.io.IOException;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.io.Writer;
+import java.nio.charset.StandardCharsets;
import org.apache.jackrabbit.guava.common.io.Files;
import com.google.gson.stream.JsonWriter;
@@ -36,7 +37,6 @@ import
org.apache.jackrabbit.oak.plugins.blob.serializer.FSBlobSerializer;
import org.apache.jackrabbit.oak.spi.state.NodeState;
import org.apache.jackrabbit.oak.spi.state.NodeStateUtils;
-import static org.apache.jackrabbit.guava.common.base.Charsets.UTF_8;
import static
org.apache.jackrabbit.guava.common.base.Preconditions.checkArgument;
import static org.apache.jackrabbit.guava.common.base.Preconditions.checkState;
@@ -75,7 +75,7 @@ public class NodeStateSerializer {
checkState(dir.mkdirs(), "Cannot create directory [%s]",
dir.getAbsolutePath());
}
File file = new File(dir, getFileName());
- try (Writer writer = Files.newWriter(file, UTF_8)){
+ try (Writer writer = Files.newWriter(file, StandardCharsets.UTF_8)){
serialize(writer, createBlobSerializer(dir));
}
closeSerializer();
@@ -126,7 +126,7 @@ public class NodeStateSerializer {
}
private String getFilter() throws IOException {
- return filterFile != null ? Files.toString(filterFile, UTF_8) : filter;
+ return filterFile != null ? Files.toString(filterFile, StandardCharsets.UTF_8) : filter;
}
public String getFileName() {
diff --git
a/oak-run/src/main/java/org/apache/jackrabbit/oak/index/IndexConsistencyCheckPrinter.java
b/oak-run/src/main/java/org/apache/jackrabbit/oak/index/IndexConsistencyCheckPrinter.java
index 845f697a80..433d53ae33 100644
---
a/oak-run/src/main/java/org/apache/jackrabbit/oak/index/IndexConsistencyCheckPrinter.java
+++
b/oak-run/src/main/java/org/apache/jackrabbit/oak/index/IndexConsistencyCheckPrinter.java
@@ -21,10 +21,10 @@ package org.apache.jackrabbit.oak.index;
import java.io.PrintStream;
import java.io.PrintWriter;
+import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
-import org.apache.jackrabbit.guava.common.base.Charsets;
import org.apache.jackrabbit.guava.common.base.Stopwatch;
import org.apache.commons.io.output.WriterOutputStream;
import org.apache.felix.inventory.Format;
@@ -63,7 +63,7 @@ class IndexConsistencyCheckPrinter implements
InventoryPrinter {
}
IndexConsistencyChecker checker = new
IndexConsistencyChecker(root, indexPath, indexHelper.getWorkDir());
- checker.setPrintStream(new PrintStream(new WriterOutputStream(pw, Charsets.UTF_8)));
+ checker.setPrintStream(new PrintStream(new WriterOutputStream(pw, StandardCharsets.UTF_8)));
try {
IndexConsistencyChecker.Result result = checker.check(level);
result.dump(pw);
diff --git
a/oak-run/src/main/java/org/apache/jackrabbit/oak/plugins/tika/CSVFileBinaryResourceProvider.java
b/oak-run/src/main/java/org/apache/jackrabbit/oak/plugins/tika/CSVFileBinaryResourceProvider.java
index 9f20a596e6..5b46b31093 100644
---
a/oak-run/src/main/java/org/apache/jackrabbit/oak/plugins/tika/CSVFileBinaryResourceProvider.java
+++
b/oak-run/src/main/java/org/apache/jackrabbit/oak/plugins/tika/CSVFileBinaryResourceProvider.java
@@ -22,8 +22,8 @@ package org.apache.jackrabbit.oak.plugins.tika;
import java.io.Closeable;
import java.io.File;
import java.io.IOException;
+import java.nio.charset.StandardCharsets;
-import org.apache.jackrabbit.guava.common.base.Charsets;
import org.apache.jackrabbit.guava.common.base.Function;
import org.apache.jackrabbit.guava.common.base.Predicate;
import org.apache.jackrabbit.guava.common.collect.FluentIterable;
@@ -72,7 +72,7 @@ class CSVFileBinaryResourceProvider implements
BinaryResourceProvider, Closeable
@Override
public FluentIterable<BinaryResource> getBinaries(final String path)
throws IOException {
- CSVParser parser = CSVParser.parse(dataFile, Charsets.UTF_8, FORMAT);
+ CSVParser parser = CSVParser.parse(dataFile, StandardCharsets.UTF_8, FORMAT);
closer.register(parser);
return FluentIterable.from(parser)
.transform(new RecordTransformer())
diff --git
a/oak-run/src/main/java/org/apache/jackrabbit/oak/plugins/tika/CSVFileGenerator.java
b/oak-run/src/main/java/org/apache/jackrabbit/oak/plugins/tika/CSVFileGenerator.java
index cb337c38c2..d65ab97c14 100644
---
a/oak-run/src/main/java/org/apache/jackrabbit/oak/plugins/tika/CSVFileGenerator.java
+++
b/oak-run/src/main/java/org/apache/jackrabbit/oak/plugins/tika/CSVFileGenerator.java
@@ -21,8 +21,8 @@ package org.apache.jackrabbit.oak.plugins.tika;
import java.io.File;
import java.io.IOException;
+import java.nio.charset.StandardCharsets;
-import org.apache.jackrabbit.guava.common.base.Charsets;
import org.apache.jackrabbit.guava.common.collect.FluentIterable;
import org.apache.jackrabbit.guava.common.io.Closer;
import org.apache.jackrabbit.guava.common.io.Files;
@@ -43,7 +43,7 @@ public class CSVFileGenerator {
Closer closer = Closer.create();
int count = 0;
try{
- CSVPrinter printer = new CSVPrinter(Files.newWriter(outFile, Charsets.UTF_8),
+ CSVPrinter printer = new CSVPrinter(Files.newWriter(outFile, StandardCharsets.UTF_8),
CSVFileBinaryResourceProvider.FORMAT);
closer.register(printer);
for (BinaryResource br : binaries){
diff --git
a/oak-run/src/main/java/org/apache/jackrabbit/oak/plugins/tika/TextPopulator.java
b/oak-run/src/main/java/org/apache/jackrabbit/oak/plugins/tika/TextPopulator.java
index f6156caef5..60b7970022 100644
---
a/oak-run/src/main/java/org/apache/jackrabbit/oak/plugins/tika/TextPopulator.java
+++
b/oak-run/src/main/java/org/apache/jackrabbit/oak/plugins/tika/TextPopulator.java
@@ -37,8 +37,8 @@ import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.IOException;
+import java.nio.charset.StandardCharsets;
-import static org.apache.jackrabbit.guava.common.base.Charsets.UTF_8;
import static org.apache.jackrabbit.JcrConstants.JCR_PATH;
import static
org.apache.jackrabbit.oak.plugins.index.search.FieldNames.FULLTEXT;
import static org.apache.jackrabbit.oak.plugins.index.search.FieldNames.PATH;
@@ -66,7 +66,7 @@ class TextPopulator {
void populate(File dataFile, File indexDir) throws IOException {
try (Closer closer = Closer.create()) {
- Iterable<CSVRecord> csvRecords = closer.register(CSVParser.parse(dataFile, UTF_8, FORMAT));
+ Iterable<CSVRecord> csvRecords = closer.register(CSVParser.parse(dataFile, StandardCharsets.UTF_8, FORMAT));
final FSDirectory dir =
closer.register(FSDirectory.open(indexDir));
final DirectoryReader reader =
closer.register(DirectoryReader.open(dir));
diff --git
a/oak-run/src/main/java/org/apache/jackrabbit/oak/run/DataStoreCheckCommand.java
b/oak-run/src/main/java/org/apache/jackrabbit/oak/run/DataStoreCheckCommand.java
index 5b3de1d847..7d3e4c53cc 100644
---
a/oak-run/src/main/java/org/apache/jackrabbit/oak/run/DataStoreCheckCommand.java
+++
b/oak-run/src/main/java/org/apache/jackrabbit/oak/run/DataStoreCheckCommand.java
@@ -35,6 +35,7 @@ import java.io.BufferedWriter;
import java.io.Closeable;
import java.io.File;
import java.io.IOException;
+import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.Collection;
import java.util.Comparator;
@@ -44,7 +45,6 @@ import java.util.Map;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
-import org.apache.jackrabbit.guava.common.base.Charsets;
import org.apache.jackrabbit.guava.common.base.Function;
import org.apache.jackrabbit.guava.common.base.Joiner;
import org.apache.jackrabbit.guava.common.base.Splitter;
@@ -285,7 +285,7 @@ public class DataStoreCheckCommand implements Command {
}
private static void verboseIds(Closer closer, final String dsType, File
readFile, File writeFile) throws IOException {
- LineIterator idIterator = FileUtils.lineIterator(readFile, Charsets.UTF_8.name());
+ LineIterator idIterator = FileUtils.lineIterator(readFile, StandardCharsets.UTF_8.name());
try {
// Create a temp file to write real ids and register with closer
File longIdTemp = createTempFile("longids", null);
@@ -430,7 +430,7 @@ public class DataStoreCheckCommand implements Command {
private static void retrieveBlobReferences(GarbageCollectableBlobStore
blobStore, BlobReferenceRetriever marker,
File marked, String dsType, boolean isVerbose) throws IOException {
- final BufferedWriter writer = Files.newWriter(marked, Charsets.UTF_8);
+ final BufferedWriter writer = Files.newWriter(marked, StandardCharsets.UTF_8);
final AtomicInteger count = new AtomicInteger();
boolean threw = true;
try {
@@ -554,7 +554,7 @@ public class DataStoreCheckCommand implements Command {
Stopwatch watch = createStarted();
try {
- writer = Files.newWriter(references, Charsets.UTF_8);
+ writer = Files.newWriter(references, StandardCharsets.UTF_8);
if (paths.length == 0) {
traverseChildren(nodeStore.getRoot(), "/", writer, count);
} else {
diff --git
a/oak-run/src/main/java/org/apache/jackrabbit/oak/run/DataStoreCommand.java
b/oak-run/src/main/java/org/apache/jackrabbit/oak/run/DataStoreCommand.java
index 84620ba898..66f59a55db 100644
--- a/oak-run/src/main/java/org/apache/jackrabbit/oak/run/DataStoreCommand.java
+++ b/oak-run/src/main/java/org/apache/jackrabbit/oak/run/DataStoreCommand.java
@@ -20,6 +20,7 @@ import java.io.BufferedWriter;
import java.io.File;
import java.io.IOException;
import java.lang.management.ManagementFactory;
+import java.nio.charset.StandardCharsets;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
@@ -86,7 +87,6 @@ import org.jetbrains.annotations.Nullable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import static org.apache.jackrabbit.guava.common.base.Charsets.UTF_8;
import static
org.apache.jackrabbit.guava.common.base.Preconditions.checkNotNull;
import static
org.apache.jackrabbit.guava.common.base.StandardSystemProperty.FILE_SEPARATOR;
import static org.apache.jackrabbit.guava.common.base.Stopwatch.createStarted;
@@ -199,7 +199,7 @@ public class DataStoreCommand implements Command {
if (dataStoreOpts.dumpRefs()) {
log.info("Initiating dump of data store references");
final File referencesTemp = File.createTempFile("traverseref",
null, new File(opts.getTempDirectory()));
- final BufferedWriter writer = Files.newWriter(referencesTemp, UTF_8);
+ final BufferedWriter writer = Files.newWriter(referencesTemp, StandardCharsets.UTF_8);
boolean threw = true;
try {
@@ -405,7 +405,7 @@ public class DataStoreCommand implements Command {
}
private static void verboseIds(BlobStoreOptions blobOpts, File readFile,
File writeFile) throws IOException {
- LineIterator idIterator = FileUtils.lineIterator(readFile, UTF_8.name());
+ LineIterator idIterator = FileUtils.lineIterator(readFile, StandardCharsets.UTF_8.name());
try (BurnOnCloseFileIterator<String> iterator = new
BurnOnCloseFileIterator<String>(idIterator, readFile,
(Function<String, String>) input ->
VerboseIdLogger.encodeId(input, blobOpts.getBlobStoreType()))) {
@@ -687,7 +687,7 @@ public class DataStoreCommand implements Command {
File tempFile = new File(outDir, outFile.getName() + "-temp");
FileUtils.moveFile(outFile, tempFile);
try (BurnOnCloseFileIterator<String> iterator = new
BurnOnCloseFileIterator<String>(
- FileUtils.lineIterator(tempFile, UTF_8.toString()), tempFile,
+ FileUtils.lineIterator(tempFile, StandardCharsets.UTF_8.toString()), tempFile,
(Function<String, String>) input -> encodeId(input,
blobStoreType))) {
writeStrings(iterator, outFile, true, log, "Transformed to
verbose ids - ");
}
diff --git
a/oak-run/src/test/java/org/apache/jackrabbit/oak/exporter/NodeStateSerializerTest.java
b/oak-run/src/test/java/org/apache/jackrabbit/oak/exporter/NodeStateSerializerTest.java
index 11e37f3c13..5930f0974b 100644
---
a/oak-run/src/test/java/org/apache/jackrabbit/oak/exporter/NodeStateSerializerTest.java
+++
b/oak-run/src/test/java/org/apache/jackrabbit/oak/exporter/NodeStateSerializerTest.java
@@ -20,6 +20,7 @@
package org.apache.jackrabbit.oak.exporter;
import java.io.File;
+import java.nio.charset.StandardCharsets;
import java.util.Collections;
import org.apache.jackrabbit.guava.common.io.Files;
@@ -33,7 +34,6 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
-import static org.apache.jackrabbit.guava.common.base.Charsets.UTF_8;
import static java.util.Arrays.asList;
import static
org.apache.jackrabbit.oak.plugins.memory.EmptyNodeState.EMPTY_NODE;
import static org.junit.Assert.*;
@@ -66,7 +66,7 @@ public class NodeStateSerializerTest {
File json = new File(folder.getRoot(), serializer.getFileName());
assertTrue(json.exists());
- String text = Files.toString(json, UTF_8);
+ String text = Files.toString(json, StandardCharsets.UTF_8);
NodeState nodeState2 = deserialize(text);
assertTrue(EqualsDiff.equals(builder.getNodeState(), nodeState2));
}
@@ -91,4 +91,4 @@ public class NodeStateSerializerTest {
return deserializer.deserialize(json);
}
-}
\ No newline at end of file
+}
diff --git
a/oak-run/src/test/java/org/apache/jackrabbit/oak/index/ReindexIT.java
b/oak-run/src/test/java/org/apache/jackrabbit/oak/index/ReindexIT.java
index fff1c622fb..52b1600f22 100644
--- a/oak-run/src/test/java/org/apache/jackrabbit/oak/index/ReindexIT.java
+++ b/oak-run/src/test/java/org/apache/jackrabbit/oak/index/ReindexIT.java
@@ -52,11 +52,11 @@ import javax.jcr.query.RowIterator;
import java.io.File;
import java.io.IOException;
import java.io.PrintStream;
+import java.nio.charset.StandardCharsets;
import java.util.List;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
-import static org.apache.jackrabbit.guava.common.base.Charsets.UTF_8;
import static org.apache.jackrabbit.oak.spi.state.NodeStateUtils.getNode;
import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.CoreMatchers.hasItem;
@@ -339,7 +339,7 @@ public class ReindexIT extends
LuceneAbstractIndexCommandTest {
"}";
File jsonFile = temporaryFolder.newFile();
- Files.write(json, jsonFile, UTF_8);
+ Files.write(json, jsonFile, StandardCharsets.UTF_8);
File outDir = temporaryFolder.newFolder();
File storeDir = fixture.getDir();
diff --git
a/oak-run/src/test/java/org/apache/jackrabbit/oak/plugins/tika/CSVFileBinaryResourceProviderTest.java
b/oak-run/src/test/java/org/apache/jackrabbit/oak/plugins/tika/CSVFileBinaryResourceProviderTest.java
index f7323e4ffe..ebd3a7077e 100644
---
a/oak-run/src/test/java/org/apache/jackrabbit/oak/plugins/tika/CSVFileBinaryResourceProviderTest.java
+++
b/oak-run/src/test/java/org/apache/jackrabbit/oak/plugins/tika/CSVFileBinaryResourceProviderTest.java
@@ -20,9 +20,9 @@
package org.apache.jackrabbit.oak.plugins.tika;
import java.io.File;
+import java.nio.charset.StandardCharsets;
import java.util.Map;
-import org.apache.jackrabbit.guava.common.base.Charsets;
import org.apache.jackrabbit.guava.common.io.Files;
import org.apache.commons.csv.CSVPrinter;
import org.apache.jackrabbit.oak.spi.blob.MemoryBlobStore;
@@ -48,7 +48,7 @@ public class CSVFileBinaryResourceProviderTest {
p.printRecord(null, null, "text/plain", null, "/c");
File dataFile = temporaryFolder.newFile();
- Files.write(sb, dataFile, Charsets.UTF_8);
+ Files.write(sb, dataFile, StandardCharsets.UTF_8);
CSVFileBinaryResourceProvider provider = new
CSVFileBinaryResourceProvider(dataFile, new MemoryBlobStore());
@@ -62,4 +62,4 @@ public class CSVFileBinaryResourceProviderTest {
provider.close();
}
-}
\ No newline at end of file
+}
diff --git
a/oak-run/src/test/java/org/apache/jackrabbit/oak/plugins/tika/TextPopulatorTest.java
b/oak-run/src/test/java/org/apache/jackrabbit/oak/plugins/tika/TextPopulatorTest.java
index cb97dd9b34..2a6906e193 100644
---
a/oak-run/src/test/java/org/apache/jackrabbit/oak/plugins/tika/TextPopulatorTest.java
+++
b/oak-run/src/test/java/org/apache/jackrabbit/oak/plugins/tika/TextPopulatorTest.java
@@ -42,12 +42,12 @@ import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
+import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
-import static org.apache.jackrabbit.guava.common.base.Charsets.UTF_8;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
@@ -301,7 +301,7 @@ public class TextPopulatorTest {
@Override
public InputStream openStream() {
- return new ByteArrayInputStream(data.getBytes(UTF_8));
+ return new ByteArrayInputStream(data.getBytes(StandardCharsets.UTF_8));
}
}
}
diff --git
a/oak-run/src/test/java/org/apache/jackrabbit/oak/plugins/tika/TikaHelperTest.java
b/oak-run/src/test/java/org/apache/jackrabbit/oak/plugins/tika/TikaHelperTest.java
index ba7a91b05e..3409de9c91 100644
---
a/oak-run/src/test/java/org/apache/jackrabbit/oak/plugins/tika/TikaHelperTest.java
+++
b/oak-run/src/test/java/org/apache/jackrabbit/oak/plugins/tika/TikaHelperTest.java
@@ -20,8 +20,8 @@
package org.apache.jackrabbit.oak.plugins.tika;
import java.io.File;
+import java.nio.charset.StandardCharsets;
-import org.apache.jackrabbit.guava.common.base.Charsets;
import org.apache.jackrabbit.guava.common.io.Files;
import org.junit.Rule;
import org.junit.Test;
@@ -55,9 +55,9 @@ public class TikaHelperTest {
" </parser>\n" +
" </parsers>\n" +
"</properties>";
- Files.write(configText, config, Charsets.UTF_8);
+ Files.write(configText, config, StandardCharsets.UTF_8);
TikaHelper tika = new TikaHelper(config);
assertFalse(tika.isIndexed("application/xml"));
}
-}
\ No newline at end of file
+}
diff --git
a/oak-run/src/test/java/org/apache/jackrabbit/oak/run/DataStoreCheckTest.java
b/oak-run/src/test/java/org/apache/jackrabbit/oak/run/DataStoreCheckTest.java
index 460e25a9e0..60e720cf31 100644
---
a/oak-run/src/test/java/org/apache/jackrabbit/oak/run/DataStoreCheckTest.java
+++
b/oak-run/src/test/java/org/apache/jackrabbit/oak/run/DataStoreCheckTest.java
@@ -18,7 +18,6 @@
*/
package org.apache.jackrabbit.oak.run;
-import static org.apache.jackrabbit.guava.common.base.Charsets.UTF_8;
import static org.junit.Assert.assertEquals;
import java.io.ByteArrayInputStream;
@@ -30,6 +29,7 @@ import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.PrintStream;
+import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Date;
import java.util.Iterator;
@@ -406,10 +406,10 @@ public class DataStoreCheckTest {
public static void testIncorrectParams(List<String> argList,
ArrayList<String> assertMsg) throws Exception {
ByteArrayOutputStream buffer = new ByteArrayOutputStream();
- System.setErr(new PrintStream(buffer, true, UTF_8.toString()));
+ System.setErr(new PrintStream(buffer, true, StandardCharsets.UTF_8));
DataStoreCheckCommand.checkDataStore(argList.toArray(new String[0]));
- String message = buffer.toString(UTF_8.toString());
+ String message = buffer.toString(StandardCharsets.UTF_8);
log.info("Assert message: {}", assertMsg);
log.info("Message logged in System.err: {}", message);
diff --git
a/oak-segment-azure/src/main/java/org/apache/jackrabbit/oak/segment/azure/AzureGCJournalFile.java
b/oak-segment-azure/src/main/java/org/apache/jackrabbit/oak/segment/azure/AzureGCJournalFile.java
index 899912e2dd..c33094750b 100644
---
a/oak-segment-azure/src/main/java/org/apache/jackrabbit/oak/segment/azure/AzureGCJournalFile.java
+++
b/oak-segment-azure/src/main/java/org/apache/jackrabbit/oak/segment/azure/AzureGCJournalFile.java
@@ -16,7 +16,6 @@
*/
package org.apache.jackrabbit.oak.segment.azure;
-import org.apache.jackrabbit.guava.common.base.Charsets;
import com.microsoft.azure.storage.StorageException;
import com.microsoft.azure.storage.blob.CloudAppendBlob;
import org.apache.commons.io.IOUtils;
@@ -25,6 +24,7 @@ import
org.apache.jackrabbit.oak.segment.spi.persistence.GCJournalFile;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
import java.util.Collections;
import java.util.List;
@@ -42,7 +42,7 @@ public class AzureGCJournalFile implements GCJournalFile {
if (!gcJournal.exists()) {
gcJournal.createOrReplace();
}
- gcJournal.appendText(line + "\n", Charsets.UTF_8.name(), null,
null, null);
+ gcJournal.appendText(line + "\n", StandardCharsets.UTF_8.name(),
null, null, null);
} catch (StorageException e) {
throw new IOException(e);
}
diff --git
a/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/DefaultSegmentWriter.java
b/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/DefaultSegmentWriter.java
index edef45810b..7e32b57dce 100644
---
a/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/DefaultSegmentWriter.java
+++
b/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/DefaultSegmentWriter.java
@@ -19,7 +19,6 @@
package org.apache.jackrabbit.oak.segment;
-import static org.apache.jackrabbit.guava.common.base.Charsets.UTF_8;
import static
org.apache.jackrabbit.guava.common.base.Preconditions.checkArgument;
import static
org.apache.jackrabbit.guava.common.base.Preconditions.checkElementIndex;
import static
org.apache.jackrabbit.guava.common.base.Preconditions.checkNotNull;
@@ -53,6 +52,7 @@ import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.SequenceInputStream;
+import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
@@ -519,7 +519,7 @@ public class DefaultSegmentWriter implements SegmentWriter {
return id; // shortcut if the same string was recently stored
}
- byte[] data = string.getBytes(UTF_8);
+ byte[] data = string.getBytes(StandardCharsets.UTF_8);
if (data.length < Segment.MEDIUM_LIMIT) {
// only cache short strings to avoid excessive memory use
@@ -607,7 +607,7 @@ public class DefaultSegmentWriter implements SegmentWriter {
* @see Segment#BLOB_ID_SMALL_LIMIT
*/
private RecordId writeBlobId(String blobId) throws IOException {
- byte[] data = blobId.getBytes(UTF_8);
+ byte[] data = blobId.getBytes(StandardCharsets.UTF_8);
if (data.length < Segment.BLOB_ID_SMALL_LIMIT) {
return writeOperationHandler.execute(gcGeneration,
newWriteOperation(
diff --git
a/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/Segment.java
b/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/Segment.java
index 830b4051d3..abf1f89404 100644
---
a/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/Segment.java
+++
b/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/Segment.java
@@ -36,11 +36,11 @@ import static
org.apache.jackrabbit.oak.segment.file.tar.GCGeneration.newGCGener
import java.io.IOException;
import java.io.OutputStream;
import java.io.PrintStream;
+import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.Iterator;
import java.util.UUID;
-import org.apache.jackrabbit.guava.common.base.Charsets;
import org.apache.jackrabbit.guava.common.collect.AbstractIterator;
import org.apache.commons.io.HexDump;
import org.apache.commons.io.output.ByteArrayOutputStream;
@@ -221,7 +221,7 @@ public class Segment {
ByteArrayOutputStream out = new ByteArrayOutputStream();
try {
HexDump.dump(bytes, 0, out, 0);
- return out.toString(Charsets.UTF_8.name());
+ return out.toString(StandardCharsets.UTF_8);
} catch (IOException e) {
return "Error dumping segment: " + e.getMessage();
} finally {
diff --git
a/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/SegmentBlob.java
b/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/SegmentBlob.java
index 05cca5caa3..ca0ef82ada 100644
---
a/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/SegmentBlob.java
+++
b/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/SegmentBlob.java
@@ -18,7 +18,6 @@
*/
package org.apache.jackrabbit.oak.segment;
-import static org.apache.jackrabbit.guava.common.base.Charsets.UTF_8;
import static org.apache.jackrabbit.guava.common.collect.Sets.newHashSet;
import static java.util.Collections.emptySet;
import static org.apache.jackrabbit.oak.segment.Segment.MEDIUM_LIMIT;
@@ -26,6 +25,7 @@ import static
org.apache.jackrabbit.oak.segment.Segment.SMALL_LIMIT;
import static org.apache.jackrabbit.oak.segment.SegmentStream.BLOCK_SIZE;
import java.io.InputStream;
+import java.nio.charset.StandardCharsets;
import java.util.List;
import java.util.Map;
import java.util.Set;
@@ -238,7 +238,7 @@ public class SegmentBlob extends Record implements Blob {
private static String readShortBlobId(Segment segment, int recordNumber,
byte head) {
int length = (head & 0x0f) << 8 | (segment.readByte(recordNumber, 1) &
0xff);
- return segment.readBytes(recordNumber, 2,
length).decode(UTF_8).toString();
+ return segment.readBytes(recordNumber, 2,
length).decode(StandardCharsets.UTF_8).toString();
}
private static String readLongBlobId(Segment segment, int recordNumber,
Function<RecordId, Segment> getSegmentFunction) {
diff --git
a/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/SegmentBufferWriter.java
b/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/SegmentBufferWriter.java
index 4c9eeb9f9b..cd84140f22 100644
---
a/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/SegmentBufferWriter.java
+++
b/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/SegmentBufferWriter.java
@@ -19,7 +19,6 @@
package org.apache.jackrabbit.oak.segment;
-import static org.apache.jackrabbit.guava.common.base.Charsets.UTF_8;
import static
org.apache.jackrabbit.guava.common.base.Preconditions.checkArgument;
import static
org.apache.jackrabbit.guava.common.base.Preconditions.checkNotNull;
import static org.apache.jackrabbit.guava.common.base.Preconditions.checkState;
@@ -40,6 +39,7 @@ import static
org.apache.jackrabbit.oak.segment.SegmentVersion.LATEST_VERSION;
import java.io.IOException;
import java.io.PrintStream;
+import java.nio.charset.StandardCharsets;
import java.util.Collection;
import java.util.Set;
@@ -220,7 +220,7 @@ public class SegmentBufferWriter implements
WriteOperationHandler {
statistics = new Statistics();
statistics.id = segment.getSegmentId();
- byte[] data = metaInfo.getBytes(UTF_8);
+ byte[] data = metaInfo.getBytes(StandardCharsets.UTF_8);
RecordWriters.newValueWriter(data.length, data).write(this, store);
dirty = false;
diff --git
a/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/SegmentDump.java
b/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/SegmentDump.java
index a3744ee333..467f0b9d22 100644
---
a/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/SegmentDump.java
+++
b/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/SegmentDump.java
@@ -24,9 +24,9 @@ import static
org.apache.jackrabbit.oak.segment.Segment.MAX_SEGMENT_SIZE;
import java.io.OutputStream;
import java.io.PrintWriter;
import java.io.StringWriter;
+import java.nio.charset.StandardCharsets;
import java.util.function.Consumer;
-import org.apache.jackrabbit.guava.common.base.Charsets;
import org.apache.commons.io.output.WriterOutputStream;
import org.apache.jackrabbit.oak.segment.RecordNumbers.Entry;
import org.apache.jackrabbit.oak.segment.file.tar.GCGeneration;
@@ -57,7 +57,7 @@ class SegmentDump {
}
}
writer.println("--------------------------------------------------------------------------");
- dumper.accept(new WriterOutputStream(writer, Charsets.UTF_8));
+ dumper.accept(new WriterOutputStream(writer,
StandardCharsets.UTF_8));
writer.println("--------------------------------------------------------------------------");
}
return string.toString();
diff --git
a/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/SegmentParser.java
b/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/SegmentParser.java
index 084e6bb633..089163adb5 100644
---
a/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/SegmentParser.java
+++
b/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/SegmentParser.java
@@ -19,7 +19,6 @@
package org.apache.jackrabbit.oak.segment;
-import static org.apache.jackrabbit.guava.common.base.Charsets.UTF_8;
import static
org.apache.jackrabbit.guava.common.base.Preconditions.checkArgument;
import static
org.apache.jackrabbit.guava.common.base.Preconditions.checkNotNull;
import static org.apache.jackrabbit.guava.common.collect.Lists.newArrayList;
@@ -36,6 +35,7 @@ import static
org.apache.jackrabbit.oak.segment.SegmentStream.BLOCK_SIZE;
import static org.apache.jackrabbit.oak.segment.Template.MANY_CHILD_NODES;
import static org.apache.jackrabbit.oak.segment.Template.ZERO_CHILD_NODES;
+import java.nio.charset.StandardCharsets;
import java.util.List;
import org.apache.jackrabbit.oak.api.Type;
@@ -677,12 +677,12 @@ public class SegmentParser {
blobType = BlobType.LONG;
} else if ((head & 0xf0) == 0xe0) {
// 1110 xxxx: external value, short blob ID
- int length = UTF_8.encode(requireNonNull(readBlobId(segment,
blobId.getRecordNumber()))).limit();
+ int length =
StandardCharsets.UTF_8.encode(requireNonNull(readBlobId(segment,
blobId.getRecordNumber()))).limit();
size += (2 + length);
blobType = BlobType.EXTERNAL;
} else if ((head & 0xf8) == 0xf0) {
// 1111 0xxx: external value, long blob ID
- int length = UTF_8.encode(requireNonNull(readBlobId(segment,
blobId.getRecordNumber()))).limit();
+ int length =
StandardCharsets.UTF_8.encode(requireNonNull(readBlobId(segment,
blobId.getRecordNumber()))).limit();
size += (2 + length);
blobType = BlobType.EXTERNAL;
} else {
diff --git
a/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/SegmentStream.java
b/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/SegmentStream.java
index 59caf85565..dcbae351d5 100644
---
a/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/SegmentStream.java
+++
b/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/SegmentStream.java
@@ -25,9 +25,9 @@ import static
org.apache.jackrabbit.guava.common.base.Preconditions.checkState;
import java.io.IOException;
import java.io.InputStream;
+import java.nio.charset.StandardCharsets;
import java.util.List;
-import org.apache.jackrabbit.guava.common.base.Charsets;
import org.apache.jackrabbit.guava.common.io.ByteStreams;
import org.apache.jackrabbit.oak.commons.Buffer;
@@ -95,7 +95,7 @@ public class SegmentStream extends InputStream {
public String getString() {
if (inline != null) {
- return inline.decode(Charsets.UTF_8).toString();
+ return inline.decode(StandardCharsets.UTF_8).toString();
} else if (length > Integer.MAX_VALUE) {
throw new IllegalStateException("Too long value: " + length);
} else {
@@ -103,7 +103,7 @@ public class SegmentStream extends InputStream {
try {
byte[] data = new byte[(int) length];
ByteStreams.readFully(stream, data);
- return new String(data, Charsets.UTF_8);
+ return new String(data, StandardCharsets.UTF_8);
} catch (IOException e) {
throw new IllegalStateException("Unexpected IOException", e);
} finally {
diff --git
a/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/data/SegmentDataV12.java
b/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/data/SegmentDataV12.java
index 3b0a22cd97..d7f4274322 100644
---
a/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/data/SegmentDataV12.java
+++
b/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/data/SegmentDataV12.java
@@ -19,10 +19,10 @@ package org.apache.jackrabbit.oak.segment.data;
import java.io.IOException;
import java.io.OutputStream;
+import java.nio.charset.StandardCharsets;
import org.apache.jackrabbit.oak.commons.Buffer;
-import org.apache.jackrabbit.guava.common.base.Charsets;
class SegmentDataV12 implements SegmentData {
@@ -85,7 +85,7 @@ class SegmentDataV12 implements SegmentData {
signature[i] = buffer.get(SIGNATURE_OFFSET + i);
}
- return new String(signature, Charsets.UTF_8);
+ return new String(signature, StandardCharsets.UTF_8);
}
@Override
@@ -197,7 +197,7 @@ class SegmentDataV12 implements SegmentData {
Buffer duplicate = buffer.duplicate();
duplicate.position(index);
duplicate.limit(index + length);
- String string = duplicate.decode(Charsets.UTF_8).toString();
+ String string = duplicate.decode(StandardCharsets.UTF_8).toString();
return new StringData(string, length);
}
diff --git
a/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/file/LocalGCJournalFile.java
b/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/file/LocalGCJournalFile.java
index ee91f56150..daeb25c979 100644
---
a/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/file/LocalGCJournalFile.java
+++
b/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/file/LocalGCJournalFile.java
@@ -23,10 +23,10 @@ import
org.apache.jackrabbit.oak.segment.spi.persistence.GCJournalFile;
import java.io.BufferedWriter;
import java.io.File;
import java.io.IOException;
+import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
-import static org.apache.jackrabbit.guava.common.base.Charsets.UTF_8;
import static java.nio.file.Files.newBufferedWriter;
import static java.nio.file.Files.readAllLines;
import static java.nio.file.StandardOpenOption.APPEND;
@@ -48,7 +48,7 @@ public class LocalGCJournalFile implements GCJournalFile {
@Override
public void writeLine(String line) throws IOException {
- try (BufferedWriter w = newBufferedWriter(file.toPath(), UTF_8, WRITE,
APPEND, CREATE, DSYNC)) {
+ try (BufferedWriter w = newBufferedWriter(file.toPath(),
StandardCharsets.UTF_8, WRITE, APPEND, CREATE, DSYNC)) {
w.write(line);
w.newLine();
}
@@ -57,7 +57,7 @@ public class LocalGCJournalFile implements GCJournalFile {
@Override
public List<String> readLines() throws IOException {
if (file.exists()) {
- return readAllLines(file.toPath(), UTF_8);
+ return readAllLines(file.toPath(), StandardCharsets.UTF_8);
}
return new ArrayList<String>();
}
diff --git
a/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/file/tar/SegmentTarManager.java
b/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/file/tar/SegmentTarManager.java
index 0c23f3aae4..4b11e3d0c7 100644
---
a/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/file/tar/SegmentTarManager.java
+++
b/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/file/tar/SegmentTarManager.java
@@ -18,13 +18,13 @@
*/
package org.apache.jackrabbit.oak.segment.file.tar;
-import static org.apache.jackrabbit.guava.common.base.Charsets.UTF_8;
import static org.apache.jackrabbit.oak.commons.Buffer.wrap;
import static
org.apache.jackrabbit.oak.segment.file.tar.TarConstants.BLOCK_SIZE;
import java.io.File;
import java.io.IOException;
import java.io.RandomAccessFile;
+import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.util.Arrays;
import java.util.LinkedHashMap;
@@ -225,7 +225,7 @@ public class SegmentTarManager implements
SegmentArchiveManager {
sum += ' ';
}
- byte[] checkbytes = String.format("%06o\0 ", sum).getBytes(UTF_8);
+ byte[] checkbytes = String.format("%06o\0 ",
sum).getBytes(StandardCharsets.UTF_8);
for (int i = 0; i < checkbytes.length; i++) {
if (checkbytes[i] != header[148 + i]) {
log.warn("Invalid entry checksum at offset {} in tar file
{}, skipping...",
@@ -292,7 +292,7 @@ public class SegmentTarManager implements
SegmentArchiveManager {
while (n < fieldSize && b[n] != 0) {
n++;
}
- return new String(b, 0, n, UTF_8);
+ return new String(b, 0, n, StandardCharsets.UTF_8);
}
private static int readNumber(Buffer buffer, int fieldSize) {
diff --git
a/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/file/tar/SegmentTarWriter.java
b/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/file/tar/SegmentTarWriter.java
index c3d726faa6..cc1e76ce69 100644
---
a/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/file/tar/SegmentTarWriter.java
+++
b/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/file/tar/SegmentTarWriter.java
@@ -18,7 +18,6 @@
*/
package org.apache.jackrabbit.oak.segment.file.tar;
-import static org.apache.jackrabbit.guava.common.base.Charsets.UTF_8;
import static org.apache.jackrabbit.guava.common.base.Preconditions.checkState;
import static
org.apache.jackrabbit.oak.segment.file.tar.TarConstants.BLOCK_SIZE;
@@ -27,6 +26,7 @@ import java.io.File;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.nio.channels.FileChannel;
+import java.nio.charset.StandardCharsets;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.Map;
@@ -250,34 +250,34 @@ public class SegmentTarWriter implements
SegmentArchiveWriter {
byte[] header = new byte[BLOCK_SIZE];
// File name
- byte[] nameBytes = name.getBytes(UTF_8);
+ byte[] nameBytes = name.getBytes(StandardCharsets.UTF_8);
System.arraycopy(
nameBytes, 0, header, 0, Math.min(nameBytes.length, 100));
// File mode
System.arraycopy(
- String.format("%07o", 0400).getBytes(UTF_8), 0,
+ String.format("%07o", 0400).getBytes(StandardCharsets.UTF_8),
0,
header, 100, 7);
// User's numeric user ID
System.arraycopy(
- String.format("%07o", 0).getBytes(UTF_8), 0,
+ String.format("%07o", 0).getBytes(StandardCharsets.UTF_8), 0,
header, 108, 7);
// Group's numeric user ID
System.arraycopy(
- String.format("%07o", 0).getBytes(UTF_8), 0,
+ String.format("%07o", 0).getBytes(StandardCharsets.UTF_8), 0,
header, 116, 7);
// File size in bytes (octal basis)
System.arraycopy(
- String.format("%011o", size).getBytes(UTF_8), 0,
+ String.format("%011o", size).getBytes(StandardCharsets.UTF_8),
0,
header, 124, 11);
// Last modification time in numeric Unix time format (octal)
long time = System.currentTimeMillis() / 1000;
System.arraycopy(
- String.format("%011o", time).getBytes(UTF_8), 0,
+ String.format("%011o", time).getBytes(StandardCharsets.UTF_8),
0,
header, 136, 11);
// Checksum for header record
@@ -294,7 +294,7 @@ public class SegmentTarWriter implements
SegmentArchiveWriter {
checksum += aHeader & 0xff;
}
System.arraycopy(
- String.format("%06o\0 ", checksum).getBytes(UTF_8), 0,
+ String.format("%06o\0 ",
checksum).getBytes(StandardCharsets.UTF_8), 0,
header, 148, 8);
return header;
diff --git
a/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/file/tar/binaries/BinaryReferencesIndexLoaderV1.java
b/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/file/tar/binaries/BinaryReferencesIndexLoaderV1.java
index 56f289e8d0..760f59c854 100644
---
a/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/file/tar/binaries/BinaryReferencesIndexLoaderV1.java
+++
b/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/file/tar/binaries/BinaryReferencesIndexLoaderV1.java
@@ -18,6 +18,7 @@
package org.apache.jackrabbit.oak.segment.file.tar.binaries;
import java.io.IOException;
+import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
@@ -25,7 +26,6 @@ import java.util.Set;
import java.util.UUID;
import java.util.zip.CRC32;
-import org.apache.jackrabbit.guava.common.base.Charsets;
import org.apache.jackrabbit.oak.commons.Buffer;
import org.apache.jackrabbit.oak.segment.util.ReaderAtEnd;
@@ -145,6 +145,6 @@ class BinaryReferencesIndexLoaderV1 {
private static String parseString(int length, Buffer buffer) {
byte[] data = new byte[length];
buffer.get(data);
- return new String(data, Charsets.UTF_8);
+ return new String(data, StandardCharsets.UTF_8);
}
}
diff --git
a/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/file/tar/binaries/BinaryReferencesIndexLoaderV2.java
b/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/file/tar/binaries/BinaryReferencesIndexLoaderV2.java
index 189333b845..e464b5f5f1 100644
---
a/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/file/tar/binaries/BinaryReferencesIndexLoaderV2.java
+++
b/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/file/tar/binaries/BinaryReferencesIndexLoaderV2.java
@@ -18,6 +18,7 @@
package org.apache.jackrabbit.oak.segment.file.tar.binaries;
import java.io.IOException;
+import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
@@ -25,7 +26,6 @@ import java.util.Set;
import java.util.UUID;
import java.util.zip.CRC32;
-import org.apache.jackrabbit.guava.common.base.Charsets;
import org.apache.jackrabbit.oak.commons.Buffer;
import org.apache.jackrabbit.oak.segment.util.ReaderAtEnd;
@@ -147,6 +147,6 @@ class BinaryReferencesIndexLoaderV2 {
private static String parseString(int length, Buffer buffer) {
byte[] data = new byte[length];
buffer.get(data);
- return new String(data, Charsets.UTF_8);
+ return new String(data, StandardCharsets.UTF_8);
}
}
diff --git
a/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/file/tar/binaries/BinaryReferencesIndexWriter.java
b/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/file/tar/binaries/BinaryReferencesIndexWriter.java
index 4ad402bb97..5598bad9d8 100644
---
a/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/file/tar/binaries/BinaryReferencesIndexWriter.java
+++
b/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/file/tar/binaries/BinaryReferencesIndexWriter.java
@@ -17,6 +17,7 @@
package org.apache.jackrabbit.oak.segment.file.tar.binaries;
+import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
@@ -27,7 +28,6 @@ import java.util.zip.CRC32;
import org.apache.jackrabbit.oak.commons.Buffer;
-import org.apache.jackrabbit.guava.common.base.Charsets;
/**
* Maintains the transient state of a binary references index, formats it and
@@ -125,7 +125,7 @@ public class BinaryReferencesIndexWriter {
binaryReferenceSize += 4;
// A variable amount of bytes, depending on the reference
itself.
- binaryReferenceSize +=
reference.getBytes(Charsets.UTF_8).length;
+ binaryReferenceSize +=
reference.getBytes(StandardCharsets.UTF_8).length;
}
}
}
@@ -150,7 +150,7 @@ public class BinaryReferencesIndexWriter {
buffer.putInt(references.size());
for (String reference : references) {
- byte[] bytes = reference.getBytes(Charsets.UTF_8);
+ byte[] bytes = reference.getBytes(StandardCharsets.UTF_8);
buffer.putInt(bytes.length);
buffer.put(bytes);
diff --git
a/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/standby/codec/GetReferencesResponseEncoder.java
b/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/standby/codec/GetReferencesResponseEncoder.java
index 49e6901b15..10de5cf6b6 100644
---
a/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/standby/codec/GetReferencesResponseEncoder.java
+++
b/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/standby/codec/GetReferencesResponseEncoder.java
@@ -17,7 +17,8 @@
package org.apache.jackrabbit.oak.segment.standby.codec;
-import org.apache.jackrabbit.guava.common.base.Charsets;
+import java.nio.charset.StandardCharsets;
+
import org.apache.jackrabbit.guava.common.base.Joiner;
import io.netty.buffer.ByteBuf;
import io.netty.channel.ChannelHandlerContext;
@@ -36,7 +37,7 @@ public class GetReferencesResponseEncoder extends
MessageToByteEncoder<GetRefere
}
private static void encode(String segmentId, Iterable<String> references,
ByteBuf out) {
- byte[] data = serialize(segmentId,
references).getBytes(Charsets.UTF_8);
+ byte[] data = serialize(segmentId,
references).getBytes(StandardCharsets.UTF_8);
out.writeInt(data.length + 1);
out.writeByte(Messages.HEADER_REFERENCES);
out.writeBytes(data);
diff --git
a/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/standby/codec/ResponseDecoder.java
b/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/standby/codec/ResponseDecoder.java
index 63e5cc5530..3b650b57dc 100644
---
a/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/standby/codec/ResponseDecoder.java
+++
b/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/standby/codec/ResponseDecoder.java
@@ -27,11 +27,11 @@ import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
+import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.util.List;
import java.util.UUID;
-import org.apache.jackrabbit.guava.common.base.Charsets;
import org.apache.jackrabbit.guava.common.hash.Hashing;
import io.netty.buffer.ByteBuf;
import io.netty.channel.ChannelHandlerContext;
@@ -104,7 +104,7 @@ public class ResponseDecoder extends ByteToMessageDecoder {
private static void decodeGetHeadResponse(int length, ByteBuf in,
List<Object> out) {
byte[] data = new byte[length - 1];
in.readBytes(data);
- String recordId = new String(data, Charsets.UTF_8);
+ String recordId = new String(data, StandardCharsets.UTF_8);
out.add(new GetHeadResponse(null, recordId));
}
@@ -134,7 +134,7 @@ public class ResponseDecoder extends ByteToMessageDecoder {
int blobIdLength = in.readInt();
byte[] blobIdBytes = new byte[blobIdLength];
in.readBytes(blobIdBytes);
- String blobId = new String(blobIdBytes, Charsets.UTF_8);
+ String blobId = new String(blobIdBytes, StandardCharsets.UTF_8);
File tempFile = new File(spoolFolder, blobId + ".tmp");
// START_CHUNK flag enabled
@@ -180,7 +180,7 @@ public class ResponseDecoder extends ByteToMessageDecoder {
in.readBytes(data);
- String body = new String(data, Charsets.UTF_8);
+ String body = new String(data, StandardCharsets.UTF_8);
int colon = body.indexOf(":");
diff --git
a/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/DefaultSegmentWriterTest.java
b/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/DefaultSegmentWriterTest.java
index d2e96c1707..6af65003d9 100644
---
a/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/DefaultSegmentWriterTest.java
+++
b/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/DefaultSegmentWriterTest.java
@@ -35,6 +35,7 @@ import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
+import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.Collections;
import java.util.Iterator;
@@ -56,7 +57,6 @@ import org.junit.rules.TemporaryFolder;
import org.mockito.Mockito;
import org.slf4j.LoggerFactory;
-import org.apache.jackrabbit.guava.common.base.Charsets;
import org.apache.jackrabbit.guava.common.base.Strings;
import org.apache.jackrabbit.guava.common.collect.ImmutableMap;
@@ -70,7 +70,7 @@ public class DefaultSegmentWriterTest {
private static final String HELLO_WORLD = "Hello, World!";
- private final byte[] bytes = HELLO_WORLD.getBytes(Charsets.UTF_8);
+ private final byte[] bytes = HELLO_WORLD.getBytes(StandardCharsets.UTF_8);
private static final int SMALL_BINARIES_INLINE_THRESHOLD = 4;
@@ -104,7 +104,7 @@ public class DefaultSegmentWriterTest {
InputStream stream = new ByteArrayInputStream(bytes);
RecordId valueId = writer.writeStream(stream);
SegmentBlob blob = new SegmentBlob(null, valueId);
- assertEquals(HELLO_WORLD, IOUtils.toString(blob.getNewStream(),
Charsets.UTF_8));
+ assertEquals(HELLO_WORLD, IOUtils.toString(blob.getNewStream(),
StandardCharsets.UTF_8));
}
@Test
@@ -117,14 +117,14 @@ public class DefaultSegmentWriterTest {
for (int i = 0; i + n <= bytes.length; i++) {
Arrays.fill(bytes, i, i + n, (byte) '.');
assertEquals(n, block.read(i, bytes, i, n));
- assertEquals(HELLO_WORLD, new String(bytes, Charsets.UTF_8));
+ assertEquals(HELLO_WORLD, new String(bytes,
StandardCharsets.UTF_8));
}
}
// Check reading with a too long length
byte[] large = new byte[bytes.length * 2];
assertEquals(bytes.length, block.read(0, large, 0, large.length));
- assertEquals(HELLO_WORLD, new String(large, 0, bytes.length,
Charsets.UTF_8));
+ assertEquals(HELLO_WORLD, new String(large, 0, bytes.length,
StandardCharsets.UTF_8));
}
@Test
diff --git
a/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/file/tar/TarFileTest.java
b/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/file/tar/TarFileTest.java
index 4b2ac413c2..e521b0602a 100644
---
a/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/file/tar/TarFileTest.java
+++
b/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/file/tar/TarFileTest.java
@@ -18,7 +18,6 @@
*/
package org.apache.jackrabbit.oak.segment.file.tar;
-import static org.apache.jackrabbit.guava.common.base.Charsets.UTF_8;
import static org.apache.jackrabbit.guava.common.collect.Lists.newArrayList;
import static org.apache.jackrabbit.guava.common.collect.Maps.newHashMap;
import static
org.apache.jackrabbit.oak.segment.file.tar.GCGeneration.newGCGeneration;
@@ -28,6 +27,7 @@ import static
org.mockito.internal.util.collections.Sets.newSet;
import java.io.File;
import java.io.IOException;
+import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
@@ -70,7 +70,7 @@ public class TarFileTest {
UUID id = UUID.randomUUID();
long msb = id.getMostSignificantBits();
long lsb = id.getLeastSignificantBits() & (-1 >>> 4); // OAK-1672
- byte[] data = "Hello, World!".getBytes(UTF_8);
+ byte[] data = "Hello, World!".getBytes(StandardCharsets.UTF_8);
try (TarWriter writer = new TarWriter(archiveManager,
"data00000a.tar")) {
writer.writeEntry(msb, lsb, data, 0, data.length, generation(0));
@@ -89,7 +89,7 @@ public class TarFileTest {
long msb = id.getMostSignificantBits();
long lsb = id.getLeastSignificantBits();
String data = "test";
- byte[] buffer = data.getBytes(UTF_8);
+ byte[] buffer = data.getBytes(StandardCharsets.UTF_8);
try (TarWriter writer = new TarWriter(archiveManager,
"data00000a.tar")) {
writer.writeEntry(msb, lsb, buffer, 0, buffer.length,
newGCGeneration(1, 2, false));
@@ -107,7 +107,7 @@ public class TarFileTest {
long msb = id.getMostSignificantBits();
long lsb = id.getLeastSignificantBits();
String data = "test";
- byte[] buffer = data.getBytes(UTF_8);
+ byte[] buffer = data.getBytes(StandardCharsets.UTF_8);
try (TarWriter writer = new TarWriter(archiveManager,
"data00000a.tar")) {
writer.writeEntry(msb, lsb, buffer, 0, buffer.length,
newGCGeneration(1, 2, true));
diff --git
a/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/file/tar/TarWriterTest.java
b/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/file/tar/TarWriterTest.java
index 92b2eb5dd9..5b0ff2248b 100644
---
a/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/file/tar/TarWriterTest.java
+++
b/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/file/tar/TarWriterTest.java
@@ -19,7 +19,6 @@
package org.apache.jackrabbit.oak.segment.file.tar;
-import static org.apache.jackrabbit.guava.common.base.Charsets.UTF_8;
import static
org.apache.jackrabbit.oak.segment.file.tar.GCGeneration.newGCGeneration;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotSame;
@@ -29,6 +28,7 @@ import static org.junit.Assert.assertTrue;
import java.io.File;
import java.io.IOException;
+import java.nio.charset.StandardCharsets;
import java.util.UUID;
import org.apache.jackrabbit.oak.segment.file.UnrecoverableArchiveException;
@@ -81,7 +81,7 @@ public class TarWriterTest {
UUID id = UUID.randomUUID();
long msb = id.getMostSignificantBits();
long lsb = id.getLeastSignificantBits() & (-1 >>> 4); // OAK-1672
- byte[] data = "Hello, World!".getBytes(UTF_8);
+ byte[] data = "Hello, World!".getBytes(StandardCharsets.UTF_8);
writer.writeEntry(msb, lsb, data, 0, data.length, newGCGeneration(0,
0, false));
}
diff --git
a/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/file/tar/binaries/BinaryReferencesIndexLoaderTest.java
b/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/file/tar/binaries/BinaryReferencesIndexLoaderTest.java
index ff885140ea..caf707d81b 100644
---
a/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/file/tar/binaries/BinaryReferencesIndexLoaderTest.java
+++
b/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/file/tar/binaries/BinaryReferencesIndexLoaderTest.java
@@ -20,6 +20,7 @@ package org.apache.jackrabbit.oak.segment.file.tar.binaries;
import static java.util.Arrays.asList;
import static org.junit.Assert.assertEquals;
+import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
@@ -27,7 +28,6 @@ import java.util.Set;
import java.util.UUID;
import java.util.zip.CRC32;
-import org.apache.jackrabbit.guava.common.base.Charsets;
import org.apache.jackrabbit.oak.commons.Buffer;
import org.junit.Test;
@@ -39,7 +39,7 @@ public class BinaryReferencesIndexLoaderTest {
}
private static byte[] bytes(String s) {
- return s.getBytes(Charsets.UTF_8);
+ return s.getBytes(StandardCharsets.UTF_8);
}
private static int checksum(Buffer buffer) {
diff --git
a/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/file/tar/binaries/BinaryReferencesIndexLoaderV1Test.java
b/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/file/tar/binaries/BinaryReferencesIndexLoaderV1Test.java
index bbb4153154..c5fde02a6d 100644
---
a/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/file/tar/binaries/BinaryReferencesIndexLoaderV1Test.java
+++
b/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/file/tar/binaries/BinaryReferencesIndexLoaderV1Test.java
@@ -24,9 +24,9 @@ import static
org.apache.jackrabbit.oak.segment.file.tar.binaries.BinaryReferenc
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
+import java.nio.charset.StandardCharsets;
import java.util.zip.CRC32;
-import org.apache.jackrabbit.guava.common.base.Charsets;
import org.apache.jackrabbit.oak.commons.Buffer;
import org.junit.Test;
@@ -38,7 +38,7 @@ public class BinaryReferencesIndexLoaderV1Test {
}
private static byte[] bytes(String s) {
- return s.getBytes(Charsets.UTF_8);
+ return s.getBytes(StandardCharsets.UTF_8);
}
private static BinaryReferencesIndex loadIndex(Buffer buffer) throws
Exception {
diff --git
a/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/file/tar/binaries/BinaryReferencesIndexLoaderV2Test.java
b/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/file/tar/binaries/BinaryReferencesIndexLoaderV2Test.java
index a9f6936881..8e874e2ba5 100644
---
a/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/file/tar/binaries/BinaryReferencesIndexLoaderV2Test.java
+++
b/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/file/tar/binaries/BinaryReferencesIndexLoaderV2Test.java
@@ -24,9 +24,9 @@ import static
org.apache.jackrabbit.oak.segment.file.tar.binaries.BinaryReferenc
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
+import java.nio.charset.StandardCharsets;
import java.util.zip.CRC32;
-import org.apache.jackrabbit.guava.common.base.Charsets;
import org.apache.jackrabbit.oak.commons.Buffer;
import org.junit.Test;
@@ -38,7 +38,7 @@ public class BinaryReferencesIndexLoaderV2Test {
}
private static byte[] bytes(String s) {
- return s.getBytes(Charsets.UTF_8);
+ return s.getBytes(StandardCharsets.UTF_8);
}
private static BinaryReferencesIndex loadIndex(Buffer buffer) throws
Exception {
diff --git
a/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/standby/StandbyTestUtils.java
b/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/standby/StandbyTestUtils.java
index 44f10f43b0..9886a95bba 100644
---
a/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/standby/StandbyTestUtils.java
+++
b/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/standby/StandbyTestUtils.java
@@ -19,9 +19,9 @@ package org.apache.jackrabbit.oak.segment.standby;
import static org.mockito.Mockito.mock;
+import java.nio.charset.StandardCharsets;
import java.util.UUID;
-import org.apache.jackrabbit.guava.common.base.Charsets;
import org.apache.jackrabbit.guava.common.hash.Hashing;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
@@ -77,7 +77,7 @@ public class StandbyTestUtils {
}
public static ByteBuf createBlobChunkBuffer(byte header, long blobLength,
String blobId, byte[] data, byte mask) {
- byte[] blobIdBytes = blobId.getBytes(Charsets.UTF_8);
+ byte[] blobIdBytes = blobId.getBytes(StandardCharsets.UTF_8);
ByteBuf buf = Unpooled.buffer();
buf.writeInt(1 + 1 + 8 + 4 + blobIdBytes.length + 8 + data.length);
diff --git
a/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/standby/codec/GetHeadResponseEncoderTest.java
b/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/standby/codec/GetHeadResponseEncoderTest.java
index 0f3b6f0a00..529e63ebbe 100644
---
a/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/standby/codec/GetHeadResponseEncoderTest.java
+++
b/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/standby/codec/GetHeadResponseEncoderTest.java
@@ -19,7 +19,8 @@ package org.apache.jackrabbit.oak.segment.standby.codec;
import static org.junit.Assert.assertEquals;
-import org.apache.jackrabbit.guava.common.base.Charsets;
+import java.nio.charset.StandardCharsets;
+
import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
import io.netty.channel.embedded.EmbeddedChannel;
@@ -30,7 +31,7 @@ public class GetHeadResponseEncoderTest {
@Test
public void encodeResponse() throws Exception {
String recordId = "recordId";
- byte[] recordIdBytes = recordId.getBytes(Charsets.UTF_8);
+ byte[] recordIdBytes = recordId.getBytes(StandardCharsets.UTF_8);
EmbeddedChannel channel = new EmbeddedChannel(new
GetHeadResponseEncoder());
channel.writeOutbound(new GetHeadResponse("clientId", "recordId"));
diff --git
a/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/standby/codec/GetReferencesResponseEncoderTest.java
b/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/standby/codec/GetReferencesResponseEncoderTest.java
index e3dbcd2f33..337e6be125 100644
---
a/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/standby/codec/GetReferencesResponseEncoderTest.java
+++
b/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/standby/codec/GetReferencesResponseEncoderTest.java
@@ -20,7 +20,8 @@ package org.apache.jackrabbit.oak.segment.standby.codec;
import static java.util.Arrays.asList;
import static org.junit.Assert.assertEquals;
-import org.apache.jackrabbit.guava.common.base.Charsets;
+import java.nio.charset.StandardCharsets;
+
import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
import io.netty.channel.embedded.EmbeddedChannel;
@@ -35,7 +36,7 @@ public class GetReferencesResponseEncoderTest {
ByteBuf buffer = (ByteBuf) channel.readOutbound();
String body = "a:b,c";
- byte[] data = body.getBytes(Charsets.UTF_8);
+ byte[] data = body.getBytes(StandardCharsets.UTF_8);
ByteBuf expected = Unpooled.buffer();
expected.writeInt(data.length + 1);
diff --git
a/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/standby/codec/ResponseDecoderTest.java
b/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/standby/codec/ResponseDecoderTest.java
index eec5cbc5c7..3303ca6730 100644
---
a/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/standby/codec/ResponseDecoderTest.java
+++
b/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/standby/codec/ResponseDecoderTest.java
@@ -31,9 +31,9 @@ import static org.junit.Assert.assertTrue;
import java.io.File;
import java.io.InputStream;
+import java.nio.charset.StandardCharsets;
import java.util.UUID;
-import org.apache.jackrabbit.guava.common.base.Charsets;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
import io.netty.channel.embedded.EmbeddedChannel;
@@ -109,7 +109,7 @@ public class ResponseDecoderTest {
byte[] blobData = new byte[] {1, 2, 3};
String blobId = "blobId";
- byte[] blobIdBytes = blobId.getBytes(Charsets.UTF_8);
+ byte[] blobIdBytes = blobId.getBytes(StandardCharsets.UTF_8);
byte mask = createMask(1, 1);
ByteBuf buf = Unpooled.buffer();
@@ -130,7 +130,7 @@ public class ResponseDecoderTest {
@Test
public void shouldDecodeValidGetHeadResponses() throws Exception {
String recordId = "recordId";
- byte[] recordIdBytes = recordId.getBytes(Charsets.UTF_8);
+ byte[] recordIdBytes = recordId.getBytes(StandardCharsets.UTF_8);
ByteBuf in = Unpooled.buffer();
in.writeInt(recordIdBytes.length + 1);
@@ -165,7 +165,7 @@ public class ResponseDecoderTest {
@Test
public void shouldDecodeValidGetReferencesResponses() throws Exception {
- byte[] data = "a:b,c".getBytes(Charsets.UTF_8);
+ byte[] data = "a:b,c".getBytes(StandardCharsets.UTF_8);
ByteBuf buf = Unpooled.buffer();
buf.writeInt(data.length + 1);
@@ -181,7 +181,7 @@ public class ResponseDecoderTest {
@Test
public void shouldDropGetReferencesResponsesWithoutDelimiter() throws
Exception {
- byte[] data = "a".getBytes(Charsets.UTF_8);
+ byte[] data = "a".getBytes(StandardCharsets.UTF_8);
ByteBuf buf = Unpooled.buffer();
buf.writeInt(data.length + 1);
@@ -195,7 +195,7 @@ public class ResponseDecoderTest {
@Test
public void shouldDecodeValidSingleElementGetReferencesResponses() throws
Exception {
- byte[] data = "a:b".getBytes(Charsets.UTF_8);
+ byte[] data = "a:b".getBytes(StandardCharsets.UTF_8);
ByteBuf buf = Unpooled.buffer();
buf.writeInt(data.length + 1);
@@ -211,7 +211,7 @@ public class ResponseDecoderTest {
@Test
public void shouldDecodeValidZeroElementsGetReferencesResponses() throws
Exception {
- byte[] data = "a:".getBytes(Charsets.UTF_8);
+ byte[] data = "a:".getBytes(StandardCharsets.UTF_8);
ByteBuf buf = Unpooled.buffer();
buf.writeInt(data.length + 1);
diff --git
a/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/tool/CheckRepositoryTestBase.java
b/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/tool/CheckRepositoryTestBase.java
index 543accc62d..5d67161eba 100644
---
a/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/tool/CheckRepositoryTestBase.java
+++
b/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/tool/CheckRepositoryTestBase.java
@@ -19,7 +19,6 @@
package org.apache.jackrabbit.oak.segment.tool;
-import static org.apache.jackrabbit.guava.common.base.Charsets.UTF_8;
import java.io.ByteArrayInputStream;
import java.io.File;
@@ -29,6 +28,7 @@ import java.io.InputStream;
import java.io.RandomAccessFile;
import java.nio.ByteBuffer;
import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Random;
@@ -156,7 +156,7 @@ public class CheckRepositoryTestBase {
// read entry size from header
byte[] crtEntryNameBytes = new byte[100];
System.arraycopy(entryHeader.array(), 0, crtEntryNameBytes, 0,
100);
- crtEntryName = new String(crtEntryNameBytes, 0, 100, UTF_8);
+ crtEntryName = new String(crtEntryNameBytes, 0, 100,
StandardCharsets.UTF_8);
crtEntryName = crtEntryName.substring(0,
crtEntryName.indexOf('.'));
byte[] entrySizeBytes = new byte[11];
diff --git
a/oak-store-spi/src/main/java/org/apache/jackrabbit/oak/plugins/memory/StringBasedBlob.java
b/oak-store-spi/src/main/java/org/apache/jackrabbit/oak/plugins/memory/StringBasedBlob.java
index b533149255..26cd223df1 100644
---
a/oak-store-spi/src/main/java/org/apache/jackrabbit/oak/plugins/memory/StringBasedBlob.java
+++
b/oak-store-spi/src/main/java/org/apache/jackrabbit/oak/plugins/memory/StringBasedBlob.java
@@ -20,8 +20,7 @@ package org.apache.jackrabbit.oak.plugins.memory;
import java.io.ByteArrayInputStream;
import java.io.InputStream;
-
-import org.apache.jackrabbit.guava.common.base.Charsets;
+import java.nio.charset.StandardCharsets;
import org.jetbrains.annotations.NotNull;
@@ -47,7 +46,7 @@ public class StringBasedBlob extends AbstractBlob {
@NotNull
@Override
public InputStream getNewStream() {
- return new ByteArrayInputStream(value.getBytes(Charsets.UTF_8));
+ return new
ByteArrayInputStream(value.getBytes(StandardCharsets.UTF_8));
}
/**
@@ -56,6 +55,6 @@ public class StringBasedBlob extends AbstractBlob {
*/
@Override
public long length() {
- return value.getBytes(Charsets.UTF_8).length;
+ return value.getBytes(StandardCharsets.UTF_8).length;
}
}
diff --git
a/oak-store-spi/src/main/java/org/apache/jackrabbit/oak/plugins/value/Conversions.java
b/oak-store-spi/src/main/java/org/apache/jackrabbit/oak/plugins/value/Conversions.java
index 39209f426e..6785e3ecce 100644
---
a/oak-store-spi/src/main/java/org/apache/jackrabbit/oak/plugins/value/Conversions.java
+++
b/oak-store-spi/src/main/java/org/apache/jackrabbit/oak/plugins/value/Conversions.java
@@ -21,10 +21,10 @@ package org.apache.jackrabbit.oak.plugins.value;
import java.io.IOException;
import java.io.InputStream;
import java.math.BigDecimal;
+import java.nio.charset.StandardCharsets;
import java.util.Calendar;
import java.util.TimeZone;
-import org.apache.jackrabbit.guava.common.base.Charsets;
import org.apache.jackrabbit.guava.common.io.ByteStreams;
import org.apache.jackrabbit.oak.api.Blob;
@@ -159,7 +159,7 @@ public final class Conversions {
try {
InputStream in = value.getNewStream();
try {
- return new String(ByteStreams.toByteArray(in),
Charsets.UTF_8);
+ return new String(ByteStreams.toByteArray(in),
StandardCharsets.UTF_8);
}
finally {
in.close();
diff --git
a/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/RepositoryUpgrade.java
b/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/RepositoryUpgrade.java
index 73e9f4df6d..cf6b27da99 100644
---
a/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/RepositoryUpgrade.java
+++
b/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/RepositoryUpgrade.java
@@ -37,6 +37,7 @@ import static
org.apache.jackrabbit.oak.upgrade.cli.parser.OptionParserFactory.S
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
+import java.nio.charset.StandardCharsets;
import java.util.Calendar;
import java.util.Collection;
import java.util.Iterator;
@@ -58,7 +59,6 @@ import javax.jcr.nodetype.NodeTypeTemplate;
import javax.jcr.nodetype.PropertyDefinitionTemplate;
import javax.jcr.security.Privilege;
-import org.apache.jackrabbit.guava.common.base.Charsets;
import org.apache.jackrabbit.guava.common.base.Function;
import org.apache.jackrabbit.guava.common.base.Stopwatch;
import org.apache.jackrabbit.guava.common.collect.HashBiMap;
@@ -1025,7 +1025,7 @@ public class RepositoryUpgrade {
if (name.length() <= Utils.NODE_NAME_LIMIT / 3) {
return false;
}
- if (name.getBytes(Charsets.UTF_8).length <= Utils.NODE_NAME_LIMIT) {
+ if (name.getBytes(StandardCharsets.UTF_8).length <=
Utils.NODE_NAME_LIMIT) {
return false;
}
return true;
@@ -1038,7 +1038,7 @@ public class RepositoryUpgrade {
if (parentPath.length() < Utils.PATH_SHORT) {
return false;
}
- if (parentPath.getBytes(Charsets.UTF_8).length < Utils.PATH_LONG) {
+ if (parentPath.getBytes(StandardCharsets.UTF_8).length <
Utils.PATH_LONG) {
return false;
}
return true;
diff --git
a/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/blob/LengthCachingDataStore.java
b/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/blob/LengthCachingDataStore.java
index 4b4865b770..408b824776 100644
---
a/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/blob/LengthCachingDataStore.java
+++
b/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/blob/LengthCachingDataStore.java
@@ -26,6 +26,7 @@ import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStreamWriter;
+import java.nio.charset.StandardCharsets;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
@@ -34,7 +35,6 @@ import java.util.Properties;
import javax.jcr.RepositoryException;
-import org.apache.jackrabbit.guava.common.base.Charsets;
import org.apache.jackrabbit.guava.common.collect.Maps;
import org.apache.jackrabbit.guava.common.io.Files;
import org.apache.commons.io.FilenameUtils;
@@ -310,7 +310,7 @@ public class LengthCachingDataStore extends
AbstractDataStore {
BufferedWriter w = null;
try {
w = new BufferedWriter(
- new OutputStreamWriter(new
FileOutputStream(mappingFile, true), Charsets.UTF_8));
+ new OutputStreamWriter(new
FileOutputStream(mappingFile, true), StandardCharsets.UTF_8));
for (Map.Entry<String, Long> e : newMappings.entrySet()) {
w.write(String.valueOf(e.getValue()));
w.write(SEPARATOR);
@@ -330,7 +330,7 @@ public class LengthCachingDataStore extends
AbstractDataStore {
private static Map<String, Long> loadMappingData(File mappingFile) throws
FileNotFoundException {
Map<String, Long> mapping = new HashMap<String, Long>();
log.info("Reading mapping data from {}",
mappingFile.getAbsolutePath());
- LineIterator itr = new LineIterator(Files.newReader(mappingFile,
Charsets.UTF_8));
+ LineIterator itr = new LineIterator(Files.newReader(mappingFile,
StandardCharsets.UTF_8));
try {
while (itr.hasNext()) {
String line = itr.nextLine();
diff --git
a/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/nodestate/NameFilteringNodeState.java
b/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/nodestate/NameFilteringNodeState.java
index f72c7a6f30..262df93dea 100644
---
a/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/nodestate/NameFilteringNodeState.java
+++
b/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/nodestate/NameFilteringNodeState.java
@@ -16,7 +16,6 @@
*/
package org.apache.jackrabbit.oak.upgrade.nodestate;
-import org.apache.jackrabbit.guava.common.base.Charsets;
import org.apache.jackrabbit.oak.api.PropertyState;
import org.apache.jackrabbit.oak.commons.PathUtils;
import org.apache.jackrabbit.oak.plugins.document.util.Utils;
@@ -26,6 +25,7 @@ import org.jetbrains.annotations.NotNull;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
public class NameFilteringNodeState extends AbstractDecoratedNodeState {
@@ -80,14 +80,14 @@ public class NameFilteringNodeState extends
AbstractDecoratedNodeState {
if (name.length() <= Utils.NODE_NAME_LIMIT / 3) {
return false;
}
- if (name.getBytes(Charsets.UTF_8).length <= Utils.NODE_NAME_LIMIT) {
+ if (name.getBytes(StandardCharsets.UTF_8).length <=
Utils.NODE_NAME_LIMIT) {
return false;
}
String path = getPath();
if (path.length() <= Utils.PATH_SHORT) {
return false;
}
- if (path.getBytes(Charsets.UTF_8).length < Utils.PATH_LONG) {
+ if (path.getBytes(StandardCharsets.UTF_8).length < Utils.PATH_LONG) {
return false;
}
return true;