This is an automated email from the ASF dual-hosted git repository.
janhoy pushed a commit to branch branch_9x
in repository https://gitbox.apache.org/repos/asf/solr.git
The following commit(s) were added to refs/heads/branch_9x by this push:
new e5a48d6 SOLR-15862 Fix linter warnings about guava Strings, removing unused variable (#521)
e5a48d6 is described below
commit e5a48d620feef82cc02c84e5dcac139b5a2e5271
Author: Jan Høydahl <[email protected]>
AuthorDate: Thu Jan 13 01:51:11 2022 +0100
SOLR-15862 Fix linter warnings about guava Strings, removing unused variable (#521)
(cherry picked from commit ddf73603d6d57ceaa1081ee46fd7bc275eff0066)
---
.../test/org/apache/solr/s3/S3BackupRepositoryTest.java | 9 +++------
.../org/apache/solr/core/HdfsDirectoryFactoryTest.java | 15 ++++++++-------
2 files changed, 11 insertions(+), 13 deletions(-)
diff --git a/solr/contrib/s3-repository/src/test/org/apache/solr/s3/S3BackupRepositoryTest.java b/solr/contrib/s3-repository/src/test/org/apache/solr/s3/S3BackupRepositoryTest.java
index 2a5828c..3f3284c 100644
--- a/solr/contrib/s3-repository/src/test/org/apache/solr/s3/S3BackupRepositoryTest.java
+++ b/solr/contrib/s3-repository/src/test/org/apache/solr/s3/S3BackupRepositoryTest.java
@@ -19,7 +19,6 @@ package org.apache.solr.s3;
import static org.apache.solr.s3.S3BackupRepository.S3_SCHEME;
import com.adobe.testing.s3mock.junit4.S3MockRule;
-import com.google.common.base.Strings;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
@@ -165,7 +164,7 @@ public class S3BackupRepositoryTest extends AbstractBackupRepositoryTest {
doTestCopyFileTo(content);
// copy a 10Mb file
- content += Strings.repeat("1234567890", 1024 * 1024);
+ content += "1234567890".repeat(1024 * 1024);
doTestCopyFileFrom(content);
doTestCopyFileTo(content);
}
@@ -232,7 +231,7 @@ public class S3BackupRepositoryTest extends AbstractBackupRepositoryTest {
doRandomAccessTest(content, content.indexOf("content"));
// Large text, we force to refill the buffer
- String blank = Strings.repeat(" ", 5 * BufferedIndexInput.BUFFER_SIZE);
+ String blank = " ".repeat(5 * BufferedIndexInput.BUFFER_SIZE);
content = "This is a super large" + blank + "content";
doRandomAccessTest(content, content.indexOf("content"));
}
@@ -246,8 +245,6 @@ public class S3BackupRepositoryTest extends AbstractBackupRepositoryTest {
private void doRandomAccessTest(String content, int position) throws Exception {
try (S3BackupRepository repo = getRepository()) {
- File tmp = temporaryFolder.newFolder();
-
// Open an index input on a file
pushObject("/my-repo/content", content);
IndexInput input = repo.openInput(new URI("s3://my-repo"), "content", IOContext.DEFAULT);
@@ -278,7 +275,7 @@ public class S3BackupRepositoryTest extends AbstractBackupRepositoryTest {
try (S3BackupRepository repo = getRepository()) {
// Open an index input on a file
- String blank = Strings.repeat(" ", 5 * BufferedIndexInput.BUFFER_SIZE);
+ String blank = " ".repeat(5 * BufferedIndexInput.BUFFER_SIZE);
String content = "This is the file " + blank + "content";
pushObject("/content", content);
diff --git a/solr/core/src/test/org/apache/solr/core/HdfsDirectoryFactoryTest.java b/solr/core/src/test/org/apache/solr/core/HdfsDirectoryFactoryTest.java
index 87337fa..653a8f8 100644
--- a/solr/core/src/test/org/apache/solr/core/HdfsDirectoryFactoryTest.java
+++ b/solr/core/src/test/org/apache/solr/core/HdfsDirectoryFactoryTest.java
@@ -29,7 +29,6 @@ import java.util.Map;
import java.util.Random;
import com.carrotsearch.randomizedtesting.generators.RandomStrings;
-import com.google.common.base.Strings;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.lucene.store.Directory;
@@ -55,7 +54,7 @@ import org.junit.Test;
import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters;
-@ThreadLeakFilters(defaultFilters = true, filters = {
+@ThreadLeakFilters(filters = {
SolrIgnoredThreadsFilter.class,
QuickPatchThreadsFilter.class,
BadHdfsThreadsFilter.class // hdfs currently leaks thread(s)
@@ -246,14 +245,16 @@ public class HdfsDirectoryFactoryTest extends SolrTestCaseJ4 {
@Test
public void testIsAbsolute() throws Exception {
- try(HdfsDirectoryFactory hdfsFactory = new HdfsDirectoryFactory()) {
- String relativePath = Strings.repeat(
- RandomStrings.randomAsciiAlphanumOfLength(random(), random().nextInt(10) + 1) + '/',
- random().nextInt(5) + 1);
+ try (HdfsDirectoryFactory hdfsFactory = new HdfsDirectoryFactory()) {
+ String pathToRepeat = RandomStrings.randomAsciiAlphanumOfLength(
+ random(),
+ random().nextInt(10) + 1
+ ) + '/';
+ String relativePath =pathToRepeat.repeat(random().nextInt(5) + 1);
assertFalse(hdfsFactory.isAbsolute(relativePath));
assertFalse(hdfsFactory.isAbsolute("/" + relativePath));
- for(String rootPrefix : Arrays.asList("file://", "hdfs://", "s3a://", "foo://")) {
+ for (String rootPrefix : Arrays.asList("file://", "hdfs://", "s3a://", "foo://")) {
assertTrue(hdfsFactory.isAbsolute(rootPrefix + relativePath));
}
}