This is an automated email from the ASF dual-hosted git repository.
mdrob pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/solr.git
The following commit(s) were added to refs/heads/main by this push:
new dd22ed1 SOLR-15244 Switch File to Path (#10)
dd22ed1 is described below
commit dd22ed1a18a6e3d5b6f599c4c665b6374dc9e24d
Author: Mike Drob <[email protected]>
AuthorDate: Thu Mar 25 11:01:59 2021 -0500
SOLR-15244 Switch File to Path (#10)
---
.../solr/handler/clustering/EngineContext.java | 3 +-
.../src/java/org/apache/solr/cloud/CloudUtil.java | 3 +-
.../org/apache/solr/cloud/RecoveryStrategy.java | 5 +-
.../java/org/apache/solr/cloud/SolrZkServer.java | 29 +++++------
.../java/org/apache/solr/cloud/ZkController.java | 4 +-
.../org/apache/solr/core/ConfigSetService.java | 3 +-
.../org/apache/solr/core/DirectoryFactory.java | 2 +-
.../org/apache/solr/core/MMapDirectoryFactory.java | 9 +---
.../apache/solr/core/NIOFSDirectoryFactory.java | 12 ++---
.../solr/core/NRTCachingDirectoryFactory.java | 11 +----
.../src/java/org/apache/solr/core/SolrConfig.java | 3 +-
.../org/apache/solr/core/SolrResourceLoader.java | 31 ++++++------
.../apache/solr/core/StandardDirectoryFactory.java | 7 +--
.../solr/core/snapshots/SolrSnapshotsTool.java | 56 +++++++++++-----------
.../apache/solr/filestore/DistribPackageStore.java | 39 +++++++--------
.../java/org/apache/solr/handler/CatStream.java | 11 ++---
.../java/org/apache/solr/handler/IndexFetcher.java | 8 ++--
.../apache/solr/handler/PingRequestHandler.java | 26 +++++-----
.../apache/solr/handler/ReplicationHandler.java | 11 ++---
.../apache/solr/AnalysisAfterCoreReloadTest.java | 23 +++++----
.../apache/solr/core/AlternateDirectoryTest.java | 5 +-
.../apache/solr/core/MockFSDirectoryFactory.java | 6 +--
22 files changed, 131 insertions(+), 176 deletions(-)
diff --git
a/solr/contrib/clustering/src/java/org/apache/solr/handler/clustering/EngineContext.java
b/solr/contrib/clustering/src/java/org/apache/solr/handler/clustering/EngineContext.java
index 70bc2ca..4ad7792 100644
---
a/solr/contrib/clustering/src/java/org/apache/solr/handler/clustering/EngineContext.java
+++
b/solr/contrib/clustering/src/java/org/apache/solr/handler/clustering/EngineContext.java
@@ -33,7 +33,6 @@ import java.io.IOException;
import java.io.UncheckedIOException;
import java.lang.invoke.MethodHandles;
import java.nio.file.Path;
-import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
@@ -70,7 +69,7 @@ final class EngineContext {
List<Path> resourceLocations = new ArrayList<>();
- Path configDir = Paths.get(core.getResourceLoader().getConfigDir());
+ Path configDir = core.getResourceLoader().getConfigPath();
if (resourcesPath != null && !resourcesPath.trim().isEmpty()) {
configDir = configDir.resolve(resourcesPath);
resourceLocations.add(configDir);
diff --git a/solr/core/src/java/org/apache/solr/cloud/CloudUtil.java
b/solr/core/src/java/org/apache/solr/cloud/CloudUtil.java
index 3dffb72..d98f783 100644
--- a/solr/core/src/java/org/apache/solr/cloud/CloudUtil.java
+++ b/solr/core/src/java/org/apache/solr/cloud/CloudUtil.java
@@ -16,7 +16,6 @@
*/
package org.apache.solr.cloud;
-import java.io.File;
import java.io.IOException;
import java.lang.invoke.MethodHandles;
import java.util.Collection;
@@ -125,7 +124,7 @@ public class CloudUtil {
public static String unifiedResourcePath(SolrResourceLoader loader) {
return (loader instanceof ZkSolrResourceLoader) ?
((ZkSolrResourceLoader) loader).getConfigSetZkPath() + "/" :
- loader.getConfigDir() + File.separator;
+ loader.getConfigPath() + "/";
}
/**Read the list of public keys from ZK
diff --git a/solr/core/src/java/org/apache/solr/cloud/RecoveryStrategy.java
b/solr/core/src/java/org/apache/solr/cloud/RecoveryStrategy.java
index eaeba2d..0ca1a47 100644
--- a/solr/core/src/java/org/apache/solr/cloud/RecoveryStrategy.java
+++ b/solr/core/src/java/org/apache/solr/cloud/RecoveryStrategy.java
@@ -731,6 +731,7 @@ public class RecoveryStrategy implements Runnable,
Closeable {
/**
* Make sure we can connect to the shard leader as currently defined in ZK
* @param ourUrl if the leader url is the same as our url, we will skip
trying to connect
+ * @return the leader replica, or null if closed
*/
private final Replica pingLeader(String ourUrl, CoreDescriptor coreDesc,
boolean mayPutReplicaAsDown)
throws Exception {
@@ -745,12 +746,12 @@ public class RecoveryStrategy implements Runnable,
Closeable {
zkController.publish(coreDesc, Replica.State.DOWN);
}
numTried++;
- Replica leaderReplica = null;
if (isClosed()) {
- return leaderReplica;
+ return null;
}
+ Replica leaderReplica;
try {
leaderReplica = zkStateReader.getLeaderRetry(
cloudDesc.getCollectionName(), cloudDesc.getShardId());
diff --git a/solr/core/src/java/org/apache/solr/cloud/SolrZkServer.java
b/solr/core/src/java/org/apache/solr/cloud/SolrZkServer.java
index 62d3248..6349aa8 100644
--- a/solr/core/src/java/org/apache/solr/cloud/SolrZkServer.java
+++ b/solr/core/src/java/org/apache/solr/cloud/SolrZkServer.java
@@ -26,14 +26,14 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
-import java.io.FileInputStream;
import java.io.IOException;
-import java.io.InputStreamReader;
+import java.io.Reader;
import java.lang.invoke.MethodHandles;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.UnknownHostException;
-import java.nio.charset.StandardCharsets;
+import java.nio.file.Files;
+import java.nio.file.Path;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.atomic.AtomicReference;
@@ -176,29 +176,24 @@ class SolrZkServerProps extends QuorumPeerConfig {
/**
* Parse a ZooKeeper configuration file
* @param path the path of the configuration file
+ * @throws IllegalArgumentException if a config file does not exist at the
given path
* @throws ConfigException error processing configuration
*/
public static Properties getProperties(String path) throws ConfigException {
- File configFile = new File(path);
-
- log.info("Reading configuration from: {}", configFile);
+ Path configPath = Path.of(path);
+ log.info("Reading configuration from: {}", configPath);
try {
- if (!configFile.exists()) {
- throw new IllegalArgumentException(configFile.toString()
+ if (!Files.exists(configPath)) {
+ throw new IllegalArgumentException(configPath.toString()
+ " file is missing");
}
- Properties cfg = new Properties();
- FileInputStream in = new FileInputStream(configFile);
- try {
- cfg.load(new InputStreamReader(in, StandardCharsets.UTF_8));
- } finally {
- in.close();
+ try (Reader reader = Files.newBufferedReader(configPath)) {
+ Properties cfg = new Properties();
+ cfg.load(reader);
+ return cfg;
}
-
- return cfg;
-
} catch (IOException | IllegalArgumentException e) {
throw new ConfigException("Error processing " + path, e);
}
diff --git a/solr/core/src/java/org/apache/solr/cloud/ZkController.java
b/solr/core/src/java/org/apache/solr/cloud/ZkController.java
index 0a7e733..4cafe66 100644
--- a/solr/core/src/java/org/apache/solr/cloud/ZkController.java
+++ b/solr/core/src/java/org/apache/solr/cloud/ZkController.java
@@ -1517,14 +1517,14 @@ public class ZkController implements Closeable {
return baseURL;
}
- public void publish(final CoreDescriptor cd, final Replica.State state)
throws Exception {
+ public void publish(final CoreDescriptor cd, final Replica.State state)
throws KeeperException, InterruptedException {
publish(cd, state, true, false);
}
/**
* Publish core state to overseer.
*/
- public void publish(final CoreDescriptor cd, final Replica.State state,
boolean updateLastState, boolean forcePublish) throws Exception {
+ public void publish(final CoreDescriptor cd, final Replica.State state,
boolean updateLastState, boolean forcePublish) throws KeeperException,
InterruptedException {
if (!forcePublish) {
try (SolrCore core = cc.getCore(cd.getName())) {
if (core == null || core.isClosed()) {
diff --git a/solr/core/src/java/org/apache/solr/core/ConfigSetService.java
b/solr/core/src/java/org/apache/solr/core/ConfigSetService.java
index 949d4ff..409851a 100644
--- a/solr/core/src/java/org/apache/solr/core/ConfigSetService.java
+++ b/solr/core/src/java/org/apache/solr/core/ConfigSetService.java
@@ -24,7 +24,6 @@ import java.lang.reflect.Constructor;
import java.nio.file.Files;
import java.nio.file.Path;
-import java.nio.file.Paths;
import com.github.benmanes.caffeine.cache.Cache;
import com.github.benmanes.caffeine.cache.Caffeine;
@@ -272,7 +271,7 @@ public abstract class ConfigSetService {
@Override
protected Long getCurrentSchemaModificationVersion(String configSet,
SolrConfig solrConfig, String schemaFileName) {
- Path schemaFile =
Paths.get(solrConfig.getResourceLoader().getConfigDir()).resolve(schemaFileName);
+ Path schemaFile =
solrConfig.getResourceLoader().getConfigPath().resolve(schemaFileName);
try {
return Files.getLastModifiedTime(schemaFile).toMillis();
} catch (FileNotFoundException e) {
diff --git a/solr/core/src/java/org/apache/solr/core/DirectoryFactory.java
b/solr/core/src/java/org/apache/solr/core/DirectoryFactory.java
index 619e7c9..2ccc592 100644
--- a/solr/core/src/java/org/apache/solr/core/DirectoryFactory.java
+++ b/solr/core/src/java/org/apache/solr/core/DirectoryFactory.java
@@ -266,7 +266,7 @@ public abstract class DirectoryFactory implements
NamedListInitializedPlugin,
*/
public boolean isAbsolute(String path) {
// back compat
- return new File(path).isAbsolute();
+ return Path.of(path).isAbsolute();
}
public static long sizeOfDirectory(Directory directory) throws IOException {
diff --git a/solr/core/src/java/org/apache/solr/core/MMapDirectoryFactory.java
b/solr/core/src/java/org/apache/solr/core/MMapDirectoryFactory.java
index 69def25..a7e7675 100644
--- a/solr/core/src/java/org/apache/solr/core/MMapDirectoryFactory.java
+++ b/solr/core/src/java/org/apache/solr/core/MMapDirectoryFactory.java
@@ -15,7 +15,6 @@
* limitations under the License.
*/
package org.apache.solr.core;
-import java.io.File;
import java.io.IOException;
import java.lang.invoke.MethodHandles;
import java.nio.file.Path;
@@ -61,8 +60,7 @@ public class MMapDirectoryFactory extends
StandardDirectoryFactory {
@Override
protected Directory create(String path, LockFactory lockFactory, DirContext
dirContext) throws IOException {
- // we pass NoLockFactory, because the real lock factory is set later by
injectLockFactory:
- MMapDirectory mapDirectory = new MMapDirectory(new File(path).toPath(),
lockFactory, maxChunk);
+ MMapDirectory mapDirectory = new MMapDirectory(Path.of(path), lockFactory,
maxChunk);
try {
mapDirectory.setUseUnmap(unmapHack);
} catch (IllegalArgumentException e) {
@@ -71,9 +69,4 @@ public class MMapDirectoryFactory extends
StandardDirectoryFactory {
mapDirectory.setPreload(preload);
return mapDirectory;
}
-
- @Override
- public boolean isAbsolute(String path) {
- return new File(path).isAbsolute();
- }
}
diff --git a/solr/core/src/java/org/apache/solr/core/NIOFSDirectoryFactory.java
b/solr/core/src/java/org/apache/solr/core/NIOFSDirectoryFactory.java
index 459b12e..9e4f9dc 100644
--- a/solr/core/src/java/org/apache/solr/core/NIOFSDirectoryFactory.java
+++ b/solr/core/src/java/org/apache/solr/core/NIOFSDirectoryFactory.java
@@ -15,8 +15,9 @@
* limitations under the License.
*/
package org.apache.solr.core;
-import java.io.File;
+
import java.io.IOException;
+import java.nio.file.Path;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.LockFactory;
@@ -31,13 +32,6 @@ public class NIOFSDirectoryFactory extends
StandardDirectoryFactory {
@Override
protected Directory create(String path, LockFactory lockFactory, DirContext
dirContext) throws IOException {
- // we pass NoLockFactory, because the real lock factory is set later by
injectLockFactory:
- return new NIOFSDirectory(new File(path).toPath(), lockFactory);
- }
-
- @Override
- public boolean isAbsolute(String path) {
- return new File(path).isAbsolute();
+ return new NIOFSDirectory(Path.of(path), lockFactory);
}
-
}
diff --git
a/solr/core/src/java/org/apache/solr/core/NRTCachingDirectoryFactory.java
b/solr/core/src/java/org/apache/solr/core/NRTCachingDirectoryFactory.java
index 4e1fad9..e260a11 100644
--- a/solr/core/src/java/org/apache/solr/core/NRTCachingDirectoryFactory.java
+++ b/solr/core/src/java/org/apache/solr/core/NRTCachingDirectoryFactory.java
@@ -16,8 +16,8 @@
*/
package org.apache.solr.core;
-import java.io.File;
import java.io.IOException;
+import java.nio.file.Path;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
@@ -52,13 +52,6 @@ public class NRTCachingDirectoryFactory extends
StandardDirectoryFactory {
@Override
protected Directory create(String path, LockFactory lockFactory, DirContext
dirContext) throws IOException {
- // we pass NoLockFactory, because the real lock factory is set later by
injectLockFactory:
- return new NRTCachingDirectory(FSDirectory.open(new File(path).toPath(),
lockFactory), maxMergeSizeMB, maxCachedMB);
+ return new NRTCachingDirectory(FSDirectory.open(Path.of(path),
lockFactory), maxMergeSizeMB, maxCachedMB);
}
-
- @Override
- public boolean isAbsolute(String path) {
- return new File(path).isAbsolute();
- }
-
}
diff --git a/solr/core/src/java/org/apache/solr/core/SolrConfig.java
b/solr/core/src/java/org/apache/solr/core/SolrConfig.java
index ce55df2..804ceab 100644
--- a/solr/core/src/java/org/apache/solr/core/SolrConfig.java
+++ b/solr/core/src/java/org/apache/solr/core/SolrConfig.java
@@ -27,7 +27,6 @@ import java.net.MalformedURLException;
import java.net.URL;
import java.nio.file.Files;
import java.nio.file.Path;
-import java.nio.file.Paths;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.Collections;
@@ -154,7 +153,7 @@ public class SolrConfig extends XmlConfigFile implements
MapSerializable {
if (loader instanceof ZkSolrResourceLoader) {
resource = name;
} else {
- resource = Paths.get(loader.getConfigDir()).resolve(name).toString();
+ resource = loader.getConfigPath().resolve(name).toString();
}
throw new SolrException(ErrorCode.SERVER_ERROR, "Error loading solr
config from " + resource, e);
}
diff --git a/solr/core/src/java/org/apache/solr/core/SolrResourceLoader.java
b/solr/core/src/java/org/apache/solr/core/SolrResourceLoader.java
index 288259a..b779a1c 100644
--- a/solr/core/src/java/org/apache/solr/core/SolrResourceLoader.java
+++ b/solr/core/src/java/org/apache/solr/core/SolrResourceLoader.java
@@ -18,10 +18,8 @@ package org.apache.solr.core;
import java.io.Closeable;
import java.io.File;
-import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
-import java.io.OutputStream;
import java.lang.invoke.MethodHandles;
import java.lang.reflect.Constructor;
import java.net.MalformedURLException;
@@ -298,8 +296,16 @@ public class SolrResourceLoader implements ResourceLoader,
Closeable, SolrClassL
});
}
+ public Path getConfigPath() {
+ return instanceDir.resolve("conf");
+ }
+
+ /**
+ * @deprecated use {@link #getConfigPath()}
+ */
+ @Deprecated(since="9.0.0")
public String getConfigDir() {
- return instanceDir.resolve("conf").toString();
+ return getConfigPath().toString();
}
/**
@@ -915,27 +921,18 @@ public class SolrResourceLoader implements
ResourceLoader, Closeable, SolrClassL
public static void persistConfLocally(SolrResourceLoader loader, String
resourceName, byte[] content) {
// Persist locally
- File confFile = new File(loader.getConfigDir(), resourceName);
+ Path confFile = loader.getConfigPath().resolve(resourceName);
try {
- File parentDir = confFile.getParentFile();
- if (!parentDir.isDirectory()) {
- if (!parentDir.mkdirs()) {
- final String msg = "Can't create managed schema directory " +
parentDir.getAbsolutePath();
- log.error(msg);
- throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, msg);
- }
- }
- try (OutputStream out = new FileOutputStream(confFile);) {
- out.write(content);
- }
- log.info("Written confile {}", resourceName);
+ Files.createDirectories(confFile.getParent());
+ Files.write(confFile, content);
+ log.info("Written conf file {}", resourceName);
} catch (IOException e) {
final String msg = "Error persisting conf file " + resourceName;
log.error(msg, e);
throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, msg, e);
} finally {
try {
- IOUtils.fsync(confFile.toPath(), false);
+ IOUtils.fsync(confFile, false);
} catch (IOException e) {
final String msg = "Error syncing conf file " + resourceName;
log.error(msg, e);
diff --git
a/solr/core/src/java/org/apache/solr/core/StandardDirectoryFactory.java
b/solr/core/src/java/org/apache/solr/core/StandardDirectoryFactory.java
index 71bb9a91..65b0353 100644
--- a/solr/core/src/java/org/apache/solr/core/StandardDirectoryFactory.java
+++ b/solr/core/src/java/org/apache/solr/core/StandardDirectoryFactory.java
@@ -52,8 +52,7 @@ public class StandardDirectoryFactory extends
CachingDirectoryFactory {
@Override
protected Directory create(String path, LockFactory lockFactory, DirContext
dirContext) throws IOException {
- // we pass NoLockFactory, because the real lock factory is set later by
injectLockFactory:
- return FSDirectory.open(new File(path).toPath(), lockFactory);
+ return FSDirectory.open(Path.of(path), lockFactory);
}
@Override
@@ -80,9 +79,7 @@ public class StandardDirectoryFactory extends
CachingDirectoryFactory {
@Override
public String normalize(String path) throws IOException {
- String cpath = new File(path).getCanonicalPath();
-
- return super.normalize(cpath);
+ return super.normalize(Path.of(path).toRealPath().toString());
}
public boolean isPersistent() {
diff --git
a/solr/core/src/java/org/apache/solr/core/snapshots/SolrSnapshotsTool.java
b/solr/core/src/java/org/apache/solr/core/snapshots/SolrSnapshotsTool.java
index 4d4c3b8..6931f94 100644
--- a/solr/core/src/java/org/apache/solr/core/snapshots/SolrSnapshotsTool.java
+++ b/solr/core/src/java/org/apache/solr/core/snapshots/SolrSnapshotsTool.java
@@ -18,15 +18,12 @@
package org.apache.solr.core.snapshots;
import java.io.Closeable;
-import java.io.File;
-import java.io.FileOutputStream;
import java.io.IOException;
-import java.io.OutputStreamWriter;
-import java.io.Writer;
import java.lang.invoke.MethodHandles;
import java.net.URI;
import java.net.URISyntaxException;
-import java.nio.charset.StandardCharsets;
+import java.nio.file.Files;
+import java.nio.file.Paths;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
@@ -183,26 +180,29 @@ public class SolrSnapshotsTool implements Closeable, CLIO
{
}
}
- public Map<String, List<String>> getIndexFilesPathForSnapshot(String
collectionName, String snapshotName, Optional<String> pathPrefix)
+ /**
+ * @param pathPrefix optional
+ */
+ public Map<String, List<String>> getIndexFilesPathForSnapshot(String
collectionName, String snapshotName, String pathPrefix)
throws SolrServerException, IOException {
Map<String, List<String>> result = new HashMap<>();
Collection<CollectionSnapshotMetaData> snaps =
listCollectionSnapshots(collectionName);
- Optional<CollectionSnapshotMetaData> meta = Optional.empty();
+ CollectionSnapshotMetaData meta = null;
for (CollectionSnapshotMetaData m : snaps) {
if (snapshotName.equals(m.getName())) {
- meta = Optional.of(m);
+ meta = m;
}
}
- if (!meta.isPresent()) {
+ if (meta == null) {
throw new IllegalArgumentException("The snapshot named " + snapshotName
+ " is not found for collection " + collectionName);
}
DocCollection collectionState =
solrClient.getZkStateReader().getClusterState().getCollection(collectionName);
for (Slice s : collectionState.getSlices()) {
- List<CoreSnapshotMetaData> replicaSnaps =
meta.get().getReplicaSnapshotsForShard(s.getName());
+ List<CoreSnapshotMetaData> replicaSnaps =
meta.getReplicaSnapshotsForShard(s.getName());
// Prepare a list of *existing* replicas (since one or more replicas
could have been deleted after the snapshot creation).
List<CoreSnapshotMetaData> availableReplicas = new ArrayList<>();
for (CoreSnapshotMetaData m : replicaSnaps) {
@@ -226,10 +226,9 @@ public class SolrSnapshotsTool implements Closeable, CLIO {
}
String indexDirPath = coreSnap.getIndexDirPath();
- if (pathPrefix.isPresent()) {
+ if (pathPrefix != null) {
// If the path prefix is specified, rebuild the path to the index
directory.
- Path t = new Path(coreSnap.getIndexDirPath());
- indexDirPath = (new Path(pathPrefix.get(),
t.toUri().getPath())).toString();
+ indexDirPath = new Path(pathPrefix,
coreSnap.getIndexDirPath()).toString();
}
List<String> paths = new ArrayList<>();
@@ -244,20 +243,16 @@ public class SolrSnapshotsTool implements Closeable, CLIO
{
return result;
}
- public void buildCopyListings(String collectionName, String snapshotName,
String localFsPath, Optional<String> pathPrefix)
+ /**
+ * @param pathPrefix optional
+ */
+ public void buildCopyListings(String collectionName, String snapshotName,
String localFsPath, String pathPrefix)
throws SolrServerException, IOException {
Map<String, List<String>> paths =
getIndexFilesPathForSnapshot(collectionName, snapshotName, pathPrefix);
for (Map.Entry<String,List<String>> entry : paths.entrySet()) {
- StringBuilder filesBuilder = new StringBuilder();
- for (String filePath : entry.getValue()) {
- filesBuilder.append(filePath);
- filesBuilder.append("\n");
- }
-
- String files = filesBuilder.toString().trim();
- try (Writer w = new OutputStreamWriter(new FileOutputStream(new
File(localFsPath, entry.getKey())), StandardCharsets.UTF_8)) {
- w.write(files);
- }
+ // TODO: this used to trim - check if that's needed
+ // Using Paths.get instead of Path.of because of conflict with
o.a.hadoop.fs.Path
+ Files.write(Paths.get(localFsPath, entry.getKey()), entry.getValue());
}
}
@@ -270,7 +265,10 @@ public class SolrSnapshotsTool implements Closeable, CLIO {
Preconditions.checkState(resp.getStatus() == 0, "The request failed. The
status code is " + resp.getStatus());
}
- public void prepareForExport(String collectionName, String snapshotName,
String localFsPath, Optional<String> pathPrefix, String destPath) {
+ /**
+ * @param pathPrefix optional
+ */
+ public void prepareForExport(String collectionName, String snapshotName,
String localFsPath, String pathPrefix, String destPath) {
try {
buildCopyListings(collectionName, snapshotName, localFsPath, pathPrefix);
CLIO.out("Successfully prepared copylisting for the snapshot export.");
@@ -376,14 +374,14 @@ public class SolrSnapshotsTool implements Closeable, CLIO
{
String collectionName = requiredArg(options, cmd, COLLECTION);
String localFsDir = requiredArg(options, cmd, TEMP_DIR);
String hdfsOpDir = requiredArg(options, cmd, DEST_DIR);
- Optional<String> pathPrefix =
Optional.ofNullable(cmd.getOptionValue(HDFS_PATH_PREFIX));
+ String pathPrefix = cmd.getOptionValue(HDFS_PATH_PREFIX);
- if (pathPrefix.isPresent()) {
+ if (pathPrefix != null) {
try {
- new URI(pathPrefix.get());
+ new URI(pathPrefix);
} catch (URISyntaxException e) {
CLIO.out(
- "The specified File system path prefix " + pathPrefix.get()
+ "The specified File system path prefix " + pathPrefix
+ " is invalid. The error is " +
e.getLocalizedMessage());
System.exit(1);
}
diff --git
a/solr/core/src/java/org/apache/solr/filestore/DistribPackageStore.java
b/solr/core/src/java/org/apache/solr/filestore/DistribPackageStore.java
index c7b9653..b6e2f43 100644
--- a/solr/core/src/java/org/apache/solr/filestore/DistribPackageStore.java
+++ b/solr/core/src/java/org/apache/solr/filestore/DistribPackageStore.java
@@ -20,13 +20,15 @@ package org.apache.solr.filestore;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
-import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.lang.invoke.MethodHandles;
import java.nio.ByteBuffer;
+import java.nio.channels.SeekableByteChannel;
+import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
+import java.nio.file.StandardOpenOption;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
@@ -93,8 +95,7 @@ public class DistribPackageStore implements PackageStore {
if (!path.isEmpty() && path.charAt(0) != File.separatorChar) {
path = File.separator + path;
}
- return new File(solrHome +
- File.separator + PackageStoreAPI.PACKAGESTORE_DIRECTORY +
path).toPath();
+ return
solrHome.resolve(PackageStoreAPI.PACKAGESTORE_DIRECTORY).resolve(path);
}
class FileInfo {
@@ -109,9 +110,7 @@ public class DistribPackageStore implements PackageStore {
ByteBuffer getFileData(boolean validate) throws IOException {
if (fileData == null) {
- try (FileInputStream fis = new
FileInputStream(getRealpath(path).toFile())) {
- fileData = SimplePostTool.inputStreamToByteArray(fis);
- }
+ fileData = ByteBuffer.wrap(Files.readAllBytes(getRealpath(path)));
}
return fileData;
}
@@ -138,8 +137,8 @@ public class DistribPackageStore implements PackageStore {
public boolean exists(boolean validateContent, boolean fetchMissing)
throws IOException {
- File file = getRealpath(path).toFile();
- if (!file.exists()) {
+ Path file = getRealpath(path);
+ if (!Files.exists(file)) {
if (fetchMissing) {
return fetchFromAnyNode();
} else {
@@ -150,7 +149,7 @@ public class DistribPackageStore implements PackageStore {
if (validateContent) {
MetaData metaData = readMetaData();
if (metaData == null) return false;
- try (InputStream is = new FileInputStream(getRealpath(path).toFile()))
{
+ try (InputStream is = Files.newInputStream(file)) {
if (!Objects.equals(DigestUtils.sha512Hex(is), metaData.sha512)) {
deleteFile();
} else {
@@ -582,29 +581,23 @@ public class DistribPackageStore implements PackageStore {
*/
public static void _persistToFile(Path solrHome, String path, ByteBuffer
data, ByteBuffer meta) throws IOException {
Path realpath = _getRealPath(path, solrHome);
- File file = realpath.toFile();
- File parent = file.getParentFile();
- if (!parent.exists()) {
- parent.mkdirs();
- }
+ Files.createDirectories(realpath.getParent());
@SuppressWarnings({"rawtypes"})
Map m = (Map) Utils.fromJSON(meta.array(), meta.arrayOffset(),
meta.limit());
if (m == null || m.isEmpty()) {
throw new SolrException(SERVER_ERROR, "invalid metadata , discarding : "
+ path);
}
-
- File metdataFile = _getRealPath(_getMetapath(path), solrHome).toFile();
-
- try (FileOutputStream fos = new FileOutputStream(metdataFile)) {
- fos.write(meta.array(), 0, meta.limit());
+ Path metadataPath = _getRealPath(_getMetapath(path), solrHome);
+ try (SeekableByteChannel channel = Files.newByteChannel(metadataPath,
StandardOpenOption.CREATE, StandardOpenOption.WRITE,
StandardOpenOption.TRUNCATE_EXISTING)) {
+ channel.write(meta);
}
- IOUtils.fsync(metdataFile.toPath(), false);
+ IOUtils.fsync(metadataPath, false);
- try (FileOutputStream fos = new FileOutputStream(file)) {
- fos.write(data.array(), 0, data.limit());
+ try (SeekableByteChannel channel = Files.newByteChannel(realpath,
StandardOpenOption.CREATE, StandardOpenOption.WRITE,
StandardOpenOption.TRUNCATE_EXISTING)) {
+ channel.write(data);
}
- IOUtils.fsync(file.toPath(), false);
+ IOUtils.fsync(realpath, false);
}
@Override
diff --git a/solr/core/src/java/org/apache/solr/handler/CatStream.java
b/solr/core/src/java/org/apache/solr/handler/CatStream.java
index e6d58c5..406fdb8 100644
--- a/solr/core/src/java/org/apache/solr/handler/CatStream.java
+++ b/solr/core/src/java/org/apache/solr/handler/CatStream.java
@@ -17,11 +17,10 @@
package org.apache.solr.handler;
-import java.io.File;
-import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.lang.invoke.MethodHandles;
+import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
@@ -184,11 +183,10 @@ public class CatStream extends TupleStream implements
Expressible {
while (allFilesToCrawl.hasNext()) {
closeCurrentFileIfSet();
currentFilePath = allFilesToCrawl.next();
- File currentFile = currentFilePath.absolutePath.toFile();
- if(currentFile.getName().endsWith(".gz")) {
- currentFileLines = new LineIterator(new InputStreamReader(new
GZIPInputStream(new FileInputStream(currentFile)), "UTF-8"));
+ if(currentFilePath.absolutePath.toString().endsWith(".gz")) {
+ currentFileLines = new LineIterator(new InputStreamReader(new
GZIPInputStream(Files.newInputStream(currentFilePath.absolutePath)),
StandardCharsets.UTF_8));
} else {
- currentFileLines = FileUtils.lineIterator(currentFile, "UTF-8");
+ currentFileLines =
FileUtils.lineIterator(currentFilePath.absolutePath.toFile(), "UTF-8");
}
if (currentFileLines.hasNext()) return true;
}
@@ -196,7 +194,6 @@ public class CatStream extends TupleStream implements
Expressible {
return false;
}
- @SuppressWarnings({"unchecked"})
private Tuple fetchNextLineFromCurrentFile() {
linesReturned++;
diff --git a/solr/core/src/java/org/apache/solr/handler/IndexFetcher.java
b/solr/core/src/java/org/apache/solr/handler/IndexFetcher.java
index 9fc7440..7f781df 100644
--- a/solr/core/src/java/org/apache/solr/handler/IndexFetcher.java
+++ b/solr/core/src/java/org/apache/solr/handler/IndexFetcher.java
@@ -927,7 +927,8 @@ public class IndexFetcher {
private void downloadConfFiles(List<Map<String, Object>>
confFilesToDownload, long latestGeneration) throws Exception {
log.info("Starting download of configuration files from leader: {}",
confFilesToDownload);
confFilesDownloaded = Collections.synchronizedList(new ArrayList<>());
- File tmpconfDir = new File(solrCore.getResourceLoader().getConfigDir(),
"conf." + getDateAsStr(new Date()));
+ Path tmpConfPath =
solrCore.getResourceLoader().getConfigPath().resolve("conf." + getDateAsStr(new
Date()));
+ File tmpconfDir = tmpConfPath.toFile();
try {
boolean status = tmpconfDir.mkdirs();
if (!status) {
@@ -1296,9 +1297,10 @@ public class IndexFetcher {
*/
private void copyTmpConfFiles2Conf(File tmpconfDir) {
boolean status = false;
- File confDir = new File(solrCore.getResourceLoader().getConfigDir());
+ Path confPath = solrCore.getResourceLoader().getConfigPath();
for (File file : makeTmpConfDirFileList(tmpconfDir, new ArrayList<>())) {
- File oldFile = new File(confDir,
file.getPath().substring(tmpconfDir.getPath().length(),
file.getPath().length()));
+ Path oldPath =
confPath.resolve(file.getPath().substring(tmpconfDir.getPath().length()));
+ File oldFile = oldPath.toFile();
if (!oldFile.getParentFile().exists()) {
status = oldFile.getParentFile().mkdirs();
if (!status) {
diff --git a/solr/core/src/java/org/apache/solr/handler/PingRequestHandler.java
b/solr/core/src/java/org/apache/solr/handler/PingRequestHandler.java
index c8cbc09..c96f79f 100644
--- a/solr/core/src/java/org/apache/solr/handler/PingRequestHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/PingRequestHandler.java
@@ -16,14 +16,14 @@
*/
package org.apache.solr.handler;
-import java.io.File;
import java.io.IOException;
import java.lang.invoke.MethodHandles;
+import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
+import java.nio.file.Path;
import java.time.Instant;
import java.util.Locale;
-import org.apache.commons.io.FileUtils;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.params.CommonParams;
import org.apache.solr.common.params.ModifiableSolrParams;
@@ -136,7 +136,7 @@ public class PingRequestHandler extends RequestHandlerBase
implements SolrCoreAw
protected enum ACTIONS {STATUS, ENABLE, DISABLE, PING};
private String healthFileName = null;
- private File healthcheck = null;
+ private Path healthcheck = null;
@Override
public void init(@SuppressWarnings({"rawtypes"})NamedList args) {
@@ -148,17 +148,17 @@ public class PingRequestHandler extends
RequestHandlerBase implements SolrCoreAw
@Override
public void inform( SolrCore core ) {
if (null != healthFileName) {
- healthcheck = new File(healthFileName);
- if ( ! healthcheck.isAbsolute()) {
- healthcheck = new File(core.getDataDir(), healthFileName);
- healthcheck = healthcheck.getAbsoluteFile();
+ healthcheck = Path.of(healthFileName);
+ if (!healthcheck.isAbsolute()) {
+ healthcheck = Path.of(core.getDataDir(), healthFileName);
+ healthcheck = healthcheck.toAbsolutePath();
}
- if ( ! healthcheck.getParentFile().canWrite()) {
+ if (!Files.isWritable(healthcheck.getParent())) {
// this is not fatal, users may not care about enable/disable via
// solr request, file might be touched/deleted by an external system
log.warn("Directory for configured healthcheck file is not writable by
solr, PingRequestHandler will not be able to control enable/disable: {}",
- healthcheck.getParentFile().getAbsolutePath());
+ healthcheck.getParent().toAbsolutePath());
}
}
@@ -171,7 +171,7 @@ public class PingRequestHandler extends RequestHandlerBase
implements SolrCoreAw
* returns false.
*/
public boolean isPingDisabled() {
- return (null != healthcheck && ! healthcheck.exists() );
+ return (null != healthcheck && ! Files.exists(healthcheck) );
}
@Override
@@ -309,18 +309,18 @@ public class PingRequestHandler extends
RequestHandlerBase implements SolrCoreAw
if ( enable ) {
try {
// write out when the file was created
- FileUtils.write(healthcheck, Instant.now().toString(), "UTF-8");
+ Files.write(healthcheck,
Instant.now().toString().getBytes(StandardCharsets.UTF_8));
} catch (IOException e) {
throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
"Unable to write healthcheck flag file", e);
}
} else {
try {
- Files.deleteIfExists(healthcheck.toPath());
+ Files.deleteIfExists(healthcheck);
} catch (Throwable cause) {
throw new SolrException(SolrException.ErrorCode.NOT_FOUND,
"Did not successfully delete healthcheck file:
"
- +healthcheck.getAbsolutePath(), cause);
+ +healthcheck.toAbsolutePath(), cause);
}
}
}
diff --git a/solr/core/src/java/org/apache/solr/handler/ReplicationHandler.java
b/solr/core/src/java/org/apache/solr/handler/ReplicationHandler.java
index 71ebd49..414d3c7 100644
--- a/solr/core/src/java/org/apache/solr/handler/ReplicationHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/ReplicationHandler.java
@@ -771,11 +771,10 @@ public class ReplicationHandler extends
RequestHandlerBase implements SolrCoreAw
final Map<String,
FileInfo> confFileInfoCache) {
List<Map<String, Object>> confFiles = new ArrayList<>();
synchronized (confFileInfoCache) {
- File confDir = new File(core.getResourceLoader().getConfigDir());
Checksum checksum = null;
for (int i = 0; i < nameAndAlias.size(); i++) {
String cf = nameAndAlias.getName(i);
- File f = new File(confDir, cf);
+ File f = new File(core.getResourceLoader().getConfigDir(), cf);
if (!f.exists() || f.isDirectory()) continue; //must not happen
FileInfo info = confFileInfoCache.get(cf);
if (info == null || info.lastmodified != f.lastModified() || info.size
!= f.length()) {
@@ -1499,7 +1498,7 @@ public class ReplicationHandler extends
RequestHandlerBase implements SolrCoreAw
protected String tlogFileName;
protected String sOffset;
protected String sLen;
- protected String compress;
+ protected final boolean compress;
protected boolean useChecksum;
protected long offset = -1;
@@ -1521,7 +1520,7 @@ public class ReplicationHandler extends
RequestHandlerBase implements SolrCoreAw
sOffset = params.get(OFFSET);
sLen = params.get(LEN);
- compress = params.get(COMPRESSION);
+ compress = Boolean.parseBoolean(params.get(COMPRESSION));
useChecksum = params.getBool(CHECKSUM, false);
indexGen = params.getLong(GENERATION);
if (useChecksum) {
@@ -1565,7 +1564,7 @@ public class ReplicationHandler extends
RequestHandlerBase implements SolrCoreAw
protected void createOutputStream(OutputStream out) {
out = new CloseShieldOutputStream(out); // DeflaterOutputStream requires
a close call, but don't close the request outputstream
- if (Boolean.parseBoolean(compress)) {
+ if (compress) {
fos = new FastOutputStream(new DeflaterOutputStream(out));
} else {
fos = new FastOutputStream(out);
@@ -1726,7 +1725,7 @@ public class ReplicationHandler extends
RequestHandlerBase implements SolrCoreAw
protected File initFile() {
//if it is a conf file read from config directory
- return new File(core.getResourceLoader().getConfigDir(), cfileName);
+ return
core.getResourceLoader().getConfigPath().resolve(cfileName).toFile();
}
}
diff --git
a/solr/core/src/test/org/apache/solr/AnalysisAfterCoreReloadTest.java
b/solr/core/src/test/org/apache/solr/AnalysisAfterCoreReloadTest.java
index 4941bd1..49c6351 100644
--- a/solr/core/src/test/org/apache/solr/AnalysisAfterCoreReloadTest.java
+++ b/solr/core/src/test/org/apache/solr/AnalysisAfterCoreReloadTest.java
@@ -30,6 +30,11 @@ import org.junit.BeforeClass;
import java.io.File;
import java.io.IOException;
+import java.nio.charset.StandardCharsets;
+import java.nio.file.Files;
+import java.nio.file.Path;
+
+import static java.nio.file.StandardCopyOption.REPLACE_EXISTING;
public class AnalysisAfterCoreReloadTest extends SolrTestCaseJ4 {
@@ -115,24 +120,22 @@ public class AnalysisAfterCoreReloadTest extends
SolrTestCaseJ4 {
private void overwriteStopwords(String stopwords) throws IOException {
try (SolrCore core = h.getCoreContainer().getCore(collection)) {
- String configDir = core.getResourceLoader().getConfigDir();
- FileUtils.moveFile(new File(configDir, "stopwords.txt"), new
File(configDir, "stopwords.txt.bak"));
- File file = new File(configDir, "stopwords.txt");
- FileUtils.writeStringToFile(file, stopwords, "UTF-8");
-
+ Path configPath = core.getResourceLoader().getConfigPath();
+ Files.move(configPath.resolve("stopwords.txt"),
configPath.resolve("stopwords.txt.bak"));
+ Files.write(configPath.resolve("stopwords.txt"),
stopwords.getBytes(StandardCharsets.UTF_8));
}
}
@Override
public void tearDown() throws Exception {
- String configDir;
+ Path configPath;
try (SolrCore core = h.getCoreContainer().getCore(collection)) {
- configDir = core.getResourceLoader().getConfigDir();
+ configPath = core.getResourceLoader().getConfigPath();
}
super.tearDown();
- if (new File(configDir, "stopwords.txt.bak").exists()) {
- FileUtils.deleteQuietly(new File(configDir, "stopwords.txt"));
- FileUtils.moveFile(new File(configDir, "stopwords.txt.bak"), new
File(configDir, "stopwords.txt"));
+ Path backupFile = configPath.resolve("stopwords.txt.bak");
+ if (Files.exists(backupFile)) {
+ Files.move(backupFile, configPath.resolve("stopwords.txt"),
REPLACE_EXISTING);
}
}
diff --git
a/solr/core/src/test/org/apache/solr/core/AlternateDirectoryTest.java
b/solr/core/src/test/org/apache/solr/core/AlternateDirectoryTest.java
index 9bf08a5..a7323dc 100644
--- a/solr/core/src/test/org/apache/solr/core/AlternateDirectoryTest.java
+++ b/solr/core/src/test/org/apache/solr/core/AlternateDirectoryTest.java
@@ -16,8 +16,8 @@
*/
package org.apache.solr.core;
-import java.io.File;
import java.io.IOException;
+import java.nio.file.Path;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriter;
@@ -58,8 +58,7 @@ public class AlternateDirectoryTest extends SolrTestCaseJ4 {
public Directory create(String path, LockFactory lockFactory, DirContext
dirContext) throws IOException {
openCalled = true;
- // we pass NoLockFactory, because the real lock factory is set later by
injectLockFactory:
- return dir = newFSDirectory(new File(path).toPath(), lockFactory);
+ return dir = newFSDirectory(Path.of(path), lockFactory);
}
}
diff --git
a/solr/test-framework/src/java/org/apache/solr/core/MockFSDirectoryFactory.java
b/solr/test-framework/src/java/org/apache/solr/core/MockFSDirectoryFactory.java
index 2cae81e..0592294 100644
---
a/solr/test-framework/src/java/org/apache/solr/core/MockFSDirectoryFactory.java
+++
b/solr/test-framework/src/java/org/apache/solr/core/MockFSDirectoryFactory.java
@@ -34,8 +34,7 @@ public class MockFSDirectoryFactory extends
StandardDirectoryFactory {
@Override
public Directory create(String path, LockFactory lockFactory, DirContext
dirContext) throws IOException {
- // we pass NoLockFactory, because the real lock factory is set later by
injectLockFactory:
- Directory dir = LuceneTestCase.newFSDirectory(new File(path).toPath(),
lockFactory);
+ Directory dir = LuceneTestCase.newFSDirectory(Path.of(path), lockFactory);
// we can't currently do this check because of how
// Solr has to reboot a new Directory sometimes when replicating
// or rolling back - the old directory is closed and the following
@@ -64,8 +63,7 @@ public class MockFSDirectoryFactory extends
StandardDirectoryFactory {
Directory cdir = dir;
if (dir instanceof NRTCachingDirectory) {
cdir = ((NRTCachingDirectory)dir).getDelegate();
- }
- if (cdir instanceof TrackingDirectoryWrapper) {
+ } else if (dir instanceof TrackingDirectoryWrapper) {
cdir = ((TrackingDirectoryWrapper)dir).getDelegate();
}
return cdir;