This is an automated email from the ASF dual-hosted git repository.
gurwls223 pushed a commit to branch branch-3.0
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/branch-3.0 by this push:
new 20cd7bb [SPARK-32930][CORE] Replace deprecated isFile/isDirectory
methods
20cd7bb is described below
commit 20cd7bba4e6521b5c6d5eb370210d9488e977c62
Author: William Hyun <[email protected]>
AuthorDate: Fri Sep 18 18:13:11 2020 +0900
[SPARK-32930][CORE] Replace deprecated isFile/isDirectory methods
### What changes were proposed in this pull request?
This PR aims to replace deprecated `isFile` and `isDirectory` methods.
```diff
- fs.isDirectory(hadoopPath)
+ fs.getFileStatus(hadoopPath).isDirectory
```
```diff
- fs.isFile(new Path(inProgressLog))
+ fs.getFileStatus(new Path(inProgressLog)).isFile
```
### Why are the changes needed?
It shows deprecation warnings.
-
https://amplab.cs.berkeley.edu/jenkins/view/Spark%20QA%20Test%20(Dashboard)/job/spark-master-test-sbt-hadoop-3.2-hive-2.3/1244/consoleFull
```
[warn]
/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-hive-2.3/core/src/main/scala/org/apache/spark/deploy/history/FsHistoryProvider.scala:815:
method isFile in class FileSystem is deprecated: see corresponding Javadoc for
more information.
[warn] if (!fs.isFile(new Path(inProgressLog))) {
```
```
[warn]
/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-hive-2.3/core/src/main/scala/org/apache/spark/SparkContext.scala:1884:
method isDirectory in class FileSystem is deprecated: see corresponding
Javadoc for more information.
[warn] if (fs.isDirectory(hadoopPath)) {
```
### Does this PR introduce _any_ user-facing change?
No.
### How was this patch tested?
Pass the Jenkins.
Closes #29796 from williamhyun/filesystem.
Authored-by: William Hyun <[email protected]>
Signed-off-by: HyukjinKwon <[email protected]>
(cherry picked from commit 78928879810a2e96dbb6ec4608b548a0072a040f)
Signed-off-by: HyukjinKwon <[email protected]>
---
core/src/main/scala/org/apache/spark/SparkContext.scala | 2 +-
.../apache/spark/deploy/history/EventLogFileWritersSuite.scala | 6 +++---
.../scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala | 2 +-
.../main/scala/org/apache/spark/streaming/util/HdfsUtils.scala | 8 ++++++--
4 files changed, 11 insertions(+), 7 deletions(-)
diff --git a/core/src/main/scala/org/apache/spark/SparkContext.scala
b/core/src/main/scala/org/apache/spark/SparkContext.scala
index 66fe1d7..9f9d611 100644
--- a/core/src/main/scala/org/apache/spark/SparkContext.scala
+++ b/core/src/main/scala/org/apache/spark/SparkContext.scala
@@ -1853,7 +1853,7 @@ class SparkContext(config: SparkConf) extends Logging {
if (!fs.exists(hadoopPath)) {
throw new FileNotFoundException(s"Jar ${path} not found")
}
- if (fs.isDirectory(hadoopPath)) {
+ if (fs.getFileStatus(hadoopPath).isDirectory) {
throw new IllegalArgumentException(
s"Directory ${path} is not allowed for addJar")
}
diff --git
a/core/src/test/scala/org/apache/spark/deploy/history/EventLogFileWritersSuite.scala
b/core/src/test/scala/org/apache/spark/deploy/history/EventLogFileWritersSuite.scala
index 060b878..e9b739c 100644
---
a/core/src/test/scala/org/apache/spark/deploy/history/EventLogFileWritersSuite.scala
+++
b/core/src/test/scala/org/apache/spark/deploy/history/EventLogFileWritersSuite.scala
@@ -213,7 +213,7 @@ class SingleEventLogFileWriterSuite extends
EventLogFileWritersSuite {
compressionCodecShortName)
val finalLogPath = new Path(logPath)
- assert(fileSystem.exists(finalLogPath) && fileSystem.isFile(finalLogPath))
+ assert(fileSystem.exists(finalLogPath) && fileSystem.getFileStatus(finalLogPath).isFile)
assert(expectedLines === readLinesFromEventLogFile(finalLogPath,
fileSystem))
}
}
@@ -357,10 +357,10 @@ class RollingEventLogFilesWriterSuite extends
EventLogFileWritersSuite {
expectedLines: Seq[String]): Unit = {
val logDirPath = getAppEventLogDirPath(logBaseDir, appId, appAttemptId)
- assert(fileSystem.exists(logDirPath) && fileSystem.isDirectory(logDirPath))
+ assert(fileSystem.exists(logDirPath) && fileSystem.getFileStatus(logDirPath).isDirectory)
val appStatusFile = getAppStatusFilePath(logDirPath, appId, appAttemptId,
inProgress = false)
- assert(fileSystem.exists(appStatusFile) && fileSystem.isFile(appStatusFile))
+ assert(fileSystem.exists(appStatusFile) && fileSystem.getFileStatus(appStatusFile).isFile)
val eventLogFiles = listEventLogFiles(logDirPath)
val allLines = mutable.ArrayBuffer[String]()
diff --git
a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala
b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala
index d0aa618..86385f0 100644
---
a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala
+++
b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala
@@ -1191,7 +1191,7 @@ class HiveDDLSuite
expectedDBUri,
Map.empty))
// the database directory was created
- assert(fs.exists(dbPath) && fs.isDirectory(dbPath))
+ assert(fs.exists(dbPath) && fs.getFileStatus(dbPath).isDirectory)
sql(s"USE $dbName")
val tabName = "tab1"
diff --git
a/streaming/src/main/scala/org/apache/spark/streaming/util/HdfsUtils.scala
b/streaming/src/main/scala/org/apache/spark/streaming/util/HdfsUtils.scala
index 1465772..006bcad 100644
--- a/streaming/src/main/scala/org/apache/spark/streaming/util/HdfsUtils.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/util/HdfsUtils.scala
@@ -58,7 +58,7 @@ private[streaming] object HdfsUtils {
// If we are really unlucky, the file may be deleted as we're opening the stream.
// This can happen as clean up is performed by daemon threads that may be left over from
// previous runs.
- if (!dfs.isFile(dfsPath)) null else throw e
+ if (!dfs.getFileStatus(dfsPath).isFile) null else throw e
}
}
@@ -92,6 +92,10 @@ private[streaming] object HdfsUtils {
def checkFileExists(path: String, conf: Configuration): Boolean = {
val hdpPath = new Path(path)
val fs = getFileSystemForPath(hdpPath, conf)
- fs.isFile(hdpPath)
+ try {
+ fs.getFileStatus(hdpPath).isFile
+ } catch {
+ case _: FileNotFoundException => false
+ }
}
}
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]