This is an automated email from the ASF dual-hosted git repository.
gurwls223 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push:
new 2e192b6 [SPARK-34284][CORE][TESTS] Fix deprecated API usage of Apache
commons-io
2e192b6 is described below
commit 2e192b6f4546d39986b95603a1aaf2ccce947098
Author: yangjie01 <[email protected]>
AuthorDate: Fri Jan 29 17:50:14 2021 +0900
[SPARK-34284][CORE][TESTS] Fix deprecated API usage of Apache commons-io
### What changes were proposed in this pull request?
There are some deprecated API usage compilation warnings related to Apache
commons-io as follows:
```
[WARNING] [Warn]
/spark/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala:1109:
[deprecation
org.apache.spark.deploy.SparkSubmitSuite.checkDownloadedFile.$org_scalatest_assert_macro_expr.$org_scalatest_assert_macro_left
| origin=org.apache.commons.io.FileUtils.readFileToString | version=] method
readFileToString in class FileUtils is deprecated
[WARNING] [Warn]
/spark/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala:1110:
[deprecation
org.apache.spark.deploy.SparkSubmitSuite.checkDownloadedFile.$org_scalatest_assert_macro_expr.$org_scalatest_assert_macro_right
| origin=org.apache.commons.io.FileUtils.readFileToString | version=] method
readFileToString in class FileUtils is deprecated
[WARNING] [Warn]
/spark/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala:1152:
[deprecation org.apache.spark.deploy.SparkSubmitSuite |
origin=org.apache.commons.io.FileUtils.write | version=] method write in class
FileUtils is deprecated
[WARNING] [Warn]
/spark/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala:1167:
[deprecation org.apache.spark.deploy.SparkSubmitSuite |
origin=org.apache.commons.io.FileUtils.write | version=] method write in class
FileUtils is deprecated
[WARNING] [Warn]
/spark/core/src/test/scala/org/apache/spark/deploy/history/HistoryServerSuite.scala:201:
[deprecation org.apache.spark.deploy.history.HistoryServerSuite.<local
HistoryServerSuite>.$anonfun.exp |
origin=org.apache.commons.io.IOUtils.toString | version=] method toString in
class IOUtils is deprecated
[WARNING] [Warn]
/spark/core/src/test/scala/org/apache/spark/deploy/history/HistoryServerSuite.scala:716:
[deprecation
org.apache.spark.deploy.history.HistoryServerSuite.getContentAndCode.inString.$anonfun
| origin=org.apache.commons.io.IOUtils.toString | version=] method toString in
class IOUtils is deprecated
[WARNING] [Warn]
/spark/core/src/test/scala/org/apache/spark/deploy/history/HistoryServerSuite.scala:732:
[deprecation
org.apache.spark.deploy.history.HistoryServerSuite.connectAndGetInputStream.errString.$anonfun
| origin=org.apache.commons.io.IOUtils.toString | version=] method toString in
class IOUtils is deprecated
[WARNING] [Warn]
/spark/streaming/src/test/scala/org/apache/spark/streaming/InputStreamsSuite.scala:267:
[deprecation org.apache.spark.streaming.InputStreamsSuite.<local
InputStreamsSuite>.$anonfun.$anonfun.write |
origin=org.apache.commons.io.IOUtils.write | version=] method write in class
IOUtils is deprecated
[WARNING] [Warn]
/spark/streaming/src/test/scala/org/apache/spark/streaming/StreamingContextSuite.scala:912:
[deprecation
org.apache.spark.streaming.StreamingContextSuite.createCorruptedCheckpoint |
origin=org.apache.commons.io.FileUtils.write | version=] method write in class
FileUtils is deprecated
```
The main API change is the need to add a `java.nio.charset.Charset`
parameter when the corresponding method is called, so the main change of this
PR is to add a `StandardCharsets.UTF_8` parameter to these methods.
### Why are the changes needed?
Fix deprecated API usage of Apache commons-io.
### Does this PR introduce _any_ user-facing change?
No
### How was this patch tested?
Pass the Jenkins or GitHub Action
Closes #31389 from LuciferYang/SPARK-34284.
Authored-by: yangjie01 <[email protected]>
Signed-off-by: HyukjinKwon <[email protected]>
---
.../src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala | 8 ++++----
.../org/apache/spark/deploy/history/HistoryServerSuite.scala | 7 ++++---
.../test/scala/org/apache/spark/streaming/InputStreamsSuite.scala | 2 +-
.../scala/org/apache/spark/streaming/StreamingContextSuite.scala | 3 ++-
4 files changed, 11 insertions(+), 9 deletions(-)
diff --git a/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala
b/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala
index edcebf5..02efcae 100644
--- a/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala
@@ -1106,8 +1106,8 @@ class SparkSubmitSuite
// The path and filename are preserved.
assert(outputUri.getPath.endsWith(new Path(sourceUri).getName))
- assert(FileUtils.readFileToString(new File(outputUri.getPath)) ===
- FileUtils.readFileToString(new File(sourceUri.getPath)))
+ assert(FileUtils.readFileToString(new File(outputUri.getPath),
StandardCharsets.UTF_8) ===
+ FileUtils.readFileToString(new File(sourceUri.getPath),
StandardCharsets.UTF_8))
}
private def deleteTempOutputFile(outputPath: String): Unit = {
@@ -1149,7 +1149,7 @@ class SparkSubmitSuite
val jarFile = File.createTempFile("test", ".jar")
jarFile.deleteOnExit()
val content = "hello, world"
- FileUtils.write(jarFile, content)
+ FileUtils.write(jarFile, content, StandardCharsets.UTF_8)
val hadoopConf = new Configuration()
val tmpDir = Files.createTempDirectory("tmp").toFile
updateConfWithFakeS3Fs(hadoopConf)
@@ -1164,7 +1164,7 @@ class SparkSubmitSuite
val jarFile = File.createTempFile("test", ".jar")
jarFile.deleteOnExit()
val content = "hello, world"
- FileUtils.write(jarFile, content)
+ FileUtils.write(jarFile, content, StandardCharsets.UTF_8)
val hadoopConf = new Configuration()
val tmpDir = Files.createTempDirectory("tmp").toFile
updateConfWithFakeS3Fs(hadoopConf)
diff --git
a/core/src/test/scala/org/apache/spark/deploy/history/HistoryServerSuite.scala
b/core/src/test/scala/org/apache/spark/deploy/history/HistoryServerSuite.scala
index 08b2118..938eb8d 100644
---
a/core/src/test/scala/org/apache/spark/deploy/history/HistoryServerSuite.scala
+++
b/core/src/test/scala/org/apache/spark/deploy/history/HistoryServerSuite.scala
@@ -199,7 +199,8 @@ class HistoryServerSuite extends SparkFunSuite with
BeforeAndAfter with Matchers
errOpt should be (None)
val exp = IOUtils.toString(new FileInputStream(
- new File(expRoot, HistoryServerSuite.sanitizePath(name) +
"_expectation.json")))
+ new File(expRoot, HistoryServerSuite.sanitizePath(name) +
"_expectation.json")),
+ StandardCharsets.UTF_8)
// compare the ASTs so formatting differences don't cause failures
import org.json4s._
import org.json4s.jackson.JsonMethods._
@@ -713,7 +714,7 @@ object HistoryServerSuite {
def getContentAndCode(url: URL): (Int, Option[String], Option[String]) = {
val (code, in, errString) = connectAndGetInputStream(url)
- val inString = in.map(IOUtils.toString)
+ val inString = in.map(IOUtils.toString(_, StandardCharsets.UTF_8))
(code, inString, errString)
}
@@ -729,7 +730,7 @@ object HistoryServerSuite {
}
val errString = try {
val err = Option(connection.getErrorStream())
- err.map(IOUtils.toString)
+ err.map(IOUtils.toString(_, StandardCharsets.UTF_8))
} catch {
case io: IOException => None
}
diff --git
a/streaming/src/test/scala/org/apache/spark/streaming/InputStreamsSuite.scala
b/streaming/src/test/scala/org/apache/spark/streaming/InputStreamsSuite.scala
index 53ef840..03182ae 100644
---
a/streaming/src/test/scala/org/apache/spark/streaming/InputStreamsSuite.scala
+++
b/streaming/src/test/scala/org/apache/spark/streaming/InputStreamsSuite.scala
@@ -264,7 +264,7 @@ class InputStreamsSuite extends TestSuiteBase with
BeforeAndAfter {
def write(path: Path, text: String): Unit = {
val out = fs.create(path, true)
- IOUtils.write(text, out)
+ IOUtils.write(text, out, StandardCharsets.UTF_8)
out.close()
}
diff --git
a/streaming/src/test/scala/org/apache/spark/streaming/StreamingContextSuite.scala
b/streaming/src/test/scala/org/apache/spark/streaming/StreamingContextSuite.scala
index 1d66378..29eb1db 100644
---
a/streaming/src/test/scala/org/apache/spark/streaming/StreamingContextSuite.scala
+++
b/streaming/src/test/scala/org/apache/spark/streaming/StreamingContextSuite.scala
@@ -18,6 +18,7 @@
package org.apache.spark.streaming
import java.io.{File, NotSerializableException}
+import java.nio.charset.StandardCharsets
import java.util.Locale
import java.util.concurrent.{CountDownLatch, TimeUnit}
import java.util.concurrent.atomic.AtomicInteger
@@ -909,7 +910,7 @@ class StreamingContextSuite
def createCorruptedCheckpoint(): String = {
val checkpointDirectory = Utils.createTempDir().getAbsolutePath()
val fakeCheckpointFile = Checkpoint.checkpointFile(checkpointDirectory,
Time(1000))
- FileUtils.write(new File(fakeCheckpointFile.toString()), "blablabla")
+ FileUtils.write(new File(fakeCheckpointFile.toString()), "blablabla",
StandardCharsets.UTF_8)
assert(Checkpoint.getCheckpointFiles(checkpointDirectory).nonEmpty)
checkpointDirectory
}
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]