LuciferYang commented on a change in pull request #31323:
URL: https://github.com/apache/spark/pull/31323#discussion_r563757768
##########
File path:
external/kafka-0-10-sql/src/test/scala/org/apache/spark/sql/kafka010/KafkaSourceOffsetSuite.scala
##########
@@ -99,7 +100,9 @@ class KafkaSourceOffsetSuite extends OffsetSuite with
SharedSparkSession {
private def readFromResource(file: String): SerializedOffset = {
import scala.io.Source
val input = getClass.getResource(s"/$file").toURI
- val str = Source.fromFile(input).mkString
- SerializedOffset(str)
+ Utils.tryWithResource(Source.fromFile(input)) { source =>
+ val str = source.mkString
Review comment:
removed
##########
File path:
examples/src/main/scala/org/apache/spark/examples/DFSReadWriteTest.scala
##########
@@ -46,7 +47,10 @@ object DFSReadWriteTest {
private val NPARAMS = 2
private def readFile(filename: String): List[String] = {
- val lineIter: Iterator[String] = fromFile(filename).getLines()
+ val lineIter: Iterator[String] =
Review comment:
done
##########
File path: core/src/test/scala/org/apache/spark/deploy/master/MasterSuite.scala
##########
@@ -327,22 +328,31 @@ class MasterSuite extends SparkFunSuite
val masterUrl = s"http://localhost:${localCluster.masterWebUIPort}"
try {
eventually(timeout(5.seconds), interval(100.milliseconds)) {
- val json = Source.fromURL(s"$masterUrl/json").getLines().mkString("\n")
+ val json = Utils.tryWithResource(
+ Source.fromURL(s"$masterUrl/json")) { source =>
+ source.getLines().mkString("\n")
Review comment:
thx ~ done
##########
File path:
core/src/test/scala/org/apache/spark/deploy/LogUrlsStandaloneSuite.scala
##########
@@ -43,7 +43,9 @@ class LogUrlsStandaloneSuite extends SparkFunSuite with
LocalSparkContext {
assert(info.logUrlMap.nonEmpty)
// Browse to each URL to check that it's valid
info.logUrlMap.foreach { case (logType, logUrl) =>
- val html = Source.fromURL(logUrl).mkString
+ val html = Utils.tryWithResource(Source.fromURL(logUrl)) { source =>
+ source.mkString
Review comment:
done
##########
File path: core/src/test/scala/org/apache/spark/deploy/master/MasterSuite.scala
##########
@@ -327,22 +328,25 @@ class MasterSuite extends SparkFunSuite
val masterUrl = s"http://localhost:${localCluster.masterWebUIPort}"
try {
eventually(timeout(5.seconds), interval(100.milliseconds)) {
- val json = Source.fromURL(s"$masterUrl/json").getLines().mkString("\n")
+ val json = Utils
+
.tryWithResource(Source.fromURL(s"$masterUrl/json"))(_.getLines().mkString("\n"))
val JArray(workers) = (parse(json) \ "workers")
workers.size should be (2)
workers.foreach { workerSummaryJson =>
val JString(workerWebUi) = workerSummaryJson \ "webuiaddress"
- val workerResponse = parse(Source.fromURL(s"${workerWebUi}/json")
- .getLines().mkString("\n"))
+ val workerResponse = parse(Utils
+
.tryWithResource(Source.fromURL(s"$workerWebUi/json"))(_.getLines().mkString("\n")))
Review comment:
This matches the original logic, but I can try changing it.
##########
File path:
external/kafka-0-10-sql/src/test/scala/org/apache/spark/sql/kafka010/KafkaTestUtils.scala
##########
@@ -167,29 +167,31 @@ class KafkaTestUtils(
* In this method we rewrite krb5.conf to make kdc and client use the same
enctypes
*/
private def rewriteKrb5Conf(): Unit = {
- val krb5Conf = Source.fromFile(kdc.getKrb5conf, "UTF-8").getLines()
- var rewritten = false
- val addedConfig =
- addedKrb5Config("default_tkt_enctypes", "aes128-cts-hmac-sha1-96") +
- addedKrb5Config("default_tgs_enctypes", "aes128-cts-hmac-sha1-96")
- val rewriteKrb5Conf = krb5Conf.map(s =>
- if (s.contains("libdefaults")) {
- rewritten = true
- s + addedConfig
+ Utils.tryWithResource(Source.fromFile(kdc.getKrb5conf, "UTF-8")) { source
=>
Review comment:
```
sbt.ForkMain$ForkError: java.io.IOException: Stream Closed
at java.io.FileInputStream.readBytes(Native Method)
at java.io.FileInputStream.read(FileInputStream.java:255)
at sun.nio.cs.StreamDecoder.readBytes(StreamDecoder.java:284)
at sun.nio.cs.StreamDecoder.implRead(StreamDecoder.java:326)
at sun.nio.cs.StreamDecoder.read(StreamDecoder.java:178)
at java.io.InputStreamReader.read(InputStreamReader.java:184)
at java.io.BufferedReader.fill(BufferedReader.java:161)
at java.io.BufferedReader.readLine(BufferedReader.java:324)
at java.io.BufferedReader.readLine(BufferedReader.java:389)
at
scala.io.BufferedSource$BufferedLineIterator.hasNext(BufferedSource.scala:74)
at scala.collection.Iterator$$anon$10.hasNext(Iterator.scala:460)
at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:513)
at scala.collection.Iterator.foreach(Iterator.scala:943)
at scala.collection.Iterator.foreach$(Iterator.scala:943)
at scala.collection.AbstractIterator.foreach(Iterator.scala:1431)
at scala.collection.TraversableOnce.addString(TraversableOnce.scala:424)
at
scala.collection.TraversableOnce.addString$(TraversableOnce.scala:407)
at scala.collection.AbstractIterator.addString(Iterator.scala:1431)
at scala.collection.TraversableOnce.mkString(TraversableOnce.scala:377)
at scala.collection.TraversableOnce.mkString$(TraversableOnce.scala:376)
at scala.collection.AbstractIterator.mkString(Iterator.scala:1431)
at scala.collection.TraversableOnce.mkString(TraversableOnce.scala:379)
at scala.collection.TraversableOnce.mkString$(TraversableOnce.scala:379)
at scala.collection.AbstractIterator.mkString(Iterator.scala:1431)
at
org.apache.spark.sql.kafka010.KafkaTestUtils.rewriteKrb5Conf(KafkaTestUtils.scala:182)
at
org.apache.spark.sql.kafka010.KafkaTestUtils.setUpMiniKdc(KafkaTestUtils.scala:162)
at
org.apache.spark.sql.kafka010.KafkaTestUtils.setup(KafkaTestUtils.scala:304)
at
org.apache.spark.sql.kafka010.KafkaDelegationTokenSuite.beforeAll(KafkaDelegationTokenSuite.scala:49)
```
`Utils.tryWithResource(Source.fromFile(kdc.getKrb5conf, "UTF-8"))
(_.getLines())` seems to return a lazy iterator, so I expanded the
`Utils.tryWithResource` scope to avoid the `java.io.IOException: Stream Closed`.
Alternatively, we need to materialize the result of
`Utils.tryWithResource(Source.fromFile(kdc.getKrb5conf, "UTF-8"))
(_.getLines())`, for example as
`Utils.tryWithResource(Source.fromFile(kdc.getKrb5conf, "UTF-8"))
(_.getLines().toList)`.
Which one would you choose, @dongjoon-hyun?
----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
For queries about this service, please contact Infrastructure at:
[email protected]
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]