jaceklaskowski commented on a change in pull request #31323:
URL: https://github.com/apache/spark/pull/31323#discussion_r563724184
##########
File path: core/src/test/scala/org/apache/spark/deploy/LogUrlsStandaloneSuite.scala
##########
@@ -43,7 +43,9 @@ class LogUrlsStandaloneSuite extends SparkFunSuite with LocalSparkContext {
assert(info.logUrlMap.nonEmpty)
// Browse to each URL to check that it's valid
info.logUrlMap.foreach { case (logType, logUrl) =>
- val html = Source.fromURL(logUrl).mkString
+ val html = Utils.tryWithResource(Source.fromURL(logUrl)) { source =>
+ source.mkString
Review comment:
nit: `_.mkString`
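A minimal sketch of what that could look like (assuming the curried `Utils.tryWithResource(resource)(f)` form already used in this PR):
```
// underscore shorthand; the source is closed right after mkString consumes it
val html = Utils.tryWithResource(Source.fromURL(logUrl))(_.mkString)
```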
##########
File path: core/src/test/scala/org/apache/spark/deploy/master/MasterSuite.scala
##########
@@ -327,22 +328,31 @@ class MasterSuite extends SparkFunSuite
val masterUrl = s"http://localhost:${localCluster.masterWebUIPort}"
try {
eventually(timeout(5.seconds), interval(100.milliseconds)) {
- val json = Source.fromURL(s"$masterUrl/json").getLines().mkString("\n")
+ val json = Utils.tryWithResource(
+ Source.fromURL(s"$masterUrl/json")) { source =>
+ source.getLines().mkString("\n")
Review comment:
nit: Indent (and perhaps `_.getLines...` too?)
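Something like this sketch might read more cleanly (same `Utils.tryWithResource` assumption as above):
```
// indent the continuation, or collapse it via the underscore shorthand
val json = Utils.tryWithResource(Source.fromURL(s"$masterUrl/json"))(
  _.getLines().mkString("\n"))
```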
##########
File path: external/kafka-0-10-sql/src/test/scala/org/apache/spark/sql/kafka010/KafkaSourceOffsetSuite.scala
##########
@@ -99,7 +100,9 @@ class KafkaSourceOffsetSuite extends OffsetSuite with SharedSparkSession {
private def readFromResource(file: String): SerializedOffset = {
import scala.io.Source
val input = getClass.getResource(s"/$file").toURI
- val str = Source.fromFile(input).mkString
- SerializedOffset(str)
+ Utils.tryWithResource(Source.fromFile(input)) { source =>
+ val str = source.mkString
Review comment:
Any reason to keep this intermediate `str` value?
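If not, a sketch of the collapsed form (keeping the same `tryWithResource` wrapping):
```
// no intermediate val; read the source and wrap it in one expression
Utils.tryWithResource(Source.fromFile(input)) { source =>
  SerializedOffset(source.mkString)
}
```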
##########
File path: core/src/test/scala/org/apache/spark/deploy/master/MasterSuite.scala
##########
@@ -327,22 +328,31 @@ class MasterSuite extends SparkFunSuite
val masterUrl = s"http://localhost:${localCluster.masterWebUIPort}"
try {
eventually(timeout(5.seconds), interval(100.milliseconds)) {
- val json = Source.fromURL(s"$masterUrl/json").getLines().mkString("\n")
+ val json = Utils.tryWithResource(
+ Source.fromURL(s"$masterUrl/json")) { source =>
+ source.getLines().mkString("\n")
+ }
val JArray(workers) = (parse(json) \ "workers")
workers.size should be (2)
workers.foreach { workerSummaryJson =>
val JString(workerWebUi) = workerSummaryJson \ "webuiaddress"
- val workerResponse = parse(Source.fromURL(s"${workerWebUi}/json")
- .getLines().mkString("\n"))
+ val workerResponse = Utils.tryWithResource(
+ Source.fromURL(s"$workerWebUi/json")) { source =>
+ parse(source.getLines().mkString("\n"))
Review comment:
nit: Indent
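For reference, a sketch with the continuation aligned (same assumptions as above):
```
// indent the function body one level under the tryWithResource call
val workerResponse = Utils.tryWithResource(Source.fromURL(s"$workerWebUi/json")) { source =>
  parse(source.getLines().mkString("\n"))
}
```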
##########
File path: examples/src/main/scala/org/apache/spark/examples/DFSReadWriteTest.scala
##########
@@ -46,7 +47,10 @@ object DFSReadWriteTest {
private val NPARAMS = 2
private def readFile(filename: String): List[String] = {
- val lineIter: Iterator[String] = fromFile(filename).getLines()
+ val lineIter: Iterator[String] =
Review comment:
While we're at it...
```
Utils.tryWithResource(fromFile(filename))(_.getLines().toList)
```
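(`toList` has to run inside the resource function, since `getLines()` is lazy and the source is closed as soon as `tryWithResource` returns.)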
----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
For queries about this service, please contact Infrastructure at:
[email protected]
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]