jaceklaskowski commented on a change in pull request #31323:
URL: https://github.com/apache/spark/pull/31323#discussion_r563724184



##########
File path: core/src/test/scala/org/apache/spark/deploy/LogUrlsStandaloneSuite.scala
##########
@@ -43,7 +43,9 @@ class LogUrlsStandaloneSuite extends SparkFunSuite with LocalSparkContext {
       assert(info.logUrlMap.nonEmpty)
       // Browse to each URL to check that it's valid
       info.logUrlMap.foreach { case (logType, logUrl) =>
-        val html = Source.fromURL(logUrl).mkString
+        val html = Utils.tryWithResource(Source.fromURL(logUrl)) { source =>
+          source.mkString

Review comment:
       nit: `_.mkString`
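
    i.e. something like this (untested sketch):

    ```
    val html = Utils.tryWithResource(Source.fromURL(logUrl))(_.mkString)
    ```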

##########
File path: core/src/test/scala/org/apache/spark/deploy/master/MasterSuite.scala
##########
@@ -327,22 +328,31 @@ class MasterSuite extends SparkFunSuite
     val masterUrl = s"http://localhost:${localCluster.masterWebUIPort}"
     try {
       eventually(timeout(5.seconds), interval(100.milliseconds)) {
-        val json = Source.fromURL(s"$masterUrl/json").getLines().mkString("\n")
+        val json = Utils.tryWithResource(
+          Source.fromURL(s"$masterUrl/json")) { source =>
+          source.getLines().mkString("\n")

Review comment:
       nit: Indent (and perhaps `_.getLines...` too?)
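
    Roughly this shape, perhaps (just a sketch, exact style up to you):

    ```
    val json = Utils.tryWithResource(Source.fromURL(s"$masterUrl/json")) {
      _.getLines().mkString("\n")
    }
    ```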

##########
File path: external/kafka-0-10-sql/src/test/scala/org/apache/spark/sql/kafka010/KafkaSourceOffsetSuite.scala
##########
@@ -99,7 +100,9 @@ class KafkaSourceOffsetSuite extends OffsetSuite with SharedSparkSession {
   private def readFromResource(file: String): SerializedOffset = {
     import scala.io.Source
     val input = getClass.getResource(s"/$file").toURI
-    val str = Source.fromFile(input).mkString
-    SerializedOffset(str)
+    Utils.tryWithResource(Source.fromFile(input)) { source =>
+      val str = source.mkString

Review comment:
       Any reason for this `str` value?
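
    i.e. it could be inlined (sketch):

    ```
    Utils.tryWithResource(Source.fromFile(input)) { source =>
      SerializedOffset(source.mkString)
    }
    ```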

##########
File path: core/src/test/scala/org/apache/spark/deploy/master/MasterSuite.scala
##########
@@ -327,22 +328,31 @@ class MasterSuite extends SparkFunSuite
     val masterUrl = s"http://localhost:${localCluster.masterWebUIPort}"
     try {
       eventually(timeout(5.seconds), interval(100.milliseconds)) {
-        val json = Source.fromURL(s"$masterUrl/json").getLines().mkString("\n")
+        val json = Utils.tryWithResource(
+          Source.fromURL(s"$masterUrl/json")) { source =>
+          source.getLines().mkString("\n")
+        }
         val JArray(workers) = (parse(json) \ "workers")
         workers.size should be (2)
         workers.foreach { workerSummaryJson =>
           val JString(workerWebUi) = workerSummaryJson \ "webuiaddress"
-          val workerResponse = parse(Source.fromURL(s"${workerWebUi}/json")
-            .getLines().mkString("\n"))
+          val workerResponse = Utils.tryWithResource(
+            Source.fromURL(s"$workerWebUi/json")) { source =>
+            parse(source.getLines().mkString("\n"))

Review comment:
       nit: Indent
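
    e.g. (just to show the shape and indentation I had in mind; untested):

    ```
    val workerResponse = Utils.tryWithResource(Source.fromURL(s"$workerWebUi/json")) { source =>
      parse(source.getLines().mkString("\n"))
    }
    ```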

##########
File path: examples/src/main/scala/org/apache/spark/examples/DFSReadWriteTest.scala
##########
@@ -46,7 +47,10 @@ object DFSReadWriteTest {
   private val NPARAMS = 2
 
   private def readFile(filename: String): List[String] = {
-    val lineIter: Iterator[String] = fromFile(filename).getLines()
+    val lineIter: Iterator[String] =

Review comment:
       While we're at it...
   
   ```
    Utils.tryWithResource(fromFile(filename))(_.getLines().toList)
   ```
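
    (Note: `.toList` has to stay inside `tryWithResource`, since the iterator from `getLines()` can't be consumed once the source is closed.)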

##########
File path: core/src/test/scala/org/apache/spark/deploy/master/MasterSuite.scala
##########
@@ -327,22 +328,25 @@ class MasterSuite extends SparkFunSuite
     val masterUrl = s"http://localhost:${localCluster.masterWebUIPort}"
     try {
       eventually(timeout(5.seconds), interval(100.milliseconds)) {
-        val json = Source.fromURL(s"$masterUrl/json").getLines().mkString("\n")
+        val json = Utils
+          .tryWithResource(Source.fromURL(s"$masterUrl/json"))(_.getLines().mkString("\n"))
         val JArray(workers) = (parse(json) \ "workers")
         workers.size should be (2)
         workers.foreach { workerSummaryJson =>
           val JString(workerWebUi) = workerSummaryJson \ "webuiaddress"
-          val workerResponse = parse(Source.fromURL(s"${workerWebUi}/json")
-            .getLines().mkString("\n"))
+          val workerResponse = parse(Utils
+            .tryWithResource(Source.fromURL(s"$workerWebUi/json"))(_.getLines().mkString("\n")))

Review comment:
       Are the `getLines()` and `\n` actually needed? Please review the other similar changes for the same pattern.
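
    i.e. would plain `mkString` do here, since the result only gets fed to `parse`? Sketch:

    ```
    val workerResponse = parse(
      Utils.tryWithResource(Source.fromURL(s"$workerWebUi/json"))(_.mkString))
    ```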

##########
File path: core/src/test/scala/org/apache/spark/deploy/master/MasterSuite.scala
##########
@@ -327,22 +328,25 @@ class MasterSuite extends SparkFunSuite
     val masterUrl = s"http://localhost:${localCluster.masterWebUIPort}"
     try {
       eventually(timeout(5.seconds), interval(100.milliseconds)) {
-        val json = Source.fromURL(s"$masterUrl/json").getLines().mkString("\n")
+        val json = Utils
+          .tryWithResource(Source.fromURL(s"$masterUrl/json"))(_.getLines().mkString("\n"))

Review comment:
       Are the `getLines()` and `\n` actually needed?
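
    e.g. (sketch):

    ```
    val json = Utils.tryWithResource(Source.fromURL(s"$masterUrl/json"))(_.mkString)
    ```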

##########
File path: core/src/test/scala/org/apache/spark/SparkContextSuite.scala
##########
@@ -376,7 +377,9 @@ class SparkContextSuite extends SparkFunSuite with LocalSparkContext with Eventu
       sc.addFile(file1.getAbsolutePath)
       def getAddedFileContents(): String = {
         sc.parallelize(Seq(0)).map { _ =>
-          scala.io.Source.fromFile(SparkFiles.get("file")).mkString
+          Utils.tryWithResource(Source.fromFile(SparkFiles.get("file"))) { source =>
+            source.mkString

Review comment:
       `_.mkString`?
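
    i.e. (sketch):

    ```
    Utils.tryWithResource(Source.fromFile(SparkFiles.get("file")))(_.mkString)
    ```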




----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
[email protected]


