mridulm commented on code in PR #43194:
URL: https://github.com/apache/spark/pull/43194#discussion_r1342182541
##########
core/src/test/scala/org/apache/spark/StatusTrackerSuite.scala:
##########
@@ -33,7 +33,7 @@ class StatusTrackerSuite extends SparkFunSuite with Matchers
with LocalSparkCont
val jobId: Int = eventually(timeout(10.seconds)) {
val jobIds = jobFuture.jobIds
jobIds.size should be(1)
- jobIds.head
+ jobIds.max
Review Comment:
Revert — `jobIds.size` is 1, so `head` == `max`
##########
core/src/test/scala/org/apache/spark/StatusTrackerSuite.scala:
##########
@@ -121,7 +121,7 @@ class StatusTrackerSuite extends SparkFunSuite with
Matchers with LocalSparkCont
// countAsync()
val firstJobFuture = sc.parallelize(1 to 1000).countAsync()
val firstJobId = eventually(timeout(10.seconds)) {
- firstJobFuture.jobIds.head
+ firstJobFuture.jobIds.max
}
Review Comment:
Revert - same as above.
##########
core/src/test/scala/org/apache/spark/StatusTrackerSuite.scala:
##########
@@ -105,7 +105,7 @@ class StatusTrackerSuite extends SparkFunSuite with
Matchers with LocalSparkCont
sc.statusTracker.getJobIdsForGroup("my-job-group2") shouldBe empty
val firstJobFuture = sc.parallelize(1 to 1000, 2).takeAsync(999)
eventually(timeout(10.seconds)) {
- firstJobFuture.jobIds.head
+ firstJobFuture.jobIds.max
}
Review Comment:
Revert, same as above.
##########
core/src/test/scala/org/apache/spark/StatusTrackerSuite.scala:
##########
@@ -61,7 +61,7 @@ class StatusTrackerSuite extends SparkFunSuite with Matchers
with LocalSparkCont
// Passing `null` should return jobs that were not run in a job group:
val defaultJobGroupFuture = sc.parallelize(1 to 1000).countAsync()
val defaultJobGroupJobId = eventually(timeout(10.seconds)) {
- defaultJobGroupFuture.jobIds.head
+ defaultJobGroupFuture.jobIds.max
}
Review Comment:
Revert — there should only be a single job submitted here, so `head` will be
`max`
##########
core/src/test/scala/org/apache/spark/StatusTrackerSuite.scala:
##########
@@ -92,7 +92,7 @@ class StatusTrackerSuite extends SparkFunSuite with Matchers
with LocalSparkCont
sc.statusTracker.getJobIdsForGroup("my-job-group2") shouldBe empty
val firstJobFuture = sc.parallelize(1 to 1000, 1).takeAsync(1)
val firstJobId = eventually(timeout(10.seconds)) {
- firstJobFuture.jobIds.head
+ firstJobFuture.jobIds.max
}
Review Comment:
We can revert this as well - same as above.
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]