asfgit closed pull request #23452: [SPARK-26536][BUILD][TEST] Upgrade Mockito to 2.23.4
URL: https://github.com/apache/spark/pull/23452
This is a PR merged from a forked repository.
As GitHub hides the original diff on merge, it is displayed below for the sake of provenance:
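For readers skimming the diff below: nearly every hunk applies one of a handful of Mockito 1.x -> 2.x migration patterns (org.mockito.Matchers renamed to org.mockito.ArgumentMatchers, anyObject() replaced by any(), and type-aware matchers that no longer match null arguments). A minimal Scala sketch of the matcher changes, using a hypothetical Greeter trait for illustration only:

    import org.mockito.ArgumentMatchers.{anyString, isNull}  // Mockito 1.x: org.mockito.Matchers
    import org.mockito.Mockito.{mock, verify, when}

    object MatcherMigrationSketch {
      trait Greeter { def greet(name: String): String }  // hypothetical, for illustration

      def main(args: Array[String]): Unit = {
        val greeter = mock(classOf[Greeter])
        when(greeter.greet(anyString())).thenReturn("hi")

        greeter.greet("spark")  // matched by anyString()
        greeter.greet(null)     // Mockito 2 matchers are type-aware: anyString()/any(classOf[X]) no longer match null

        verify(greeter).greet(anyString())  // verifies the "spark" call only
        verify(greeter).greet(isNull())     // the null call needs isNull(), as in SparkSaslSuite below
      }
    }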
diff --git a/common/network-common/src/test/java/org/apache/spark/network/sasl/SparkSaslSuite.java b/common/network-common/src/test/java/org/apache/spark/network/sasl/SparkSaslSuite.java
index 6f15718bd8705..59adf9704cbf6 100644
--- a/common/network-common/src/test/java/org/apache/spark/network/sasl/SparkSaslSuite.java
+++ b/common/network-common/src/test/java/org/apache/spark/network/sasl/SparkSaslSuite.java
@@ -347,10 +347,10 @@ public void testRpcHandlerDelegate() throws Exception {
verify(handler).getStreamManager();
saslHandler.channelInactive(null);
- verify(handler).channelInactive(any(TransportClient.class));
+ verify(handler).channelInactive(isNull());
saslHandler.exceptionCaught(null, null);
- verify(handler).exceptionCaught(any(Throwable.class), any(TransportClient.class));
+ verify(handler).exceptionCaught(isNull(), isNull());
}
@Test
diff --git a/common/network-shuffle/src/test/java/org/apache/spark/network/shuffle/ExternalShuffleBlockHandlerSuite.java b/common/network-shuffle/src/test/java/org/apache/spark/network/shuffle/ExternalShuffleBlockHandlerSuite.java
index 7846b71d5a8b1..4cc9a16e1449f 100644
--- a/common/network-shuffle/src/test/java/org/apache/spark/network/shuffle/ExternalShuffleBlockHandlerSuite.java
+++ b/common/network-shuffle/src/test/java/org/apache/spark/network/shuffle/ExternalShuffleBlockHandlerSuite.java
@@ -27,7 +27,7 @@
import org.mockito.ArgumentCaptor;
import static org.junit.Assert.*;
-import static org.mockito.Matchers.any;
+import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.*;
import org.apache.spark.network.buffer.ManagedBuffer;
@@ -79,6 +79,8 @@ public void testRegisterExecutor() {
@SuppressWarnings("unchecked")
@Test
public void testOpenShuffleBlocks() {
+ when(client.getClientId()).thenReturn("app0");
+
RpcResponseCallback callback = mock(RpcResponseCallback.class);
ManagedBuffer block0Marker = new NioManagedBuffer(ByteBuffer.wrap(new byte[3]));
diff --git a/common/network-shuffle/src/test/java/org/apache/spark/network/shuffle/OneForOneBlockFetcherSuite.java b/common/network-shuffle/src/test/java/org/apache/spark/network/shuffle/OneForOneBlockFetcherSuite.java
index dc947a619bf02..95460637db89d 100644
--- a/common/network-shuffle/src/test/java/org/apache/spark/network/shuffle/OneForOneBlockFetcherSuite.java
+++ b/common/network-shuffle/src/test/java/org/apache/spark/network/shuffle/OneForOneBlockFetcherSuite.java
@@ -28,10 +28,10 @@
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;
-import static org.mockito.Matchers.any;
-import static org.mockito.Matchers.anyInt;
-import static org.mockito.Matchers.anyLong;
-import static org.mockito.Matchers.eq;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.ArgumentMatchers.anyInt;
+import static org.mockito.ArgumentMatchers.anyLong;
+import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
diff --git a/core/src/test/java/org/apache/spark/unsafe/map/AbstractBytesToBytesMapSuite.java b/core/src/test/java/org/apache/spark/unsafe/map/AbstractBytesToBytesMapSuite.java
index e5fbafc23d957..ecfebf8f8287e 100644
--- a/core/src/test/java/org/apache/spark/unsafe/map/AbstractBytesToBytesMapSuite.java
+++ b/core/src/test/java/org/apache/spark/unsafe/map/AbstractBytesToBytesMapSuite.java
@@ -50,8 +50,8 @@
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.mockito.Answers.RETURNS_SMART_NULLS;
-import static org.mockito.Matchers.any;
-import static org.mockito.Matchers.anyInt;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.ArgumentMatchers.anyInt;
import static org.mockito.Mockito.when;
diff --git a/core/src/test/scala/org/apache/spark/ExecutorAllocationManagerSuite.scala b/core/src/test/scala/org/apache/spark/ExecutorAllocationManagerSuite.scala
index d0389235cb724..38f5e8c9f0ac8 100644
--- a/core/src/test/scala/org/apache/spark/ExecutorAllocationManagerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/ExecutorAllocationManagerSuite.scala
@@ -19,7 +19,7 @@ package org.apache.spark
import scala.collection.mutable
-import org.mockito.Matchers.{any, eq => meq}
+import org.mockito.ArgumentMatchers.{any, eq => meq}
import org.mockito.Mockito.{mock, never, verify, when}
import org.scalatest.{BeforeAndAfter, PrivateMethodTester}
diff --git a/core/src/test/scala/org/apache/spark/HeartbeatReceiverSuite.scala b/core/src/test/scala/org/apache/spark/HeartbeatReceiverSuite.scala
index de479db5fbc0f..a69e589743ef9 100644
--- a/core/src/test/scala/org/apache/spark/HeartbeatReceiverSuite.scala
+++ b/core/src/test/scala/org/apache/spark/HeartbeatReceiverSuite.scala
@@ -23,8 +23,7 @@ import scala.collection.mutable
import scala.concurrent.Future
import scala.concurrent.duration._
-import org.mockito.Matchers
-import org.mockito.Matchers._
+import org.mockito.ArgumentMatchers.{any, eq => meq}
import org.mockito.Mockito.{mock, spy, verify, when}
import org.scalatest.{BeforeAndAfterEach, PrivateMethodTester}
@@ -151,7 +150,7 @@ class HeartbeatReceiverSuite
heartbeatReceiverClock.advance(executorTimeout)
heartbeatReceiverRef.askSync[Boolean](ExpireDeadHosts)
// Only the second executor should be expired as a dead host
- verify(scheduler).executorLost(Matchers.eq(executorId2), any())
+ verify(scheduler).executorLost(meq(executorId2), any())
val trackedExecutors = getTrackedExecutors
assert(trackedExecutors.size === 1)
assert(trackedExecutors.contains(executorId1))
@@ -223,10 +222,10 @@ class HeartbeatReceiverSuite
assert(!response.reregisterBlockManager)
// Additionally verify that the scheduler callback is called with the correct parameters
verify(scheduler).executorHeartbeatReceived(
- Matchers.eq(executorId),
- Matchers.eq(Array(1L -> metrics.accumulators())),
- Matchers.eq(blockManagerId),
- Matchers.eq(executorUpdates))
+ meq(executorId),
+ meq(Array(1L -> metrics.accumulators())),
+ meq(blockManagerId),
+ meq(executorUpdates))
}
}
diff --git a/core/src/test/scala/org/apache/spark/MapOutputTrackerSuite.scala b/core/src/test/scala/org/apache/spark/MapOutputTrackerSuite.scala
index 3e1a3d4f73069..c088da8fbf3ba 100644
--- a/core/src/test/scala/org/apache/spark/MapOutputTrackerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/MapOutputTrackerSuite.scala
@@ -19,7 +19,7 @@ package org.apache.spark
import scala.collection.mutable.ArrayBuffer
-import org.mockito.Matchers.any
+import org.mockito.ArgumentMatchers.any
import org.mockito.Mockito._
import org.apache.spark.LocalSparkContext._
diff --git a/core/src/test/scala/org/apache/spark/deploy/StandaloneDynamicAllocationSuite.scala b/core/src/test/scala/org/apache/spark/deploy/StandaloneDynamicAllocationSuite.scala
index 8567dd1f08233..8c3c38dbc7ea0 100644
--- a/core/src/test/scala/org/apache/spark/deploy/StandaloneDynamicAllocationSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/StandaloneDynamicAllocationSuite.scala
@@ -20,7 +20,7 @@ package org.apache.spark.deploy
import scala.collection.mutable
import scala.concurrent.duration._
-import org.mockito.Matchers.any
+import org.mockito.ArgumentMatchers.any
import org.mockito.Mockito.{mock, verify, when}
import org.scalatest.{BeforeAndAfterAll, PrivateMethodTester}
import org.scalatest.concurrent.Eventually._
diff --git a/core/src/test/scala/org/apache/spark/deploy/history/ApplicationCacheSuite.scala b/core/src/test/scala/org/apache/spark/deploy/history/ApplicationCacheSuite.scala
index 44f9c566a380d..0402d949e9042 100644
--- a/core/src/test/scala/org/apache/spark/deploy/history/ApplicationCacheSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/history/ApplicationCacheSuite.scala
@@ -24,7 +24,7 @@ import scala.collection.mutable
import com.codahale.metrics.Counter
import org.eclipse.jetty.servlet.ServletContextHandler
-import org.mockito.Matchers._
+import org.mockito.ArgumentMatchers.any
import org.mockito.Mockito._
import org.mockito.invocation.InvocationOnMock
import org.mockito.stubbing.Answer
diff --git a/core/src/test/scala/org/apache/spark/deploy/history/FsHistoryProviderSuite.scala b/core/src/test/scala/org/apache/spark/deploy/history/FsHistoryProviderSuite.scala
index 6d2e329094ae2..7d6efd95fbabe 100644
--- a/core/src/test/scala/org/apache/spark/deploy/history/FsHistoryProviderSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/history/FsHistoryProviderSuite.scala
@@ -34,7 +34,7 @@ import org.apache.hadoop.hdfs.{DFSInputStream, DistributedFileSystem}
import org.apache.hadoop.security.AccessControlException
import org.json4s.jackson.JsonMethods._
import org.mockito.ArgumentMatcher
-import org.mockito.Matchers.{any, argThat}
+import org.mockito.ArgumentMatchers.{any, argThat}
import org.mockito.Mockito.{doThrow, mock, spy, verify, when}
import org.scalatest.BeforeAndAfter
import org.scalatest.Matchers
@@ -933,7 +933,7 @@ class FsHistoryProviderSuite extends SparkFunSuite with BeforeAndAfter with Matc
val mockedFs = spy(provider.fs)
doThrow(new AccessControlException("Cannot read accessDenied file")).when(mockedFs).open(
argThat(new ArgumentMatcher[Path]() {
- override def matches(path: Any): Boolean = {
+ override def matches(path: Path): Boolean = {
path.asInstanceOf[Path].getName.toLowerCase(Locale.ROOT) == "accessdenied"
}
}))
diff --git a/core/src/test/scala/org/apache/spark/deploy/history/HistoryServerDiskManagerSuite.scala b/core/src/test/scala/org/apache/spark/deploy/history/HistoryServerDiskManagerSuite.scala
index 341a1e2443df0..f78469e132490 100644
--- a/core/src/test/scala/org/apache/spark/deploy/history/HistoryServerDiskManagerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/history/HistoryServerDiskManagerSuite.scala
@@ -20,8 +20,8 @@ package org.apache.spark.deploy.history
import java.io.File
import org.mockito.AdditionalAnswers
-import org.mockito.Matchers.{any, anyBoolean, anyLong, eq => meq}
-import org.mockito.Mockito._
+import org.mockito.ArgumentMatchers.{anyBoolean, anyLong, eq => meq}
+import org.mockito.Mockito.{doAnswer, spy}
import org.scalatest.BeforeAndAfter
import org.apache.spark.{SparkConf, SparkFunSuite}
@@ -32,6 +32,8 @@ import org.apache.spark.util.kvstore.KVStore
class HistoryServerDiskManagerSuite extends SparkFunSuite with BeforeAndAfter {
+ private def doReturn(value: Any) = org.mockito.Mockito.doReturn(value, Seq.empty: _*)
+
private val MAX_USAGE = 3L
private var testDir: File = _
diff --git a/core/src/test/scala/org/apache/spark/deploy/worker/DriverRunnerTest.scala b/core/src/test/scala/org/apache/spark/deploy/worker/DriverRunnerTest.scala
index 52956045d5985..1deac43897f90 100644
--- a/core/src/test/scala/org/apache/spark/deploy/worker/DriverRunnerTest.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/worker/DriverRunnerTest.scala
@@ -21,7 +21,7 @@ import java.io.File
import scala.concurrent.duration._
-import org.mockito.Matchers._
+import org.mockito.ArgumentMatchers.{any, anyInt}
import org.mockito.Mockito._
import org.mockito.invocation.InvocationOnMock
import org.mockito.stubbing.Answer
diff --git a/core/src/test/scala/org/apache/spark/deploy/worker/WorkerSuite.scala b/core/src/test/scala/org/apache/spark/deploy/worker/WorkerSuite.scala
index e3fe2b696aa1f..e5e5b5e428c49 100644
--- a/core/src/test/scala/org/apache/spark/deploy/worker/WorkerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/worker/WorkerSuite.scala
@@ -22,7 +22,7 @@ import java.util.function.Supplier
import org.mockito.{Mock, MockitoAnnotations}
import org.mockito.Answers.RETURNS_SMART_NULLS
-import org.mockito.Matchers._
+import org.mockito.ArgumentMatchers.any
import org.mockito.Mockito._
import org.mockito.invocation.InvocationOnMock
import org.mockito.stubbing.Answer
diff --git a/core/src/test/scala/org/apache/spark/executor/ExecutorSuite.scala b/core/src/test/scala/org/apache/spark/executor/ExecutorSuite.scala
index 32a94e60484e3..a5fe2026c0f77 100644
--- a/core/src/test/scala/org/apache/spark/executor/ExecutorSuite.scala
+++ b/core/src/test/scala/org/apache/spark/executor/ExecutorSuite.scala
@@ -30,7 +30,7 @@ import scala.concurrent.duration._
import scala.language.postfixOps
import org.mockito.ArgumentCaptor
-import org.mockito.Matchers.{any, eq => meq}
+import org.mockito.ArgumentMatchers.{any, eq => meq}
import org.mockito.Mockito.{inOrder, verify, when}
import org.mockito.invocation.InvocationOnMock
import org.mockito.stubbing.Answer
diff --git a/core/src/test/scala/org/apache/spark/memory/MemoryManagerSuite.scala b/core/src/test/scala/org/apache/spark/memory/MemoryManagerSuite.scala
index 85eeb5055ae03..8b35f1dfddb08 100644
--- a/core/src/test/scala/org/apache/spark/memory/MemoryManagerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/memory/MemoryManagerSuite.scala
@@ -23,7 +23,7 @@ import scala.collection.mutable
import scala.concurrent.{ExecutionContext, Future}
import scala.concurrent.duration.Duration
-import org.mockito.Matchers.{any, anyLong}
+import org.mockito.ArgumentMatchers.{any, anyLong}
import org.mockito.Mockito.{mock, when, RETURNS_SMART_NULLS}
import org.mockito.invocation.InvocationOnMock
import org.mockito.stubbing.Answer
diff --git a/core/src/test/scala/org/apache/spark/rpc/RpcEnvSuite.scala b/core/src/test/scala/org/apache/spark/rpc/RpcEnvSuite.scala
index 5cb2b561d6bce..558b7fa49832b 100644
--- a/core/src/test/scala/org/apache/spark/rpc/RpcEnvSuite.scala
+++ b/core/src/test/scala/org/apache/spark/rpc/RpcEnvSuite.scala
@@ -29,7 +29,7 @@ import scala.concurrent.duration._
import scala.language.postfixOps
import com.google.common.io.Files
-import org.mockito.Matchers.any
+import org.mockito.ArgumentMatchers.any
import org.mockito.Mockito.{mock, never, verify, when}
import org.scalatest.BeforeAndAfterAll
import org.scalatest.concurrent.Eventually._
diff --git a/core/src/test/scala/org/apache/spark/rpc/netty/NettyRpcHandlerSuite.scala b/core/src/test/scala/org/apache/spark/rpc/netty/NettyRpcHandlerSuite.scala
index a71d8726e7066..4bc001fe8f7c5 100644
--- a/core/src/test/scala/org/apache/spark/rpc/netty/NettyRpcHandlerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/rpc/netty/NettyRpcHandlerSuite.scala
@@ -21,7 +21,7 @@ import java.net.InetSocketAddress
import java.nio.ByteBuffer
import io.netty.channel.Channel
-import org.mockito.Matchers._
+import org.mockito.ArgumentMatchers.any
import org.mockito.Mockito._
import org.apache.spark.SparkFunSuite
diff --git a/core/src/test/scala/org/apache/spark/scheduler/BlacklistTrackerSuite.scala b/core/src/test/scala/org/apache/spark/scheduler/BlacklistTrackerSuite.scala
index 96c8404327e24..aea4c5f96bbe6 100644
--- a/core/src/test/scala/org/apache/spark/scheduler/BlacklistTrackerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/scheduler/BlacklistTrackerSuite.scala
@@ -17,7 +17,7 @@
package org.apache.spark.scheduler
-import org.mockito.Matchers.any
+import org.mockito.ArgumentMatchers.any
import org.mockito.Mockito.{never, verify, when}
import org.mockito.invocation.InvocationOnMock
import org.mockito.stubbing.Answer
diff --git a/core/src/test/scala/org/apache/spark/scheduler/MapStatusSuite.scala b/core/src/test/scala/org/apache/spark/scheduler/MapStatusSuite.scala
index 2155a0f2b6c21..f41ffb7f2c0b4 100644
--- a/core/src/test/scala/org/apache/spark/scheduler/MapStatusSuite.scala
+++ b/core/src/test/scala/org/apache/spark/scheduler/MapStatusSuite.scala
@@ -21,7 +21,7 @@ import java.io.{ByteArrayInputStream, ByteArrayOutputStream, ObjectInputStream,
import scala.util.Random
-import org.mockito.Mockito._
+import org.mockito.Mockito.mock
import org.roaringbitmap.RoaringBitmap
import org.apache.spark.{SparkConf, SparkContext, SparkEnv, SparkFunSuite}
@@ -31,6 +31,7 @@ import org.apache.spark.serializer.{JavaSerializer, KryoSerializer}
import org.apache.spark.storage.BlockManagerId
class MapStatusSuite extends SparkFunSuite {
+ private def doReturn(value: Any) = org.mockito.Mockito.doReturn(value, Seq.empty: _*)
test("compressSize") {
assert(MapStatus.compressSize(0L) === 0)
diff --git a/core/src/test/scala/org/apache/spark/scheduler/OutputCommitCoordinatorSuite.scala b/core/src/test/scala/org/apache/spark/scheduler/OutputCommitCoordinatorSuite.scala
index 158c9eb75f2b6..a560013dba963 100644
--- a/core/src/test/scala/org/apache/spark/scheduler/OutputCommitCoordinatorSuite.scala
+++ b/core/src/test/scala/org/apache/spark/scheduler/OutputCommitCoordinatorSuite.scala
@@ -26,8 +26,8 @@ import scala.language.postfixOps
import org.apache.hadoop.mapred._
import org.apache.hadoop.mapreduce.TaskType
-import org.mockito.Matchers
-import org.mockito.Mockito._
+import org.mockito.ArgumentMatchers.{any, eq => meq}
+import org.mockito.Mockito.{doAnswer, spy, times, verify}
import org.mockito.invocation.InvocationOnMock
import org.mockito.stubbing.Answer
import org.scalatest.BeforeAndAfter
@@ -71,6 +71,8 @@ import org.apache.spark.util.{ThreadUtils, Utils}
*/
class OutputCommitCoordinatorSuite extends SparkFunSuite with BeforeAndAfter {
+ private def doReturn(value: Any) = org.mockito.Mockito.doReturn(value, Seq.empty: _*)
+
var outputCommitCoordinator: OutputCommitCoordinator = null
var tempDir: File = null
var sc: SparkContext = null
@@ -103,7 +105,7 @@ class OutputCommitCoordinatorSuite extends SparkFunSuite with BeforeAndAfter {
invoke.callRealMethod()
mockTaskScheduler.backend.reviveOffers()
}
- }).when(mockTaskScheduler).submitTasks(Matchers.any())
+ }).when(mockTaskScheduler).submitTasks(any())
doAnswer(new Answer[TaskSetManager]() {
override def answer(invoke: InvocationOnMock): TaskSetManager = {
@@ -123,7 +125,7 @@ class OutputCommitCoordinatorSuite extends SparkFunSuite with BeforeAndAfter {
}
}
}
- }).when(mockTaskScheduler).createTaskSetManager(Matchers.any(), Matchers.any())
+ }).when(mockTaskScheduler).createTaskSetManager(any(), any())
sc.taskScheduler = mockTaskScheduler
val dagSchedulerWithMockTaskScheduler = new DAGScheduler(sc, mockTaskScheduler)
@@ -154,7 +156,7 @@ class OutputCommitCoordinatorSuite extends SparkFunSuite with BeforeAndAfter {
test("Job should not complete if all commits are denied") {
// Create a mock OutputCommitCoordinator that denies all attempts to commit
doReturn(false).when(outputCommitCoordinator).handleAskPermissionToCommit(
- Matchers.any(), Matchers.any(), Matchers.any(), Matchers.any())
+ any(), any(), any(), any())
val rdd: RDD[Int] = sc.parallelize(Seq(1), 1)
def resultHandler(x: Int, y: Unit): Unit = {}
val futureAction: SimpleFutureAction[Unit] = sc.submitJob[Int, Unit, Unit](rdd,
@@ -268,8 +270,8 @@ class OutputCommitCoordinatorSuite extends SparkFunSuite with BeforeAndAfter {
assert(retriedStage.size === 1)
assert(sc.dagScheduler.outputCommitCoordinator.isEmpty)
verify(sc.env.outputCommitCoordinator, times(2))
- .stageStart(Matchers.eq(retriedStage.head), Matchers.any())
- verify(sc.env.outputCommitCoordinator).stageEnd(Matchers.eq(retriedStage.head))
+ .stageStart(meq(retriedStage.head), any())
+ verify(sc.env.outputCommitCoordinator).stageEnd(meq(retriedStage.head))
}
}
diff --git a/core/src/test/scala/org/apache/spark/scheduler/TaskContextSuite.scala b/core/src/test/scala/org/apache/spark/scheduler/TaskContextSuite.scala
index aa9c36c0aaacb..3bfc97b80184c 100644
--- a/core/src/test/scala/org/apache/spark/scheduler/TaskContextSuite.scala
+++ b/core/src/test/scala/org/apache/spark/scheduler/TaskContextSuite.scala
@@ -19,7 +19,7 @@ package org.apache.spark.scheduler
import java.util.Properties
-import org.mockito.Matchers.any
+import org.mockito.ArgumentMatchers.any
import org.mockito.Mockito._
import org.scalatest.BeforeAndAfter
diff --git a/core/src/test/scala/org/apache/spark/scheduler/TaskResultGetterSuite.scala b/core/src/test/scala/org/apache/spark/scheduler/TaskResultGetterSuite.scala
index efb8b15cf6b4d..ea1439cfebca2 100644
--- a/core/src/test/scala/org/apache/spark/scheduler/TaskResultGetterSuite.scala
+++ b/core/src/test/scala/org/apache/spark/scheduler/TaskResultGetterSuite.scala
@@ -28,7 +28,7 @@ import scala.util.control.NonFatal
import com.google.common.util.concurrent.MoreExecutors
import org.mockito.ArgumentCaptor
-import org.mockito.Matchers.{any, anyLong}
+import org.mockito.ArgumentMatchers.{any, anyLong}
import org.mockito.Mockito.{spy, times, verify}
import org.scalatest.BeforeAndAfter
import org.scalatest.concurrent.Eventually._
diff --git a/core/src/test/scala/org/apache/spark/scheduler/TaskSchedulerImplSuite.scala b/core/src/test/scala/org/apache/spark/scheduler/TaskSchedulerImplSuite.scala
index 29172b4664e32..9c555a923d625 100644
--- a/core/src/test/scala/org/apache/spark/scheduler/TaskSchedulerImplSuite.scala
+++ b/core/src/test/scala/org/apache/spark/scheduler/TaskSchedulerImplSuite.scala
@@ -22,7 +22,7 @@ import java.nio.ByteBuffer
import scala.collection.mutable.HashMap
import scala.concurrent.duration._
-import org.mockito.Matchers.{anyInt, anyObject, anyString, eq => meq}
+import org.mockito.ArgumentMatchers.{any, anyInt, anyString, eq => meq}
import org.mockito.Mockito.{atLeast, atMost, never, spy, times, verify, when}
import org.scalatest.BeforeAndAfterEach
import org.scalatest.concurrent.Eventually
@@ -430,7 +430,7 @@ class TaskSchedulerImplSuite extends SparkFunSuite with LocalSparkContext with B
verify(blacklist, never).updateBlacklistForSuccessfulTaskSet(
stageId = meq(2),
stageAttemptId = anyInt(),
- failuresByExec = anyObject())
+ failuresByExec = any())
}
test("scheduled tasks obey node and executor blacklists") {
@@ -504,7 +504,7 @@ class TaskSchedulerImplSuite extends SparkFunSuite with LocalSparkContext with B
WorkerOffer("executor3", "host1", 2)
)).flatten.size === 0)
assert(tsm.isZombie)
- verify(tsm).abort(anyString(), anyObject())
+ verify(tsm).abort(anyString(), any())
}
test("SPARK-22148 abort timer should kick in when task is completely
blacklisted & no new " +
@@ -1184,7 +1184,7 @@ class TaskSchedulerImplSuite extends SparkFunSuite with
LocalSparkContext with B
assert(finalTsm.isZombie)
// no taskset has completed all of its tasks, so no updates to the blacklist tracker yet
- verify(blacklist, never).updateBlacklistForSuccessfulTaskSet(anyInt(), anyInt(), anyObject())
+ verify(blacklist, never).updateBlacklistForSuccessfulTaskSet(anyInt(), anyInt(), any())
// finally, lets complete all the tasks. We simulate failures in attempt 1, but everything
// else succeeds, to make sure we get the right updates to the blacklist in all cases.
@@ -1202,7 +1202,7 @@ class TaskSchedulerImplSuite extends SparkFunSuite with LocalSparkContext with B
// we update the blacklist for the stage attempts with all successful tasks. Even though
// some tasksets had failures, we still consider them all successful from a blacklisting
// perspective, as the failures weren't from a problem w/ the tasks themselves.
- verify(blacklist).updateBlacklistForSuccessfulTaskSet(meq(0), meq(stageAttempt), anyObject())
+ verify(blacklist).updateBlacklistForSuccessfulTaskSet(meq(0), meq(stageAttempt), any())
}
}
diff --git a/core/src/test/scala/org/apache/spark/scheduler/TaskSetBlacklistSuite.scala b/core/src/test/scala/org/apache/spark/scheduler/TaskSetBlacklistSuite.scala
index 6e2709dbe1e8b..b3bc76687ce1b 100644
--- a/core/src/test/scala/org/apache/spark/scheduler/TaskSetBlacklistSuite.scala
+++ b/core/src/test/scala/org/apache/spark/scheduler/TaskSetBlacklistSuite.scala
@@ -16,7 +16,7 @@
*/
package org.apache.spark.scheduler
-import org.mockito.Matchers.isA
+import org.mockito.ArgumentMatchers.isA
import org.mockito.Mockito.{never, verify}
import org.scalatest.BeforeAndAfterEach
import org.scalatest.mockito.MockitoSugar
diff --git a/core/src/test/scala/org/apache/spark/scheduler/TaskSetManagerSuite.scala b/core/src/test/scala/org/apache/spark/scheduler/TaskSetManagerSuite.scala
index f73ff67837c6d..f9dfd2c456c52 100644
--- a/core/src/test/scala/org/apache/spark/scheduler/TaskSetManagerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/scheduler/TaskSetManagerSuite.scala
@@ -22,7 +22,7 @@ import java.util.{Properties, Random}
import scala.collection.mutable
import scala.collection.mutable.ArrayBuffer
-import org.mockito.Matchers.{any, anyInt, anyString}
+import org.mockito.ArgumentMatchers.{any, anyInt, anyString}
import org.mockito.Mockito.{mock, never, spy, times, verify, when}
import org.mockito.invocation.InvocationOnMock
import org.mockito.stubbing.Answer
@@ -1319,7 +1319,7 @@ class TaskSetManagerSuite extends SparkFunSuite with LocalSparkContext with Logg
when(taskSetManagerSpy.addPendingTask(anyInt())).thenAnswer(
new Answer[Unit] {
override def answer(invocationOnMock: InvocationOnMock): Unit = {
- val task = invocationOnMock.getArgumentAt(0, classOf[Int])
+ val task: Int = invocationOnMock.getArgument(0)
assert(taskSetManager.taskSetBlacklistHelperOpt.get.
isExecutorBlacklistedForTask(exec, task))
}
diff --git a/core/src/test/scala/org/apache/spark/security/CryptoStreamUtilsSuite.scala b/core/src/test/scala/org/apache/spark/security/CryptoStreamUtilsSuite.scala
index 0d3611c80b8d0..e5d1bf4fde9e4 100644
--- a/core/src/test/scala/org/apache/spark/security/CryptoStreamUtilsSuite.scala
+++ b/core/src/test/scala/org/apache/spark/security/CryptoStreamUtilsSuite.scala
@@ -24,7 +24,7 @@ import java.nio.file.Files
import java.util.{Arrays, Random, UUID}
import com.google.common.io.ByteStreams
-import org.mockito.Matchers.any
+import org.mockito.ArgumentMatchers.any
import org.mockito.Mockito._
import org.apache.spark._
diff --git a/core/src/test/scala/org/apache/spark/shuffle/sort/BypassMergeSortShuffleWriterSuite.scala b/core/src/test/scala/org/apache/spark/shuffle/sort/BypassMergeSortShuffleWriterSuite.scala
index 4467c3241a947..7f956c26d0ff0 100644
--- a/core/src/test/scala/org/apache/spark/shuffle/sort/BypassMergeSortShuffleWriterSuite.scala
+++ b/core/src/test/scala/org/apache/spark/shuffle/sort/BypassMergeSortShuffleWriterSuite.scala
@@ -25,7 +25,7 @@ import scala.collection.mutable.ArrayBuffer
import org.mockito.{Mock, MockitoAnnotations}
import org.mockito.Answers.RETURNS_SMART_NULLS
-import org.mockito.Matchers._
+import org.mockito.ArgumentMatchers.{any, anyInt}
import org.mockito.Mockito._
import org.mockito.invocation.InvocationOnMock
import org.mockito.stubbing.Answer
diff --git a/core/src/test/scala/org/apache/spark/shuffle/sort/IndexShuffleBlockResolverSuite.scala b/core/src/test/scala/org/apache/spark/shuffle/sort/IndexShuffleBlockResolverSuite.scala
index 4ce379b76b551..0154d0b6ef6f9 100644
--- a/core/src/test/scala/org/apache/spark/shuffle/sort/IndexShuffleBlockResolverSuite.scala
+++ b/core/src/test/scala/org/apache/spark/shuffle/sort/IndexShuffleBlockResolverSuite.scala
@@ -21,7 +21,7 @@ import java.io.{DataInputStream, File, FileInputStream, FileOutputStream}
import org.mockito.{Mock, MockitoAnnotations}
import org.mockito.Answers.RETURNS_SMART_NULLS
-import org.mockito.Matchers._
+import org.mockito.ArgumentMatchers.any
import org.mockito.Mockito._
import org.mockito.invocation.InvocationOnMock
import org.mockito.stubbing.Answer
diff --git a/core/src/test/scala/org/apache/spark/shuffle/sort/SortShuffleManagerSuite.scala b/core/src/test/scala/org/apache/spark/shuffle/sort/SortShuffleManagerSuite.scala
index f29dac965c803..e5f3aab6a6a1a 100644
--- a/core/src/test/scala/org/apache/spark/shuffle/sort/SortShuffleManagerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/shuffle/sort/SortShuffleManagerSuite.scala
@@ -17,7 +17,7 @@
package org.apache.spark.shuffle.sort
-import org.mockito.Mockito._
+import org.mockito.Mockito.{mock, when}
import org.mockito.invocation.InvocationOnMock
import org.mockito.stubbing.Answer
import org.scalatest.Matchers
@@ -31,6 +31,8 @@ import org.apache.spark.serializer.{JavaSerializer, KryoSerializer, Serializer}
*/
class SortShuffleManagerSuite extends SparkFunSuite with Matchers {
+ private def doReturn(value: Any) = org.mockito.Mockito.doReturn(value, Seq.empty: _*)
+
import SortShuffleManager.canUseSerializedShuffle
private class RuntimeExceptionAnswer extends Answer[Object] {
diff --git a/core/src/test/scala/org/apache/spark/storage/BlockManagerSuite.scala b/core/src/test/scala/org/apache/spark/storage/BlockManagerSuite.scala
index e866342e4472c..a7bb2a03360aa 100644
--- a/core/src/test/scala/org/apache/spark/storage/BlockManagerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/storage/BlockManagerSuite.scala
@@ -27,7 +27,7 @@ import scala.language.{implicitConversions, postfixOps}
import scala.reflect.ClassTag
import org.apache.commons.lang3.RandomUtils
-import org.mockito.{Matchers => mc}
+import org.mockito.{ArgumentMatchers => mc}
import org.mockito.Mockito.{mock, times, verify, when}
import org.scalatest._
import org.scalatest.concurrent.{Signaler, ThreadSignaler, TimeLimits}
diff --git a/core/src/test/scala/org/apache/spark/storage/PartiallyUnrolledIteratorSuite.scala b/core/src/test/scala/org/apache/spark/storage/PartiallyUnrolledIteratorSuite.scala
index cbc903f17ad75..56860b2e55709 100644
--- a/core/src/test/scala/org/apache/spark/storage/PartiallyUnrolledIteratorSuite.scala
+++ b/core/src/test/scala/org/apache/spark/storage/PartiallyUnrolledIteratorSuite.scala
@@ -17,7 +17,7 @@
package org.apache.spark.storage
-import org.mockito.Matchers
+import org.mockito.ArgumentMatchers.{eq => meq}
import org.mockito.Mockito._
import org.scalatest.mockito.MockitoSugar
@@ -45,7 +45,7 @@ class PartiallyUnrolledIteratorSuite extends SparkFunSuite with MockitoSugar {
joinIterator.hasNext
joinIterator.hasNext
verify(memoryStore, times(1))
- .releaseUnrollMemoryForThisTask(Matchers.eq(ON_HEAP), Matchers.eq(unrollSize.toLong))
+ .releaseUnrollMemoryForThisTask(meq(ON_HEAP), meq(unrollSize.toLong))
// Secondly, iterate over rest iterator
(unrollSize until unrollSize + restSize).foreach { value =>
diff --git a/core/src/test/scala/org/apache/spark/storage/ShuffleBlockFetcherIteratorSuite.scala b/core/src/test/scala/org/apache/spark/storage/ShuffleBlockFetcherIteratorSuite.scala
index 01ee9ef0825f8..6b83243fe496c 100644
--- a/core/src/test/scala/org/apache/spark/storage/ShuffleBlockFetcherIteratorSuite.scala
+++ b/core/src/test/scala/org/apache/spark/storage/ShuffleBlockFetcherIteratorSuite.scala
@@ -24,8 +24,8 @@ import java.util.concurrent.Semaphore
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future
-import org.mockito.Matchers.{any, eq => meq}
-import org.mockito.Mockito._
+import org.mockito.ArgumentMatchers.{any, eq => meq}
+import org.mockito.Mockito.{mock, times, verify, when}
import org.mockito.invocation.InvocationOnMock
import org.mockito.stubbing.Answer
import org.scalatest.PrivateMethodTester
@@ -40,6 +40,9 @@ import org.apache.spark.util.Utils
class ShuffleBlockFetcherIteratorSuite extends SparkFunSuite with PrivateMethodTester {
+
+ private def doReturn(value: Any) = org.mockito.Mockito.doReturn(value, Seq.empty: _*)
+
// Some of the tests are quite tricky because we are testing the cleanup behavior
// in the presence of faults.
diff --git a/external/kafka-0-10-sql/src/test/scala/org/apache/spark/sql/kafka010/KafkaDelegationTokenTest.scala b/external/kafka-0-10-sql/src/test/scala/org/apache/spark/sql/kafka010/KafkaDelegationTokenTest.scala
index 1899c65c721bb..31247ab219082 100644
--- a/external/kafka-0-10-sql/src/test/scala/org/apache/spark/sql/kafka010/KafkaDelegationTokenTest.scala
+++ b/external/kafka-0-10-sql/src/test/scala/org/apache/spark/sql/kafka010/KafkaDelegationTokenTest.scala
@@ -22,7 +22,7 @@ import javax.security.auth.login.{AppConfigurationEntry, Configuration}
import org.apache.hadoop.security.{Credentials, UserGroupInformation}
import org.apache.hadoop.security.token.Token
-import org.mockito.Mockito.{doReturn, mock}
+import org.mockito.Mockito.mock
import org.scalatest.BeforeAndAfterEach
import org.apache.spark.{SparkConf, SparkEnv, SparkFunSuite}
@@ -35,6 +35,8 @@ import org.apache.spark.deploy.security.KafkaTokenUtil.KafkaDelegationTokenIdent
trait KafkaDelegationTokenTest extends BeforeAndAfterEach {
self: SparkFunSuite =>
+ private def doReturn(value: Any) = org.mockito.Mockito.doReturn(value, Seq.empty: _*)
+
protected val tokenId = "tokenId" + ju.UUID.randomUUID().toString
protected val tokenPassword = "tokenPassword" + ju.UUID.randomUUID().toString
diff --git a/external/kinesis-asl/src/test/scala/org/apache/spark/streaming/kinesis/KinesisCheckpointerSuite.scala b/external/kinesis-asl/src/test/scala/org/apache/spark/streaming/kinesis/KinesisCheckpointerSuite.scala
index e26f4477d1d7d..bd31b7dc49a64 100644
--- a/external/kinesis-asl/src/test/scala/org/apache/spark/streaming/kinesis/KinesisCheckpointerSuite.scala
+++ b/external/kinesis-asl/src/test/scala/org/apache/spark/streaming/kinesis/KinesisCheckpointerSuite.scala
@@ -24,7 +24,7 @@ import scala.concurrent.duration._
import scala.language.postfixOps
import com.amazonaws.services.kinesis.clientlibrary.interfaces.IRecordProcessorCheckpointer
-import org.mockito.Matchers._
+import org.mockito.ArgumentMatchers._
import org.mockito.Mockito._
import org.mockito.invocation.InvocationOnMock
import org.mockito.stubbing.Answer
diff --git a/external/kinesis-asl/src/test/scala/org/apache/spark/streaming/kinesis/KinesisReceiverSuite.scala b/external/kinesis-asl/src/test/scala/org/apache/spark/streaming/kinesis/KinesisReceiverSuite.scala
index 2fadda271ea28..7531a9cc400d9 100644
--- a/external/kinesis-asl/src/test/scala/org/apache/spark/streaming/kinesis/KinesisReceiverSuite.scala
+++ b/external/kinesis-asl/src/test/scala/org/apache/spark/streaming/kinesis/KinesisReceiverSuite.scala
@@ -24,9 +24,8 @@ import com.amazonaws.services.kinesis.clientlibrary.exceptions._
import com.amazonaws.services.kinesis.clientlibrary.interfaces.IRecordProcessorCheckpointer
import com.amazonaws.services.kinesis.clientlibrary.lib.worker.ShutdownReason
import com.amazonaws.services.kinesis.model.Record
-import org.mockito.Matchers._
-import org.mockito.Matchers.{eq => meq}
-import org.mockito.Mockito._
+import org.mockito.ArgumentMatchers.{anyListOf, anyString, eq => meq}
+import org.mockito.Mockito.{never, times, verify, when}
import org.scalatest.{BeforeAndAfter, Matchers}
import org.scalatest.mockito.MockitoSugar
diff --git a/launcher/src/test/java/org/apache/spark/launcher/SparkSubmitOptionParserSuite.java b/launcher/src/test/java/org/apache/spark/launcher/SparkSubmitOptionParserSuite.java
index 9ff7aceb581f4..4e26cf6c109c8 100644
--- a/launcher/src/test/java/org/apache/spark/launcher/SparkSubmitOptionParserSuite.java
+++ b/launcher/src/test/java/org/apache/spark/launcher/SparkSubmitOptionParserSuite.java
@@ -23,6 +23,7 @@
import org.junit.Before;
import org.junit.Test;
+import static org.mockito.ArgumentMatchers.isNull;
import static org.mockito.Mockito.*;
public class SparkSubmitOptionParserSuite extends BaseSuite {
@@ -48,14 +49,17 @@ public void testAllOptions() {
}
}
+ int nullCount = 0;
for (String[] switchNames : parser.switches) {
int switchCount = 0;
for (String name : switchNames) {
parser.parse(Arrays.asList(name));
count++;
+ nullCount++;
switchCount++;
verify(parser, times(switchCount)).handle(eq(switchNames[0]), same(null));
- verify(parser, times(count)).handle(anyString(), any(String.class));
+ verify(parser, times(nullCount)).handle(anyString(), isNull());
+ verify(parser, times(count - nullCount)).handle(anyString(), any(String.class));
verify(parser, times(count)).handleExtraArgs(eq(Collections.emptyList()));
}
}
diff --git a/mllib/src/test/scala/org/apache/spark/ml/PipelineSuite.scala b/mllib/src/test/scala/org/apache/spark/ml/PipelineSuite.scala
index 7848eae931a06..1183cb0617610 100644
--- a/mllib/src/test/scala/org/apache/spark/ml/PipelineSuite.scala
+++ b/mllib/src/test/scala/org/apache/spark/ml/PipelineSuite.scala
@@ -20,7 +20,7 @@ package org.apache.spark.ml
import scala.collection.JavaConverters._
import org.apache.hadoop.fs.Path
-import org.mockito.Matchers.{any, eq => meq}
+import org.mockito.ArgumentMatchers.{any, eq => meq}
import org.mockito.Mockito.when
import org.scalatest.mockito.MockitoSugar.mock
diff --git a/pom.xml b/pom.xml
index a433659cd2002..965f14f68bbec 100644
--- a/pom.xml
+++ b/pom.xml
@@ -764,7 +764,7 @@
<dependency>
<groupId>org.mockito</groupId>
<artifactId>mockito-core</artifactId>
- <version>1.10.19</version>
+ <version>2.23.4</version>
<scope>test</scope>
</dependency>
<dependency>
diff --git a/repl/src/test/scala/org/apache/spark/repl/ExecutorClassLoaderSuite.scala b/repl/src/test/scala/org/apache/spark/repl/ExecutorClassLoaderSuite.scala
index ac528ecb829b0..e9ed01ff22338 100644
--- a/repl/src/test/scala/org/apache/spark/repl/ExecutorClassLoaderSuite.scala
+++ b/repl/src/test/scala/org/apache/spark/repl/ExecutorClassLoaderSuite.scala
@@ -30,7 +30,7 @@ import scala.io.Source
import scala.language.implicitConversions
import com.google.common.io.Files
-import org.mockito.Matchers.anyString
+import org.mockito.ArgumentMatchers.anyString
import org.mockito.Mockito._
import org.mockito.invocation.InvocationOnMock
import org.mockito.stubbing.Answer
diff --git a/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/PodBuilderSuite.scala b/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/PodBuilderSuite.scala
index 7dde0c1377168..707c823d69cf0 100644
--- a/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/PodBuilderSuite.scala
+++ b/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/PodBuilderSuite.scala
@@ -21,7 +21,7 @@ import java.io.File
import io.fabric8.kubernetes.api.model.{Config => _, _}
import io.fabric8.kubernetes.client.KubernetesClient
import io.fabric8.kubernetes.client.dsl.{MixedOperation, PodResource}
-import org.mockito.Matchers.any
+import org.mockito.ArgumentMatchers.any
import org.mockito.Mockito.{mock, never, verify, when}
import scala.collection.JavaConverters._
diff --git a/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/features/KubernetesFeaturesTestUtils.scala b/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/features/KubernetesFeaturesTestUtils.scala
index 076b681be2397..95de7d9059540 100644
--- a/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/features/KubernetesFeaturesTestUtils.scala
+++ b/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/features/KubernetesFeaturesTestUtils.scala
@@ -20,8 +20,8 @@ import scala.collection.JavaConverters._
import scala.reflect.ClassTag
import io.fabric8.kubernetes.api.model.{Container, HasMetadata, PodBuilder, SecretBuilder}
-import org.mockito.Matchers
-import org.mockito.Mockito._
+import org.mockito.ArgumentMatchers.any
+import org.mockito.Mockito.{mock, when}
import org.mockito.invocation.InvocationOnMock
import org.mockito.stubbing.Answer
@@ -37,10 +37,10 @@ object KubernetesFeaturesTestUtils {
when(mockStep.getAdditionalPodSystemProperties())
.thenReturn(Map(stepType -> stepType))
- when(mockStep.configurePod(Matchers.any(classOf[SparkPod])))
+ when(mockStep.configurePod(any(classOf[SparkPod])))
.thenAnswer(new Answer[SparkPod]() {
override def answer(invocation: InvocationOnMock): SparkPod = {
- val originalPod = invocation.getArgumentAt(0, classOf[SparkPod])
+ val originalPod: SparkPod = invocation.getArgument(0)
val configuredPod = new PodBuilder(originalPod.pod)
.editOrNewMetadata()
.addToLabels(stepType, stepType)
diff --git a/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/submit/ClientSuite.scala b/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/submit/ClientSuite.scala
index 1bb926cbca23d..aa421be6e8412 100644
--- a/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/submit/ClientSuite.scala
+++ b/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/submit/ClientSuite.scala
@@ -20,7 +20,7 @@ import io.fabric8.kubernetes.api.model._
import io.fabric8.kubernetes.client.{KubernetesClient, Watch}
import io.fabric8.kubernetes.client.dsl.PodResource
import org.mockito.{ArgumentCaptor, Mock, MockitoAnnotations}
-import org.mockito.Mockito.{doReturn, verify, when}
+import org.mockito.Mockito.{verify, when}
import org.scalatest.BeforeAndAfter
import org.scalatest.mockito.MockitoSugar._
@@ -31,6 +31,8 @@ import org.apache.spark.deploy.k8s.Fabric8Aliases._
class ClientSuite extends SparkFunSuite with BeforeAndAfter {
+ private def doReturn(value: Any) = org.mockito.Mockito.doReturn(value, Seq.empty: _*)
+
private val DRIVER_POD_UID = "pod-id"
private val DRIVER_POD_API_VERSION = "v1"
private val DRIVER_POD_KIND = "pod"
diff --git a/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/scheduler/cluster/k8s/ExecutorPodsAllocatorSuite.scala b/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/scheduler/cluster/k8s/ExecutorPodsAllocatorSuite.scala
index 278a3821a6f3d..55d9adc212f92 100644
--- a/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/scheduler/cluster/k8s/ExecutorPodsAllocatorSuite.scala
+++ b/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/scheduler/cluster/k8s/ExecutorPodsAllocatorSuite.scala
@@ -20,7 +20,7 @@ import io.fabric8.kubernetes.api.model.{DoneablePod, Pod, PodBuilder}
import io.fabric8.kubernetes.client.KubernetesClient
import io.fabric8.kubernetes.client.dsl.PodResource
import org.mockito.{ArgumentMatcher, Matchers, Mock, MockitoAnnotations}
-import org.mockito.Matchers.{any, eq => meq}
+import org.mockito.ArgumentMatchers.{any, eq => meq}
import org.mockito.Mockito.{never, times, verify, when}
import org.mockito.invocation.InvocationOnMock
import org.mockito.stubbing.Answer
@@ -156,7 +156,7 @@ class ExecutorPodsAllocatorSuite extends SparkFunSuite with BeforeAndAfter {
private def executorPodAnswer(): Answer[SparkPod] = {
new Answer[SparkPod] {
override def answer(invocation: InvocationOnMock): SparkPod = {
- val k8sConf = invocation.getArgumentAt(0, classOf[KubernetesExecutorConf])
+ val k8sConf: KubernetesExecutorConf = invocation.getArgument(0)
executorPodWithId(k8sConf.executorId.toInt)
}
}
diff --git a/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/scheduler/cluster/k8s/ExecutorPodsLifecycleManagerSuite.scala b/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/scheduler/cluster/k8s/ExecutorPodsLifecycleManagerSuite.scala
index 7411f8f9d69e9..b20ed4799e325 100644
--- a/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/scheduler/cluster/k8s/ExecutorPodsLifecycleManagerSuite.scala
+++ b/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/scheduler/cluster/k8s/ExecutorPodsLifecycleManagerSuite.scala
@@ -21,7 +21,7 @@ import io.fabric8.kubernetes.api.model.{DoneablePod, Pod}
import io.fabric8.kubernetes.client.KubernetesClient
import io.fabric8.kubernetes.client.dsl.PodResource
import org.mockito.{Mock, MockitoAnnotations}
-import org.mockito.Matchers.any
+import org.mockito.ArgumentMatchers.any
import org.mockito.Mockito.{mock, never, times, verify, when}
import org.mockito.invocation.InvocationOnMock
import org.mockito.stubbing.Answer
@@ -128,7 +128,7 @@ class ExecutorPodsLifecycleManagerSuite extends SparkFunSuite with BeforeAndAfte
private def namedPodsAnswer(): Answer[PodResource[Pod, DoneablePod]] = {
new Answer[PodResource[Pod, DoneablePod]] {
override def answer(invocation: InvocationOnMock): PodResource[Pod, DoneablePod] = {
- val podName = invocation.getArgumentAt(0, classOf[String])
+ val podName: String = invocation.getArgument(0)
namedExecutorPods.getOrElseUpdate(
podName, mock(classOf[PodResource[Pod, DoneablePod]]))
}
diff --git a/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/scheduler/cluster/k8s/KubernetesClusterSchedulerBackendSuite.scala b/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/scheduler/cluster/k8s/KubernetesClusterSchedulerBackendSuite.scala
index 6e182bed459f8..8ed934d91dd7e 100644
--- a/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/scheduler/cluster/k8s/KubernetesClusterSchedulerBackendSuite.scala
+++ b/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/scheduler/cluster/k8s/KubernetesClusterSchedulerBackendSuite.scala
@@ -19,7 +19,7 @@ package org.apache.spark.scheduler.cluster.k8s
import io.fabric8.kubernetes.client.KubernetesClient
import org.jmock.lib.concurrent.DeterministicScheduler
import org.mockito.{ArgumentCaptor, Mock, MockitoAnnotations}
-import org.mockito.Matchers.{eq => mockitoEq}
+import org.mockito.ArgumentMatchers.{eq => mockitoEq}
import org.mockito.Mockito.{never, verify, when}
import org.scalatest.BeforeAndAfter
diff --git a/resource-managers/mesos/src/test/scala/org/apache/spark/scheduler/cluster/mesos/MesosClusterSchedulerSuite.scala b/resource-managers/mesos/src/test/scala/org/apache/spark/scheduler/cluster/mesos/MesosClusterSchedulerSuite.scala
index 082d4bcfdf83a..7adac1964e010 100644
--- a/resource-managers/mesos/src/test/scala/org/apache/spark/scheduler/cluster/mesos/MesosClusterSchedulerSuite.scala
+++ b/resource-managers/mesos/src/test/scala/org/apache/spark/scheduler/cluster/mesos/MesosClusterSchedulerSuite.scala
@@ -24,7 +24,8 @@ import scala.collection.JavaConverters._
import org.apache.mesos.Protos.{TaskState => MesosTaskState, _}
import org.apache.mesos.Protos.Value.{Scalar, Type}
import org.apache.mesos.SchedulerDriver
-import org.mockito.{ArgumentCaptor, Matchers}
+import org.mockito.ArgumentCaptor
+import org.mockito.ArgumentMatchers.{eq => meq}
import org.mockito.Mockito._
import org.scalatest.mockito.MockitoSugar
@@ -133,7 +134,7 @@ class MesosClusterSchedulerSuite extends SparkFunSuite with LocalSparkContext wi
when(
driver.launchTasks(
- Matchers.eq(Collections.singleton(offer.getId)),
+ meq(Collections.singleton(offer.getId)),
capture.capture())
).thenReturn(Status.valueOf(1))
@@ -156,7 +157,7 @@ class MesosClusterSchedulerSuite extends SparkFunSuite with LocalSparkContext wi
assert(mem.exists(_.getRole() == "*"))
verify(driver, times(1)).launchTasks(
- Matchers.eq(Collections.singleton(offer.getId)),
+ meq(Collections.singleton(offer.getId)),
capture.capture()
)
}
diff --git a/resource-managers/mesos/src/test/scala/org/apache/spark/scheduler/cluster/mesos/MesosCoarseGrainedSchedulerBackendSuite.scala b/resource-managers/mesos/src/test/scala/org/apache/spark/scheduler/cluster/mesos/MesosCoarseGrainedSchedulerBackendSuite.scala
index da33d85d8fb2e..0cfaa0a0c9a60 100644
--- a/resource-managers/mesos/src/test/scala/org/apache/spark/scheduler/cluster/mesos/MesosCoarseGrainedSchedulerBackendSuite.scala
+++ b/resource-managers/mesos/src/test/scala/org/apache/spark/scheduler/cluster/mesos/MesosCoarseGrainedSchedulerBackendSuite.scala
@@ -24,9 +24,8 @@ import scala.concurrent.duration._
import org.apache.mesos.{Protos, Scheduler, SchedulerDriver}
import org.apache.mesos.Protos._
-import org.mockito.Matchers
-import org.mockito.Matchers._
-import org.mockito.Mockito._
+import org.mockito.ArgumentMatchers.{any, anyInt, anyLong, anyString, eq => meq}
+import org.mockito.Mockito.{times, verify, when}
import org.scalatest.BeforeAndAfter
import org.scalatest.concurrent.ScalaFutures
import org.scalatest.mockito.MockitoSugar
@@ -697,9 +696,9 @@ class MesosCoarseGrainedSchedulerBackendSuite extends SparkFunSuite
offerId: OfferID,
filter: Boolean = false): Unit = {
if (filter) {
- verify(driver, times(1)).declineOffer(Matchers.eq(offerId), anyObject[Filters])
+ verify(driver, times(1)).declineOffer(meq(offerId), any[Filters]())
} else {
- verify(driver, times(1)).declineOffer(Matchers.eq(offerId))
+ verify(driver, times(1)).declineOffer(meq(offerId))
}
}
diff --git a/resource-managers/mesos/src/test/scala/org/apache/spark/scheduler/cluster/mesos/MesosFineGrainedSchedulerBackendSuite.scala b/resource-managers/mesos/src/test/scala/org/apache/spark/scheduler/cluster/mesos/MesosFineGrainedSchedulerBackendSuite.scala
index 1ead4b1ed7c7e..c9b7e6c439c4b 100644
--- a/resource-managers/mesos/src/test/scala/org/apache/spark/scheduler/cluster/mesos/MesosFineGrainedSchedulerBackendSuite.scala
+++ b/resource-managers/mesos/src/test/scala/org/apache/spark/scheduler/cluster/mesos/MesosFineGrainedSchedulerBackendSuite.scala
@@ -30,8 +30,8 @@ import scala.collection.mutable.ArrayBuffer
import org.apache.mesos.{Protos, Scheduler, SchedulerDriver}
import org.apache.mesos.Protos._
import org.apache.mesos.Protos.Value.Scalar
-import org.mockito.{ArgumentCaptor, Matchers}
-import org.mockito.Matchers._
+import org.mockito.ArgumentCaptor
+import org.mockito.ArgumentMatchers.{any, anyLong, eq => meq}
import org.mockito.Mockito._
import org.scalatest.mockito.MockitoSugar
@@ -264,7 +264,7 @@ class MesosFineGrainedSchedulerBackendSuite
val capture = ArgumentCaptor.forClass(classOf[Collection[TaskInfo]])
when(
driver.launchTasks(
- Matchers.eq(Collections.singleton(mesosOffers.get(0).getId)),
+ meq(Collections.singleton(mesosOffers.get(0).getId)),
capture.capture(),
any(classOf[Filters])
)
@@ -275,7 +275,7 @@ class MesosFineGrainedSchedulerBackendSuite
backend.resourceOffers(driver, mesosOffers)
verify(driver, times(1)).launchTasks(
- Matchers.eq(Collections.singleton(mesosOffers.get(0).getId)),
+ meq(Collections.singleton(mesosOffers.get(0).getId)),
capture.capture(),
any(classOf[Filters])
)
@@ -373,7 +373,7 @@ class MesosFineGrainedSchedulerBackendSuite
val capture = ArgumentCaptor.forClass(classOf[Collection[TaskInfo]])
when(
driver.launchTasks(
- Matchers.eq(Collections.singleton(mesosOffers.get(0).getId)),
+ meq(Collections.singleton(mesosOffers.get(0).getId)),
capture.capture(),
any(classOf[Filters])
)
@@ -382,7 +382,7 @@ class MesosFineGrainedSchedulerBackendSuite
backend.resourceOffers(driver, mesosOffers)
verify(driver, times(1)).launchTasks(
- Matchers.eq(Collections.singleton(mesosOffers.get(0).getId)),
+ meq(Collections.singleton(mesosOffers.get(0).getId)),
capture.capture(),
any(classOf[Filters])
)
diff --git a/resource-managers/mesos/src/test/scala/org/apache/spark/scheduler/cluster/mesos/Utils.scala b/resource-managers/mesos/src/test/scala/org/apache/spark/scheduler/cluster/mesos/Utils.scala
index c9f47471cd75e..65e595e3cf2bf 100644
--- a/resource-managers/mesos/src/test/scala/org/apache/spark/scheduler/cluster/mesos/Utils.scala
+++ b/resource-managers/mesos/src/test/scala/org/apache/spark/scheduler/cluster/mesos/Utils.scala
@@ -25,8 +25,9 @@ import org.apache.mesos.Protos._
import org.apache.mesos.Protos.Value.{Range => MesosRange, Ranges, Scalar}
import org.apache.mesos.SchedulerDriver
import org.apache.mesos.protobuf.ByteString
-import org.mockito.{ArgumentCaptor, Matchers}
-import org.mockito.Mockito._
+import org.mockito.ArgumentCaptor
+import org.mockito.ArgumentMatchers.{any, eq => meq}
+import org.mockito.Mockito.{times, verify}
import org.apache.spark.deploy.mesos.config.MesosSecretConfig
@@ -84,15 +85,15 @@ object Utils {
def verifyTaskLaunched(driver: SchedulerDriver, offerId: String): List[TaskInfo] = {
val captor = ArgumentCaptor.forClass(classOf[java.util.Collection[TaskInfo]])
verify(driver, times(1)).launchTasks(
- Matchers.eq(Collections.singleton(createOfferId(offerId))),
+ meq(Collections.singleton(createOfferId(offerId))),
captor.capture())
captor.getValue.asScala.toList
}
def verifyTaskNotLaunched(driver: SchedulerDriver, offerId: String): Unit = {
verify(driver, times(0)).launchTasks(
- Matchers.eq(Collections.singleton(createOfferId(offerId))),
- Matchers.any(classOf[java.util.Collection[TaskInfo]]))
+ meq(Collections.singleton(createOfferId(offerId))),
+ any(classOf[java.util.Collection[TaskInfo]]))
}
def createOfferId(offerId: String): OfferID = {
diff --git a/resource-managers/yarn/src/test/scala/org/apache/spark/deploy/yarn/ClientSuite.scala b/resource-managers/yarn/src/test/scala/org/apache/spark/deploy/yarn/ClientSuite.scala
index a6f57fcdb2461..9acd99546c036 100644
--- a/resource-managers/yarn/src/test/scala/org/apache/spark/deploy/yarn/ClientSuite.scala
+++ b/resource-managers/yarn/src/test/scala/org/apache/spark/deploy/yarn/ClientSuite.scala
@@ -34,8 +34,8 @@ import org.apache.hadoop.yarn.api.records._
import org.apache.hadoop.yarn.client.api.YarnClientApplication
import org.apache.hadoop.yarn.conf.YarnConfiguration
import org.apache.hadoop.yarn.util.Records
-import org.mockito.Matchers.{eq => meq, _}
-import org.mockito.Mockito._
+import org.mockito.ArgumentMatchers.{any, anyBoolean, anyShort, eq => meq}
+import org.mockito.Mockito.{spy, verify}
import org.scalatest.Matchers
import org.apache.spark.{SparkConf, SparkFunSuite, TestUtils}
@@ -43,6 +43,7 @@ import org.apache.spark.deploy.yarn.config._
import org.apache.spark.util.{SparkConfWithEnv, Utils}
class ClientSuite extends SparkFunSuite with Matchers {
+ private def doReturn(value: Any) = org.mockito.Mockito.doReturn(value, Seq.empty: _*)
import Client._
diff --git a/resource-managers/yarn/src/test/scala/org/apache/spark/network/yarn/YarnShuffleServiceMetricsSuite.scala b/resource-managers/yarn/src/test/scala/org/apache/spark/network/yarn/YarnShuffleServiceMetricsSuite.scala
index 952fd0b70bb7b..f538cbc5b7657 100644
--- a/resource-managers/yarn/src/test/scala/org/apache/spark/network/yarn/YarnShuffleServiceMetricsSuite.scala
+++ b/resource-managers/yarn/src/test/scala/org/apache/spark/network/yarn/YarnShuffleServiceMetricsSuite.scala
@@ -19,7 +19,7 @@ package org.apache.spark.network.yarn
import scala.collection.JavaConverters._
import org.apache.hadoop.metrics2.MetricsRecordBuilder
-import org.mockito.Matchers._
+import org.mockito.ArgumentMatchers.{any, anyDouble, anyInt, anyLong}
import org.mockito.Mockito.{mock, times, verify, when}
import org.scalatest.Matchers
@@ -56,8 +56,8 @@ class YarnShuffleServiceMetricsSuite extends SparkFunSuite with Matchers {
YarnShuffleServiceMetrics.collectMetric(builder, testname,
metrics.getMetrics.get(testname))
- verify(builder).addCounter(anyObject(), anyLong())
- verify(builder, times(4)).addGauge(anyObject(), anyDouble())
+ verify(builder).addCounter(any(), anyLong())
+ verify(builder, times(4)).addGauge(any(), anyDouble())
}
}
@@ -69,6 +69,6 @@ class YarnShuffleServiceMetricsSuite extends SparkFunSuite with Matchers {
metrics.getMetrics.get("registeredExecutorsSize"))
// only one
- verify(builder).addGauge(anyObject(), anyInt())
+ verify(builder).addGauge(any(), anyInt())
}
}
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/streaming/continuous/EpochCoordinatorSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/streaming/continuous/EpochCoordinatorSuite.scala
index 3c973d8ebc704..e644c16ddfeab 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/streaming/continuous/EpochCoordinatorSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/streaming/continuous/EpochCoordinatorSuite.scala
@@ -17,9 +17,9 @@
package org.apache.spark.sql.streaming.continuous
+import org.mockito.ArgumentMatchers.{any, eq => eqTo}
import org.mockito.InOrder
-import org.mockito.Matchers.{any, eq => eqTo}
-import org.mockito.Mockito._
+import org.mockito.Mockito.{inOrder, never, verify}
import org.scalatest.BeforeAndAfterEach
import org.scalatest.mockito.MockitoSugar
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/streaming/test/DataStreamReaderWriterSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/streaming/test/DataStreamReaderWriterSuite.scala
index 8212fb912ec57..4d3a54a048e8e 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/streaming/test/DataStreamReaderWriterSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/streaming/test/DataStreamReaderWriterSuite.scala
@@ -24,7 +24,7 @@ import java.util.concurrent.TimeUnit
import scala.concurrent.duration._
import org.apache.hadoop.fs.Path
-import org.mockito.Matchers.{any, eq => meq}
+import org.mockito.ArgumentMatchers.{any, eq => meq}
import org.mockito.Mockito._
import org.scalatest.BeforeAndAfter
diff --git a/streaming/src/test/scala/org/apache/spark/streaming/ReceivedBlockTrackerSuite.scala b/streaming/src/test/scala/org/apache/spark/streaming/ReceivedBlockTrackerSuite.scala
index fd7e00b1de25f..bdaef94949159 100644
--- a/streaming/src/test/scala/org/apache/spark/streaming/ReceivedBlockTrackerSuite.scala
+++ b/streaming/src/test/scala/org/apache/spark/streaming/ReceivedBlockTrackerSuite.scala
@@ -26,7 +26,7 @@ import scala.language.{implicitConversions, postfixOps}
import scala.util.Random
import org.apache.hadoop.conf.Configuration
-import org.mockito.Matchers.any
+import org.mockito.ArgumentMatchers.any
import org.mockito.Mockito.{doThrow, reset, spy}
import org.scalatest.{BeforeAndAfter, Matchers}
import org.scalatest.concurrent.Eventually._
diff --git a/streaming/src/test/scala/org/apache/spark/streaming/scheduler/ExecutorAllocationManagerSuite.scala b/streaming/src/test/scala/org/apache/spark/streaming/scheduler/ExecutorAllocationManagerSuite.scala
index 8d81b582e4d30..7ec02c4782e42 100644
--- a/streaming/src/test/scala/org/apache/spark/streaming/scheduler/ExecutorAllocationManagerSuite.scala
+++ b/streaming/src/test/scala/org/apache/spark/streaming/scheduler/ExecutorAllocationManagerSuite.scala
@@ -17,8 +17,8 @@
package org.apache.spark.streaming.scheduler
-import org.mockito.Matchers.{eq => meq}
-import org.mockito.Mockito._
+import org.mockito.ArgumentMatchers.{eq => meq}
+import org.mockito.Mockito.{never, reset, times, verify, when}
import org.scalatest.{BeforeAndAfter, BeforeAndAfterAll, PrivateMethodTester}
import org.scalatest.concurrent.Eventually.{eventually, timeout}
import org.scalatest.mockito.MockitoSugar
diff --git a/streaming/src/test/scala/org/apache/spark/streaming/util/WriteAheadLogSuite.scala b/streaming/src/test/scala/org/apache/spark/streaming/util/WriteAheadLogSuite.scala
index 4a2549fc0a96d..c20380d8490df 100644
--- a/streaming/src/test/scala/org/apache/spark/streaming/util/WriteAheadLogSuite.scala
+++ b/streaming/src/test/scala/org/apache/spark/streaming/util/WriteAheadLogSuite.scala
@@ -31,8 +31,8 @@ import scala.language.{implicitConversions, postfixOps}
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.Path
import org.mockito.ArgumentCaptor
-import org.mockito.Matchers.{eq => meq, _}
-import org.mockito.Mockito._
+import org.mockito.ArgumentMatchers.{any, anyLong, eq => meq}
+import org.mockito.Mockito.{times, verify, when}
import org.scalatest.{BeforeAndAfter, BeforeAndAfterEach, PrivateMethodTester}
import org.scalatest.concurrent.Eventually
import org.scalatest.concurrent.Eventually._
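
One recurring pattern in the diff above deserves a note: several suites add a private doReturn(value: Any) = org.mockito.Mockito.doReturn(value, Seq.empty: _*) helper. Mockito 2 overloads doReturn as doReturn(Object) and doReturn(Object, Object...), and scalac reports a plain doReturn(x) call as an ambiguous overloaded reference, so the helper pins the varargs overload with an explicit empty Seq. Likewise, InvocationOnMock.getArgumentAt(i, classOf[T]) became the type-inferred getArgument(i), which the K8s and TaskSetManager hunks apply. A minimal sketch of the doReturn workaround, with a hypothetical Counter class for illustration only:

    import org.mockito.Mockito.spy

    class Counter { def ready: Boolean = false }  // hypothetical class, for illustration

    object DoReturnSketch {
      // Without this pin, scalac cannot choose between Mockito 2's
      // doReturn(Object) and doReturn(Object, Object...) overloads.
      private def doReturn(value: Any) =
        org.mockito.Mockito.doReturn(value, Seq.empty: _*)

      def main(args: Array[String]): Unit = {
        val counter = spy(new Counter)
        doReturn(true).when(counter).ready  // stubs the spy without invoking the real ready
        assert(counter.ready)
      }
    }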
----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the URL above to go to the specific comment.
For queries about this service, please contact Infrastructure at:
[email protected]
With regards,
Apache Git Services
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]