divijvaidya commented on code in PR #12459:
URL: https://github.com/apache/kafka/pull/12459#discussion_r935547915


##########
streams/src/test/java/org/apache/kafka/streams/state/internals/metrics/RocksDBMetricsRecorderTest.java:
##########
@@ -36,22 +38,23 @@
 import org.rocksdb.StatsLevel;
 import org.rocksdb.TickerType;
 
-import static org.easymock.EasyMock.anyObject;
-import static org.easymock.EasyMock.eq;
-import static org.easymock.EasyMock.expect;
-import static org.easymock.EasyMock.mock;
-import static org.easymock.EasyMock.niceMock;
 import static org.hamcrest.CoreMatchers.is;
 import static org.hamcrest.MatcherAssert.assertThat;
 import static org.junit.Assert.assertThrows;
-import static org.powermock.api.easymock.PowerMock.reset;
-import static org.powermock.api.easymock.PowerMock.createMock;
-import static org.powermock.api.easymock.PowerMock.mockStatic;
-import static org.powermock.api.easymock.PowerMock.replay;
-import static org.powermock.api.easymock.PowerMock.verify;
-
-@RunWith(PowerMockRunner.class)
-@PrepareForTest({RocksDBMetrics.class, Sensor.class})
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.ArgumentMatchers.eq;
+import static org.mockito.ArgumentMatchers.isA;
+import static org.mockito.Mockito.doNothing;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.mockStatic;
+import static org.mockito.Mockito.never;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.verifyNoInteractions;
+import static org.mockito.Mockito.verifyNoMoreInteractions;
+import static org.mockito.Mockito.when;
+
+@RunWith(MockitoJUnitRunner.class)

Review Comment:
   You were not completely wrong. I found the following idiosyncrasies:
   1. `StrictStubs` does not verify invocation of mocks in `mockStatic` cases; you need an explicit `verify` call for that.
   2. When you run a single test, `StrictStubs` does not perform stub verification, but when you run all the tests together, it does ensure that all stubs are invoked.
   
   I think it's safer to keep `StrictStubs` for now, so let's keep the change as you suggested.
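   
   To make the first point concrete, here is a minimal, self-contained sketch (illustration only, not code from this PR; `TimeUtils` is a hypothetical helper class) of verifying a `mockStatic` stub explicitly, since `StrictStubs` will not do it for you:
   
   ```java
   import static org.junit.Assert.assertEquals;
   
   import org.junit.Test;
   import org.junit.runner.RunWith;
   import org.mockito.MockedStatic;
   import org.mockito.Mockito;
   import org.mockito.junit.MockitoJUnitRunner;
   
   @RunWith(MockitoJUnitRunner.StrictStubs.class)
   public class StaticStubVerificationSketch {
   
       // Hypothetical helper class, used only for this illustration.
       static class TimeUtils {
           static long now() {
               return System.currentTimeMillis();
           }
       }
   
       @Test
       public void shouldVerifyStaticStubExplicitly() {
           try (MockedStatic<TimeUtils> timeUtils = Mockito.mockStatic(TimeUtils.class)) {
               timeUtils.when(TimeUtils::now).thenReturn(42L);
   
               assertEquals(42L, TimeUtils.now());
   
               // StrictStubs does not check static stubs on its own, so the
               // invocation is verified explicitly on the MockedStatic handle.
               timeUtils.verify(TimeUtils::now);
           }
       }
   }
   ```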



##########
streams/src/test/java/org/apache/kafka/streams/state/internals/metrics/RocksDBMetricsRecorderTest.java:
##########
@@ -70,105 +73,133 @@ public class RocksDBMetricsRecorderTest {
     private final Statistics statisticsToAdd2 = mock(Statistics.class);
     private final Statistics statisticsToAdd3 = mock(Statistics.class);
 
-    private final Sensor bytesWrittenToDatabaseSensor = createMock(Sensor.class);
-    private final Sensor bytesReadFromDatabaseSensor = createMock(Sensor.class);
-    private final Sensor memtableBytesFlushedSensor = createMock(Sensor.class);
-    private final Sensor memtableHitRatioSensor = createMock(Sensor.class);
-    private final Sensor memtableAvgFlushTimeSensor = createMock(Sensor.class);
-    private final Sensor memtableMinFlushTimeSensor = createMock(Sensor.class);
-    private final Sensor memtableMaxFlushTimeSensor = createMock(Sensor.class);
-    private final Sensor writeStallDurationSensor = createMock(Sensor.class);
-    private final Sensor blockCacheDataHitRatioSensor = createMock(Sensor.class);
-    private final Sensor blockCacheIndexHitRatioSensor = createMock(Sensor.class);
-    private final Sensor blockCacheFilterHitRatioSensor = createMock(Sensor.class);
-    private final Sensor bytesReadDuringCompactionSensor = createMock(Sensor.class);
-    private final Sensor bytesWrittenDuringCompactionSensor = createMock(Sensor.class);
-    private final Sensor compactionTimeAvgSensor = createMock(Sensor.class);
-    private final Sensor compactionTimeMinSensor = createMock(Sensor.class);
-    private final Sensor compactionTimeMaxSensor = createMock(Sensor.class);
-    private final Sensor numberOfOpenFilesSensor = createMock(Sensor.class);
-    private final Sensor numberOfFileErrorsSensor = createMock(Sensor.class);
-
-    private final StreamsMetricsImpl streamsMetrics = niceMock(StreamsMetricsImpl.class);
+    private final Sensor bytesWrittenToDatabaseSensor = mock(Sensor.class);
+    private final Sensor bytesReadFromDatabaseSensor = mock(Sensor.class);
+    private final Sensor memtableBytesFlushedSensor = mock(Sensor.class);
+    private final Sensor memtableHitRatioSensor = mock(Sensor.class);
+    private final Sensor memtableAvgFlushTimeSensor = mock(Sensor.class);
+    private final Sensor memtableMinFlushTimeSensor = mock(Sensor.class);
+    private final Sensor memtableMaxFlushTimeSensor = mock(Sensor.class);
+    private final Sensor writeStallDurationSensor = mock(Sensor.class);
+    private final Sensor blockCacheDataHitRatioSensor = mock(Sensor.class);
+    private final Sensor blockCacheIndexHitRatioSensor = mock(Sensor.class);
+    private final Sensor blockCacheFilterHitRatioSensor = mock(Sensor.class);
+    private final Sensor bytesReadDuringCompactionSensor = mock(Sensor.class);
+    private final Sensor bytesWrittenDuringCompactionSensor = mock(Sensor.class);
+    private final Sensor compactionTimeAvgSensor = mock(Sensor.class);
+    private final Sensor compactionTimeMinSensor = mock(Sensor.class);
+    private final Sensor compactionTimeMaxSensor = mock(Sensor.class);
+    private final Sensor numberOfOpenFilesSensor = mock(Sensor.class);
+    private final Sensor numberOfFileErrorsSensor = mock(Sensor.class);
+
+    private final StreamsMetricsImpl streamsMetrics = mock(StreamsMetricsImpl.class);
     private final RocksDBMetricsRecordingTrigger recordingTrigger = mock(RocksDBMetricsRecordingTrigger.class);
 
     private final RocksDBMetricsRecorder recorder = new RocksDBMetricsRecorder(METRICS_SCOPE, STORE_NAME);
 
+    private MockedStatic<RocksDBMetrics> dbMetrics;
+
     @Before
     public void setUp() {
-        setUpMetricsStubMock();
-        expect(streamsMetrics.rocksDBMetricsRecordingTrigger()).andStubReturn(recordingTrigger);
-        replay(streamsMetrics);
+        setUpMetricsMock();
+        when(streamsMetrics.rocksDBMetricsRecordingTrigger()).thenReturn(recordingTrigger);
         recorder.init(streamsMetrics, TASK_ID1);
     }
 
+    @After
+    public void cleanUpMocks() {
+        dbMetrics.close();
+    }
+
     @Test
     public void shouldInitMetricsRecorder() {
-        setUpMetricsMock();
-
-        recorder.init(streamsMetrics, TASK_ID1);
-
-        verify(RocksDBMetrics.class);
+        dbMetrics.verify(() -> RocksDBMetrics.bytesWrittenToDatabaseSensor(any(), any()));
+        dbMetrics.verify(() -> RocksDBMetrics.bytesReadFromDatabaseSensor(any(), any()));
+        dbMetrics.verify(() -> RocksDBMetrics.memtableBytesFlushedSensor(any(), any()));
+        dbMetrics.verify(() -> RocksDBMetrics.memtableHitRatioSensor(any(), any()));
+        dbMetrics.verify(() -> RocksDBMetrics.memtableAvgFlushTimeSensor(any(), any()));
+        dbMetrics.verify(() -> RocksDBMetrics.memtableMinFlushTimeSensor(any(), any()));
+        dbMetrics.verify(() -> RocksDBMetrics.memtableMaxFlushTimeSensor(any(), any()));
+        dbMetrics.verify(() -> RocksDBMetrics.writeStallDurationSensor(any(), any()));
+        dbMetrics.verify(() -> RocksDBMetrics.blockCacheDataHitRatioSensor(any(), any()));
+        dbMetrics.verify(() -> RocksDBMetrics.blockCacheIndexHitRatioSensor(any(), any()));
+        dbMetrics.verify(() -> RocksDBMetrics.blockCacheFilterHitRatioSensor(any(), any()));
+        dbMetrics.verify(() -> RocksDBMetrics.bytesWrittenDuringCompactionSensor(any(), any()));
+        dbMetrics.verify(() -> RocksDBMetrics.bytesReadDuringCompactionSensor(any(), any()));
+        dbMetrics.verify(() -> RocksDBMetrics.compactionTimeAvgSensor(any(), any()));
+        dbMetrics.verify(() -> RocksDBMetrics.compactionTimeMinSensor(any(), any()));
+        dbMetrics.verify(() -> RocksDBMetrics.compactionTimeMaxSensor(any(), any()));
+        dbMetrics.verify(() -> RocksDBMetrics.numberOfOpenFilesSensor(any(), any()));
+        dbMetrics.verify(() -> RocksDBMetrics.numberOfFileErrorsSensor(any(), any()));
+        dbMetrics.verify(() -> RocksDBMetrics.addNumImmutableMemTableMetric(any(), any(), any()));
+        dbMetrics.verify(() -> RocksDBMetrics.addCurSizeActiveMemTable(any(), any(), any()));
+        dbMetrics.verify(() -> RocksDBMetrics.addCurSizeAllMemTables(any(), any(), any()));
+        dbMetrics.verify(() -> RocksDBMetrics.addSizeAllMemTables(any(), any(), any()));
+        dbMetrics.verify(() -> RocksDBMetrics.addNumEntriesActiveMemTableMetric(any(), any(), any()));
+        dbMetrics.verify(() -> RocksDBMetrics.addNumEntriesImmMemTablesMetric(any(), any(), any()));
+        dbMetrics.verify(() -> RocksDBMetrics.addNumDeletesActiveMemTableMetric(any(), any(), any()));
+        dbMetrics.verify(() -> RocksDBMetrics.addNumDeletesImmMemTablesMetric(any(), any(), any()));
+        dbMetrics.verify(() -> RocksDBMetrics.addMemTableFlushPending(any(), any(), any()));
+        dbMetrics.verify(() -> RocksDBMetrics.addNumRunningFlushesMetric(any(), any(), any()));
+        dbMetrics.verify(() -> RocksDBMetrics.addCompactionPendingMetric(any(), any(), any()));
+        dbMetrics.verify(() -> RocksDBMetrics.addNumRunningCompactionsMetric(any(), any(), any()));
+        dbMetrics.verify(() -> RocksDBMetrics.addEstimatePendingCompactionBytesMetric(any(), any(), any()));
+        dbMetrics.verify(() -> RocksDBMetrics.addTotalSstFilesSizeMetric(any(), any(), any()));
+        dbMetrics.verify(() -> RocksDBMetrics.addLiveSstFilesSizeMetric(any(), any(), any()));
+        dbMetrics.verify(() -> RocksDBMetrics.addNumLiveVersionMetric(any(), any(), any()));
+        dbMetrics.verify(() -> RocksDBMetrics.addBlockCacheCapacityMetric(any(), any(), any()));
+        dbMetrics.verify(() -> RocksDBMetrics.addBlockCacheUsageMetric(any(), any(), any()));
+        dbMetrics.verify(() -> RocksDBMetrics.addBlockCachePinnedUsageMetric(any(), any(), any()));
+        dbMetrics.verify(() -> RocksDBMetrics.addEstimateNumKeysMetric(any(), any(), any()));
+        dbMetrics.verify(() -> RocksDBMetrics.addEstimateTableReadersMemMetric(any(), any(), any()));
+        dbMetrics.verify(() -> RocksDBMetrics.addBackgroundErrorsMetric(any(), any(), any()));
         assertThat(recorder.taskId(), is(TASK_ID1));
     }
 
     @Test
     public void shouldThrowIfMetricRecorderIsReInitialisedWithDifferentTask() {
-        setUpMetricsStubMock();
-        recorder.init(streamsMetrics, TASK_ID1);
-
         assertThrows(
-            IllegalStateException.class,
-            () -> recorder.init(streamsMetrics, TASK_ID2)
+                IllegalStateException.class,
+                () -> recorder.init(streamsMetrics, TASK_ID2)
         );
     }
 
     @Test
     public void shouldThrowIfMetricRecorderIsReInitialisedWithDifferentStreamsMetrics() {
-        setUpMetricsStubMock();
-        recorder.init(streamsMetrics, TASK_ID1);
-
         assertThrows(
-            IllegalStateException.class,
-            () -> recorder.init(
-                new StreamsMetricsImpl(new Metrics(), "test-client", StreamsConfig.METRICS_LATEST, new MockTime()),
-                TASK_ID1
-            )
+                IllegalStateException.class,
+                () -> recorder.init(
+                        new StreamsMetricsImpl(new Metrics(), "test-client", StreamsConfig.METRICS_LATEST, new MockTime()),
+                        TASK_ID1
+                )
         );
     }
 
     @Test
     public void shouldSetStatsLevelToExceptDetailedTimersWhenValueProvidersWithStatisticsAreAdded() {
-        statisticsToAdd1.setStatsLevel(StatsLevel.EXCEPT_DETAILED_TIMERS);
-        replay(statisticsToAdd1);
-
         recorder.addValueProviders(SEGMENT_STORE_NAME_1, dbToAdd1, cacheToAdd1, statisticsToAdd1);
-
-        verify(statisticsToAdd1);
+        verify(statisticsToAdd1).setStatsLevel(eq(StatsLevel.EXCEPT_DETAILED_TIMERS));

Review Comment:
   Thanks for catching that. Fixed.
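   
   For context on this hunk, here is a minimal, self-contained sketch (illustration only, not code from this PR) of the Mockito verification style the test moves to: there is no record/replay phase, the mock is exercised first and the interaction is verified afterwards with an `eq()` matcher.
   
   ```java
   import static org.mockito.ArgumentMatchers.eq;
   import static org.mockito.Mockito.mock;
   import static org.mockito.Mockito.verify;
   
   import org.junit.Test;
   import org.rocksdb.StatsLevel;
   import org.rocksdb.Statistics;
   
   public class StatisticsVerificationSketch {
   
       @Test
       public void shouldVerifySetStatsLevelAfterTheFact() {
           final Statistics statistics = mock(Statistics.class);
   
           // Exercise the code that uses the mock (inlined here for brevity).
           statistics.setStatsLevel(StatsLevel.EXCEPT_DETAILED_TIMERS);
   
           // Unlike EasyMock's expect/replay/verify, Mockito checks the
           // interaction after it has happened.
           verify(statistics).setStatsLevel(eq(StatsLevel.EXCEPT_DETAILED_TIMERS));
       }
   }
   ```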



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: jira-unsubscr...@kafka.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org
