philipnee commented on code in PR #13477:
URL: https://github.com/apache/kafka/pull/13477#discussion_r1167093584


##########
streams/src/test/java/org/apache/kafka/streams/processor/internals/RecordCollectorTest.java:
##########
@@ -1391,21 +1390,57 @@ public void shouldNotCloseInternalProducerForNonEOS() {
     }
 
     @Test
-    public void 
testCollectorFlush_ThrowsStreamsExceptionUsingDefaultExceptionHandler() {
-        final ErrorStringSerializer errorSerializer = new 
ErrorStringSerializer();
-        final RecordCollector collector = newRecordCollector(new 
DefaultProductionExceptionHandler());
-        collector.initialize();
-        collector.send(topic, "key", "val", null, 0, null, stringSerializer, 
errorSerializer, sinkNodeName, context);
-        assertThrows(StreamsException.class, collector::flush);
+    public void shouldThrowStreamsExceptionUsingDefaultExceptionHandler() {
+        try (final ErrorStringSerializer errorSerializer = new 
ErrorStringSerializer()) {
+            final RecordCollector collector = newRecordCollector(new 
DefaultProductionExceptionHandler());
+            collector.initialize();
+
+            final StreamsException error = assertThrows(
+                StreamsException.class,
+                () -> collector.send(topic, "key", "val", null, 0, null, 
stringSerializer, errorSerializer, sinkNodeName, context)
+            );
+
+            assertThat(error.getCause(), 
instanceOf(SerializationException.class));
+        }
     }
 
     @Test
-    public void 
testCollectorFlush_DoesNotThrowStreamsExceptionUsingAlwaysContinueExceptionHandler()
 {
-        final ErrorStringSerializer errorSerializer = new 
ErrorStringSerializer();
-        final RecordCollector collector = newRecordCollector(new 
AlwaysContinueProductionExceptionHandler());
-        collector.initialize();
-        collector.send(topic, "key", "val", null, 0, null, errorSerializer, 
stringSerializer, sinkNodeName, context);
-        assertDoesNotThrow(collector::flush);
+    public void shouldDropRecordExceptionUsingAlwaysContinueExceptionHandler() 
{
+        try (final ErrorStringSerializer errorSerializer = new 
ErrorStringSerializer()) {
+            final RecordCollector collector = newRecordCollector(new 
AlwaysContinueProductionExceptionHandler());
+            collector.initialize();
+
+            collector.send(topic, "key", "val", null, 0, null, 
errorSerializer, stringSerializer, sinkNodeName, context);
+
+            assertThat(mockProducer.history().isEmpty(), equalTo(true));
+            assertThat(
+                streamsMetrics.metrics().get(new MetricName(
+                    "dropped-records-total",
+                    "stream-task-metrics",
+                    "The total number of dropped records",
+                    mkMap(
+                        mkEntry("thread-id", Thread.currentThread().getName()),
+                        mkEntry("task-id", taskId.toString())
+                    ))).metricValue(),
+                equalTo(1.0)
+            );
+        }
+    }
+
+    @SuppressWarnings({"unchecked", "rawtypes"})
+    @Test
+    public void shouldNotCallProductionExceptionHandlerOnClassCastException() {
+        try (final ErrorStringSerializer errorSerializer = new 
ErrorStringSerializer()) {
+            final RecordCollector collector = newRecordCollector(new 
AlwaysContinueProductionExceptionHandler());
+            collector.initialize();
+

Review Comment:
   Here's a minor PR: https://github.com/apache/kafka/pull/13576
   
   



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: jira-unsubscribe@kafka.apache.org

For queries about this service, please contact Infrastructure at:
users@infra.apache.org

Reply via email to