This is an automated email from the ASF dual-hosted git repository.

davidarthur pushed a commit to branch trunk
in repository https://gitbox.apache.org/repos/asf/kafka.git


The following commit(s) were added to refs/heads/trunk by this push:
     new 4fadec527da MINOR: Quarantine the worst flaky tests (#17639)
4fadec527da is described below

commit 4fadec527daedb035c893ddf2c5a178572f78a4f
Author: David Arthur <[email protected]>
AuthorDate: Wed Oct 30 17:02:04 2024 -0400

    MINOR: Quarantine the worst flaky tests (#17639)
    
    Using the last 7 days of data as of Oct 30 2024, this patch marks as flaky
all tests with more than 10% flakiness on trunk.
    
    Reviewers: Chia-Ping Tsai <[email protected]>
---
 .../apache/kafka/connect/integration/OffsetsApiIntegrationTest.java   | 4 ++++
 core/src/test/java/kafka/log/remote/RemoteLogManagerTest.java         | 2 ++
 core/src/test/scala/unit/kafka/admin/DeleteTopicTest.scala            | 2 +-
 3 files changed, 7 insertions(+), 1 deletion(-)

diff --git 
a/connect/runtime/src/test/java/org/apache/kafka/connect/integration/OffsetsApiIntegrationTest.java
 
b/connect/runtime/src/test/java/org/apache/kafka/connect/integration/OffsetsApiIntegrationTest.java
index bd339b9a754..78c9a614065 100644
--- 
a/connect/runtime/src/test/java/org/apache/kafka/connect/integration/OffsetsApiIntegrationTest.java
+++ 
b/connect/runtime/src/test/java/org/apache/kafka/connect/integration/OffsetsApiIntegrationTest.java
@@ -190,6 +190,7 @@ public class OffsetsApiIntegrationTest {
         }
     }
 
+    @Flaky("KAFKA-14956")
     @Test
     public void testGetSinkConnectorOffsetsDifferentKafkaClusterTargeted() 
throws Exception {
         EmbeddedKafkaCluster kafkaCluster = new EmbeddedKafkaCluster(1, new 
Properties());
@@ -348,6 +349,7 @@ public class OffsetsApiIntegrationTest {
         }
     }
 
+    @Flaky("KAFKA-16492")
     @Test
     public void testAlterSinkConnectorOffsetsDifferentKafkaClusterTargeted() 
throws Exception {
         EmbeddedKafkaCluster kafkaCluster = new EmbeddedKafkaCluster(1, new 
Properties());
@@ -704,11 +706,13 @@ public class OffsetsApiIntegrationTest {
         }
     }
 
+    @Flaky("KAFKA-15918")
     @Test
     public void testResetSinkConnectorOffsets() throws Exception {
         resetAndVerifySinkConnectorOffsets(baseSinkConnectorConfigs(), 
connect.kafka());
     }
 
+    @Flaky("KAFKA-15891")
     @Test
     public void testResetSinkConnectorOffsetsOverriddenConsumerGroupId() 
throws Exception {
         Map<String, String> connectorConfigs = baseSinkConnectorConfigs();
diff --git a/core/src/test/java/kafka/log/remote/RemoteLogManagerTest.java 
b/core/src/test/java/kafka/log/remote/RemoteLogManagerTest.java
index d9ff616b15b..4ea373327d9 100644
--- a/core/src/test/java/kafka/log/remote/RemoteLogManagerTest.java
+++ b/core/src/test/java/kafka/log/remote/RemoteLogManagerTest.java
@@ -37,6 +37,7 @@ import org.apache.kafka.common.record.RemoteLogInputStream;
 import org.apache.kafka.common.record.SimpleRecord;
 import org.apache.kafka.common.requests.FetchRequest;
 import org.apache.kafka.common.security.auth.SecurityProtocol;
+import org.apache.kafka.common.test.api.Flaky;
 import org.apache.kafka.common.utils.MockTime;
 import org.apache.kafka.common.utils.Time;
 import org.apache.kafka.server.common.OffsetAndEpoch;
@@ -1587,6 +1588,7 @@ public class RemoteLogManagerTest {
         
remoteLogManager.onLeadershipChange(Collections.singleton(mockPartition(leaderTopicIdPartition)),
 Collections.emptySet(), topicIds);
     }
 
+    @Flaky("KAFKA-17779")
     @Test
     void 
testFetchOffsetByTimestampWithTieredStorageDoesNotFetchIndexWhenExistsLocally() 
throws Exception {
         TopicPartition tp = new TopicPartition("sample", 0);
diff --git a/core/src/test/scala/unit/kafka/admin/DeleteTopicTest.scala 
b/core/src/test/scala/unit/kafka/admin/DeleteTopicTest.scala
index 2ddbfa0c23c..96b6373a359 100644
--- a/core/src/test/scala/unit/kafka/admin/DeleteTopicTest.scala
+++ b/core/src/test/scala/unit/kafka/admin/DeleteTopicTest.scala
@@ -324,7 +324,7 @@ class DeleteTopicTest extends QuorumTestHarness {
   }
 
   @ParameterizedTest
-  @ValueSource(strings = Array("zk", "kraft"))
+  @ValueSource(strings = Array("kraft"))
   def testAddPartitionDuringDeleteTopic(quorum: String): Unit = {
     brokers = createTestTopicAndCluster(topic)
     // partitions to be added to the topic later

Reply via email to