This is an automated email from the ASF dual-hosted git repository.
fmariani pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/camel-spring-boot.git
The following commit(s) were added to refs/heads/main by this push:
new c28fc000eb9 Fix flaky Kafka test
c28fc000eb9 is described below
commit c28fc000eb912abceffb5462190bdd44efa537d9
Author: Croway <[email protected]>
AuthorDate: Wed Jul 16 18:01:47 2025 +0200
Fix flaky Kafka test
---
.../component/kafka/integration/KafkaProducerFullIT.java | 12 ++++++------
1 file changed, 6 insertions(+), 6 deletions(-)
diff --git a/components-starter/camel-kafka-starter/src/test/java/org/apache/camel/component/kafka/integration/KafkaProducerFullIT.java b/components-starter/camel-kafka-starter/src/test/java/org/apache/camel/component/kafka/integration/KafkaProducerFullIT.java
index 8087d0c73f9..5a369f29e7d 100644
--- a/components-starter/camel-kafka-starter/src/test/java/org/apache/camel/component/kafka/integration/KafkaProducerFullIT.java
+++ b/components-starter/camel-kafka-starter/src/test/java/org/apache/camel/component/kafka/integration/KafkaProducerFullIT.java
@@ -82,24 +82,24 @@ public class KafkaProducerFullIT extends BaseEmbeddedKafkaTestSupport {
     private static KafkaConsumer<String, String> stringsConsumerConn;
     private static KafkaConsumer<byte[], byte[]> bytesConsumerConn;
-    private final String toStrings = "kafka:" + TOPIC_STRINGS + "?requestRequiredAcks=-1";
+    private final String toStrings = "kafka:" + TOPIC_STRINGS + "?requestRequiredAcks=-1&recordMetadata=true";
-    private final String toStrings2 = "kafka:" + TOPIC_STRINGS + "?requestRequiredAcks=-1&partitionKey=0";
+    private final String toStrings2 = "kafka:" + TOPIC_STRINGS + "?requestRequiredAcks=-1&partitionKey=0&recordMetadata=true";
     private final String toStringsWithInterceptor = "kafka:" + TOPIC_INTERCEPTED + "?requestRequiredAcks=-1"
-            + "&interceptorClasses=org.apache.camel.component.kafka.integration.MockProducerInterceptor";
+            + "&interceptorClasses=org.apache.camel.component.kafka.integration.MockProducerInterceptor&recordMetadata=true";
     @EndpointInject("mock:kafkaAck")
     private MockEndpoint mockEndpoint;
     private final String toBytes = "kafka:" + TOPIC_BYTES + "?requestRequiredAcks=-1"
             + "&valueSerializer=org.apache.kafka.common.serialization.ByteArraySerializer&"
-            + "keySerializer=org.apache.kafka.common.serialization.ByteArraySerializer";
+            + "keySerializer=org.apache.kafka.common.serialization.ByteArraySerializer&recordMetadata=true";
-    private final String toPropagatedHeaders = "kafka:" + TOPIC_PROPAGATED_HEADERS + "?requestRequiredAcks=-1";
+    private final String toPropagatedHeaders = "kafka:" + TOPIC_PROPAGATED_HEADERS + "?requestRequiredAcks=-1&recordMetadata=true";
     private final String toNoRecordSpecificHeaders = "kafka:" + TOPIC_NO_RECORD_SPECIFIC_HEADERS
-            + "?requestRequiredAcks=-1";
+            + "?requestRequiredAcks=-1&recordMetadata=true";
     @Produce("direct:startStrings")
     private ProducerTemplate stringsTemplate;