mjsax commented on code in PR #21666:
URL: https://github.com/apache/kafka/pull/21666#discussion_r2902562855
##########
streams/integration-tests/src/test/java/org/apache/kafka/streams/integration/HeadersStoreUpgradeIntegrationTest.java:
##########
@@ -233,6 +234,128 @@ public void
shouldProxyTimestampedKeyValueStoreToTimestampedKeyValueStoreWithHea
kafkaStreams.close();
}
+ @Test
+ public void
shouldMigrateInMemoryPlainKeyValueStoreToTimestampedKeyValueStoreWithHeadersUsingPapi()
throws Exception {
+
shouldMigratePlainKeyValueStoreToTimestampedKeyValueStoreWithHeadersUsingPapi(false);
+ }
+
+ @Test
+ public void
shouldMigratePersistentPlainKeyValueStoreToTimestampedKeyValueStoreWithHeadersUsingPapi()
throws Exception {
+
shouldMigratePlainKeyValueStoreToTimestampedKeyValueStoreWithHeadersUsingPapi(true);
+ }
+
+ private void
shouldMigratePlainKeyValueStoreToTimestampedKeyValueStoreWithHeadersUsingPapi(final
boolean persistentStore) throws Exception {
+ final StreamsBuilder streamsBuilderForOldStore = new StreamsBuilder();
+
+ streamsBuilderForOldStore.addStateStore(
+ Stores.keyValueStoreBuilder(
+ persistentStore ?
Stores.persistentKeyValueStore(STORE_NAME) :
Stores.inMemoryKeyValueStore(STORE_NAME),
+ Serdes.String(),
+ Serdes.String()))
+ .stream(inputStream, Consumed.with(Serdes.String(),
Serdes.String()))
+ .process(KeyValueProcessor::new, STORE_NAME);
+
+ final Properties props = props();
+ kafkaStreams = new KafkaStreams(streamsBuilderForOldStore.build(),
props);
+ kafkaStreams.start();
+
+ processKeyValueAndVerifyValue("key1", "value1");
+ processKeyValueAndVerifyValue("key2", "value2");
+ processKeyValueAndVerifyValue("key3", "value3");
+
+ kafkaStreams.close();
+ kafkaStreams = null;
+
+ final StreamsBuilder streamsBuilderForNewStore = new StreamsBuilder();
+
+ streamsBuilderForNewStore.addStateStore(
+ Stores.timestampedKeyValueStoreBuilderWithHeaders(
+ persistentStore ?
Stores.persistentTimestampedKeyValueStoreWithHeaders(STORE_NAME) :
Stores.inMemoryKeyValueStore(STORE_NAME),
+ Serdes.String(),
+ Serdes.String()))
+ .stream(inputStream, Consumed.with(Serdes.String(),
Serdes.String()))
+ .process(TimestampedKeyValueWithHeadersProcessor::new, STORE_NAME);
+
+ kafkaStreams = new KafkaStreams(streamsBuilderForNewStore.build(),
props);
+ kafkaStreams.start();
+
+ if (persistentStore) {
+ // Verify legacy data can be read with empty headers and timestamp
= -1
+ verifyLegacyValuesWithEmptyHeaders("key1", "value1", -1);
+ verifyLegacyValuesWithEmptyHeaders("key2", "value2", -1);
+ verifyLegacyValuesWithEmptyHeaders("key3", "value3", -1);
+ } else {
+ // Verify legacy data can be read with empty headers.
+ // When data is read from the changelog, the timestamp is set to
record.timestamp.
+ verifyLegacyValuesWithEmptyHeaders("key1", "value1");
Review Comment:
We could still verify which timestamp was used on write, using
`CLUSTER.time.milliseconds()` -- compare
`TimestampedStoreUpgradeIntegrationTest#shouldMigrateKeyValueStoreToTimestampedKeyValueStoreUsingPapi`
for details on how to do this.
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]