[
https://issues.apache.org/jira/browse/BEAM-6751?focusedWorklogId=208920&page=com.atlassian.jira.plugin.system.issuetabpanels:worklog-tabpanel#worklog-208920
]
ASF GitHub Bot logged work on BEAM-6751:
----------------------------------------
Author: ASF GitHub Bot
Created on: 06/Mar/19 16:10
Start Date: 06/Mar/19 16:10
Worklog Time Spent: 10m
Work Description: mxm commented on pull request #7991: [BEAM-6751] Add
KafkaIO EOS support to Flink via @RequiresStableInput
URL: https://github.com/apache/beam/pull/7991#discussion_r263014401
##########
File path: runners/flink/src/test/java/org/apache/beam/runners/flink/translation/wrappers/streaming/DoFnOperatorTest.java
##########
@@ -1362,6 +1367,291 @@ public void finishBundle(FinishBundleContext context) {
testHarness.close();
}
+ @Test
+ public void testExactlyOnceBuffering() throws Exception {
+ FlinkPipelineOptions options = PipelineOptionsFactory.as(FlinkPipelineOptions.class);
+ options.setMaxBundleSize(2L);
+ options.setCheckpointingInterval(1L);
+
+ TupleTag<String> outputTag = new TupleTag<>("main-output");
+ WindowedValue.ValueOnlyWindowedValueCoder<String> windowedValueCoder =
+ WindowedValue.getValueOnlyCoder(StringUtf8Coder.of());
+
+ numStartBundleCalled = 0;
+ DoFn<String, String> doFn =
+ new DoFn<String, String>() {
+ @StartBundle
+ public void startBundle(StartBundleContext context) {
+ numStartBundleCalled += 1;
+ }
+
+ @ProcessElement
+ // Use RequiresStableInput to force buffering elements
+ @RequiresStableInput
+ public void processElement(ProcessContext context) {
+ context.output(context.element());
+ }
+
+ @FinishBundle
+ public void finishBundle(FinishBundleContext context) {
+ context.output(
+ "finishBundle", BoundedWindow.TIMESTAMP_MIN_VALUE,
GlobalWindow.INSTANCE);
+ }
+ };
+
+ DoFnOperator.MultiOutputOutputManagerFactory<String> outputManagerFactory =
+ new DoFnOperator.MultiOutputOutputManagerFactory(
+ outputTag,
+ WindowedValue.getFullCoder(StringUtf8Coder.of(), GlobalWindow.Coder.INSTANCE));
+
+ Supplier<DoFnOperator<String, String>> doFnOperatorSupplier =
+ () ->
+ new DoFnOperator<>(
+ doFn,
+ "stepName",
+ windowedValueCoder,
+ null,
+ Collections.emptyMap(),
+ outputTag,
+ Collections.emptyList(),
+ outputManagerFactory,
+ WindowingStrategy.globalDefault(),
+ new HashMap<>(), /* side-input mapping */
+ Collections.emptyList(), /* side inputs */
+ options,
+ null,
+ null,
+ DoFnSchemaInformation.create());
+
+ DoFnOperator<String, String> doFnOperator = doFnOperatorSupplier.get();
+ OneInputStreamOperatorTestHarness<WindowedValue<String>, WindowedValue<String>> testHarness =
+ new OneInputStreamOperatorTestHarness<>(doFnOperator);
+
+ testHarness.open();
+
+ testHarness.processElement(new StreamRecord<>(WindowedValue.valueInGlobalWindow("a")));
+ testHarness.processElement(new StreamRecord<>(WindowedValue.valueInGlobalWindow("b")));
+
+ assertThat(Iterables.size(testHarness.getOutput()), is(0));
+ assertThat(numStartBundleCalled, is(0));
+
+ // create a backup and then complete the checkpoint, which flushes the buffered elements
+ OperatorSubtaskState backup = testHarness.snapshot(0, 0);
+ doFnOperator.notifyCheckpointComplete(0L);
+
+ assertThat(numStartBundleCalled, is(1));
+ assertThat(
+ stripStreamRecordFromWindowedValue(testHarness.getOutput()),
+ contains(
+ WindowedValue.valueInGlobalWindow("a"),
+ WindowedValue.valueInGlobalWindow("b"),
+ WindowedValue.valueInGlobalWindow("finishBundle")));
+
+ doFnOperator = doFnOperatorSupplier.get();
+ testHarness = new OneInputStreamOperatorTestHarness<>(doFnOperator);
+
+ // restore from the snapshot
+ testHarness.initializeState(backup);
+ testHarness.open();
+
+ doFnOperator.notifyCheckpointComplete(0L);
+
+ assertThat(numStartBundleCalled, is(2));
+ assertThat(
+ stripStreamRecordFromWindowedValue(testHarness.getOutput()),
+ contains(
+ WindowedValue.valueInGlobalWindow("a"),
+ WindowedValue.valueInGlobalWindow("b"),
+ WindowedValue.valueInGlobalWindow("finishBundle")));
+
+ // repeat to see if elements are evicted
+ doFnOperator.notifyCheckpointComplete(1L);
+
+ assertThat(numStartBundleCalled, is(2));
+ assertThat(
+ stripStreamRecordFromWindowedValue(testHarness.getOutput()),
+ contains(
+ WindowedValue.valueInGlobalWindow("a"),
+ WindowedValue.valueInGlobalWindow("b"),
+ WindowedValue.valueInGlobalWindow("finishBundle")));
+ }
+
+ @Test
+ @SuppressWarnings("unchecked")
+ public void testExactlyOnceBufferingKeyed() throws Exception {
+ FlinkPipelineOptions options = PipelineOptionsFactory.as(FlinkPipelineOptions.class);
+ options.setMaxBundleSize(2L);
+ options.setCheckpointingInterval(1L);
+
+ TupleTag<String> outputTag = new TupleTag<>("main-output");
+
+ StringUtf8Coder keyCoder = StringUtf8Coder.of();
+ KvToByteBufferKeySelector keySelector = new KvToByteBufferKeySelector<>(keyCoder);
+ KvCoder<String, String> kvCoder = KvCoder.of(keyCoder, StringUtf8Coder.of());
+ WindowedValue.ValueOnlyWindowedValueCoder<KV<String, String>> windowedValueCoder =
+ WindowedValue.getValueOnlyCoder(kvCoder);
+
+ DoFn<KV<String, String>, KV<String, String>> doFn =
+ new DoFn<KV<String, String>, KV<String, String>>() {
+ @StartBundle
+ public void startBundle(StartBundleContext context) {
+ numStartBundleCalled++;
+ }
+
+ @ProcessElement
+ // Use RequiresStableInput to force buffering elements
+ @RequiresStableInput
+ public void processElement(ProcessContext context) {
+ context.output(context.element());
+ }
+
+ @FinishBundle
+ public void finishBundle(FinishBundleContext context) {
+ context.output(
+ KV.of("key3", "finishBundle"),
+ BoundedWindow.TIMESTAMP_MIN_VALUE,
+ GlobalWindow.INSTANCE);
+ }
+ };
+
+ DoFnOperator.MultiOutputOutputManagerFactory<String> outputManagerFactory =
+ new DoFnOperator.MultiOutputOutputManagerFactory(
+ outputTag,
+ WindowedValue.getFullCoder(StringUtf8Coder.of(), GlobalWindow.Coder.INSTANCE));
+
+ Supplier<DoFnOperator<KV<String, String>, KV<String, String>>> doFnOperatorSupplier =
+ () ->
+ new DoFnOperator(
+ doFn,
+ "stepName",
+ windowedValueCoder,
+ null,
+ Collections.emptyMap(),
+ outputTag,
+ Collections.emptyList(),
+ outputManagerFactory,
+ WindowingStrategy.globalDefault(),
+ new HashMap<>(), /* side-input mapping */
+ Collections.emptyList(), /* side inputs */
+ options,
+ keyCoder,
+ keySelector,
+ DoFnSchemaInformation.create());
+
+ DoFnOperator<KV<String, String>, KV<String, String>> doFnOperator = doFnOperatorSupplier.get();
+ OneInputStreamOperatorTestHarness<
+ WindowedValue<KV<String, String>>, WindowedValue<KV<String, String>>>
+ testHarness =
+ new KeyedOneInputStreamOperatorTestHarness(
+ doFnOperator, keySelector, keySelector.getProducedType());
+
+ testHarness.open();
+
+ testHarness.processElement(
+ new StreamRecord<>(WindowedValue.valueInGlobalWindow(KV.of("key", "a"))));
+ testHarness.processElement(
+ new StreamRecord<>(WindowedValue.valueInGlobalWindow(KV.of("key", "b"))));
+ testHarness.processElement(
+ new StreamRecord<>(WindowedValue.valueInGlobalWindow(KV.of("key2", "c"))));
+ testHarness.processElement(
+ new StreamRecord<>(WindowedValue.valueInGlobalWindow(KV.of("key2", "d"))));
+
+ assertThat(Iterables.size(testHarness.getOutput()), is(0));
+
+ OperatorSubtaskState backup = testHarness.snapshot(0, 0);
+ doFnOperator.notifyCheckpointComplete(0L);
+
+ assertThat(numStartBundleCalled, is(1));
+ assertThat(
+ stripStreamRecordFromWindowedValue(testHarness.getOutput()),
+ contains(
+ WindowedValue.valueInGlobalWindow(KV.of("key2", "c")),
+ WindowedValue.valueInGlobalWindow(KV.of("key2", "d")),
+ WindowedValue.valueInGlobalWindow(KV.of("key", "a")),
+ WindowedValue.valueInGlobalWindow(KV.of("key", "b")),
+ WindowedValue.valueInGlobalWindow(KV.of("key3", "finishBundle"))));
+
+ doFnOperator = doFnOperatorSupplier.get();
+ testHarness =
+ new KeyedOneInputStreamOperatorTestHarness(
+ doFnOperator, keySelector, keySelector.getProducedType());
+
+ // restore from the snapshot
+ testHarness.initializeState(backup);
+ testHarness.open();
+
+ doFnOperator.notifyCheckpointComplete(0L);
+
+ assertThat(numStartBundleCalled, is(2));
+ assertThat(
+ stripStreamRecordFromWindowedValue(testHarness.getOutput()),
+ contains(
+ WindowedValue.valueInGlobalWindow(KV.of("key2", "c")),
+ WindowedValue.valueInGlobalWindow(KV.of("key2", "d")),
+ WindowedValue.valueInGlobalWindow(KV.of("key", "a")),
+ WindowedValue.valueInGlobalWindow(KV.of("key", "b")),
+ WindowedValue.valueInGlobalWindow(KV.of("key3", "finishBundle"))));
+
+ // repeat to see if elements are evicted
+ doFnOperator.notifyCheckpointComplete(1L);
+
+ assertThat(numStartBundleCalled, is(2));
+ assertThat(
+ stripStreamRecordFromWindowedValue(testHarness.getOutput()),
+ contains(
+ WindowedValue.valueInGlobalWindow(KV.of("key2", "c")),
+ WindowedValue.valueInGlobalWindow(KV.of("key2", "d")),
+ WindowedValue.valueInGlobalWindow(KV.of("key", "a")),
+ WindowedValue.valueInGlobalWindow(KV.of("key", "b")),
+ WindowedValue.valueInGlobalWindow(KV.of("key3", "finishBundle"))));
+ }
+
+ @Test(expected = IllegalStateException.class)
+ public void testFailOnRequiresStableInputAndDisabledCheckpointing() {
Review comment:
Test to check that execution fails if checkpointing is disabled.
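The body of that test is truncated by the diff context above. Purely for orientation, here is a minimal sketch of how such a check might be exercised; it assumes the DoFnOperator rejects @RequiresStableInput at construction time when no checkpointing interval is configured, and it mirrors the constructor arguments from the supplier shown earlier in this diff rather than the actual PR code (imports are those already present in DoFnOperatorTest):
{noformat}
@Test(expected = IllegalStateException.class)
public void testFailOnRequiresStableInputAndDisabledCheckpointing() {
  // Checkpointing is deliberately left disabled: no setCheckpointingInterval() call.
  FlinkPipelineOptions options = PipelineOptionsFactory.as(FlinkPipelineOptions.class);

  TupleTag<String> outputTag = new TupleTag<>("main-output");
  WindowedValue.ValueOnlyWindowedValueCoder<String> windowedValueCoder =
      WindowedValue.getValueOnlyCoder(StringUtf8Coder.of());

  DoFn<String, String> doFn =
      new DoFn<String, String>() {
        @ProcessElement
        // @RequiresStableInput needs elements buffered until a checkpoint completes,
        // which cannot be guaranteed while checkpointing is disabled.
        @RequiresStableInput
        public void processElement(ProcessContext context) {
          context.output(context.element());
        }
      };

  DoFnOperator.MultiOutputOutputManagerFactory<String> outputManagerFactory =
      new DoFnOperator.MultiOutputOutputManagerFactory(
          outputTag,
          WindowedValue.getFullCoder(StringUtf8Coder.of(), GlobalWindow.Coder.INSTANCE));

  // Assumed to throw IllegalStateException because stable input cannot be provided.
  new DoFnOperator<>(
      doFn,
      "stepName",
      windowedValueCoder,
      null,
      Collections.emptyMap(),
      outputTag,
      Collections.emptyList(),
      outputManagerFactory,
      WindowingStrategy.globalDefault(),
      new HashMap<>(), /* side-input mapping */
      Collections.emptyList(), /* side inputs */
      options,
      null,
      null,
      DoFnSchemaInformation.create());
}
{noformat}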
----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
For queries about this service, please contact Infrastructure at:
[email protected]
Issue Time Tracking
-------------------
Worklog Id: (was: 208920)
Time Spent: 4.5h (was: 4h 20m)
> KafkaIO's EOS mode does not work with FlinkRunner
> -------------------------------------------------
>
> Key: BEAM-6751
> URL: https://issues.apache.org/jira/browse/BEAM-6751
> Project: Beam
> Issue Type: Bug
> Components: io-java-kafka, runner-flink
> Reporter: Maximilian Michels
> Assignee: Maximilian Michels
> Priority: Major
> Fix For: 2.12.0
>
> Time Spent: 4.5h
> Remaining Estimate: 0h
>
> KafkaIO has a validation check which whitelists certain runners capable of
> providing exactly-once semantics:
> {noformat}
> if ("org.apache.beam.runners.direct.DirectRunner".equals(runner)
> || runner.startsWith("org.apache.beam.runners.dataflow.")
> || runner.startsWith("org.apache.beam.runners.spark.") {
> ...
> {noformat}
> Flink supports exactly-once checkpointing, but the Flink Runner can't
> utilize it in the way KafkaIO intends.
> I think we should remove the check in favor of checking for translation of
> {{@RequiresStableInput}}. Changes to KafkaIO might have to be made to support
> EOS efficiently with the Flink Runner.
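As an aside (not part of the original issue text): declaring the requirement is a matter of annotating a DoFn's processElement method, exactly as the test diff above does, which would give a runner that cannot translate the annotation a uniform place to reject the pipeline. A minimal sketch:
{noformat}
// Minimal illustration: a DoFn that declares it needs stable (replayable) input.
// Mirrors the usage in the DoFnOperatorTest diff above.
DoFn<String, String> requiresStableInputFn =
    new DoFn<String, String>() {
      @ProcessElement
      @RequiresStableInput // the runner must checkpoint/buffer elements before this runs
      public void processElement(ProcessContext context) {
        context.output(context.element());
      }
    };
{noformat}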
--
This message was sent by Atlassian JIRA
(v7.6.3#76005)