[ https://issues.apache.org/jira/browse/BEAM-7516?focusedWorklogId=376022&page=com.atlassian.jira.plugin.system.issuetabpanels:worklog-tabpanel#worklog-376022 ]
ASF GitHub Bot logged work on BEAM-7516:
----------------------------------------
Author: ASF GitHub Bot
Created on: 23/Jan/20 01:28
Start Date: 23/Jan/20 01:28
Worklog Time Spent: 10m
Work Description: robertwb commented on pull request #10291:
[BEAM-7516][BEAM-8823] FnApiRunner works with work queues, and a primitive watermark manager
URL: https://github.com/apache/beam/pull/10291#discussion_r369855425
##########
File path: sdks/python/apache_beam/runners/portability/fn_api_runner.py
##########
@@ -249,141 +250,68 @@ def done(self):
in self._req_worker_mapping.items()])
-class _ListBuffer(list):
-  """Used to support partitioning of a list."""
- def partition(self, n):
- # type: (int) -> List[List[bytes]]
- return [self[k::n] for k in range(n)]
+class _ProcessingQueueManager(object):
+ """Manages the queues for ProcessBundle inputs.
+  There are two queues:
+  - ready_inputs (_ProcessingQueueManager.KeyedQueue). This queue contains
+    input data that is ready to be processed: timers past their trigger
+    time, and element data awaiting processing.
+    The ready_inputs queue contains tuples of (stage_name, inputs), where
+    inputs are dictionaries mapping PCollection name to data buffers.
-class _GroupingBuffer(object):
-  """Used to accumulate grouped (shuffled) results."""
- def __init__(self,
- pre_grouped_coder, # type: coders.Coder
- post_grouped_coder, # type: coders.Coder
- windowing
- ):
- # type: (...) -> None
- self._key_coder = pre_grouped_coder.key_coder()
- self._pre_grouped_coder = pre_grouped_coder
- self._post_grouped_coder = post_grouped_coder
-    self._table = collections.defaultdict(list) # type: Optional[DefaultDict[bytes, List[Any]]]
- self._windowing = windowing
- self._grouped_output = None # type: Optional[List[List[bytes]]]
-
- def append(self, elements_data):
- # type: (bytes) -> None
- if self._grouped_output:
- raise RuntimeError('Grouping table append after read.')
- input_stream = create_InputStream(elements_data)
- coder_impl = self._pre_grouped_coder.get_impl()
- key_coder_impl = self._key_coder.get_impl()
- # TODO(robertwb): We could optimize this even more by using a
- # window-dropping coder for the data plane.
- is_trivial_windowing = self._windowing.is_default()
- while input_stream.size() > 0:
- windowed_key_value = coder_impl.decode_from_stream(input_stream, True)
- key, value = windowed_key_value.value
- self._table[key_coder_impl.encode(key)].append(
- value if is_trivial_windowing
- else windowed_key_value.with_value(value))
-
- def partition(self, n):
- # type: (int) -> List[List[bytes]]
- """ It is used to partition _GroupingBuffer to N parts. Once it is
- partitioned, it would not be re-partitioned with diff N. Re-partition
- is not supported now.
- """
- if not self._grouped_output:
- if self._windowing.is_default():
- globally_window = GlobalWindows.windowed_value(
- None,
- timestamp=GlobalWindow().max_timestamp(),
- pane_info=windowed_value.PaneInfo(
- is_first=True,
- is_last=True,
- timing=windowed_value.PaneInfoTiming.ON_TIME,
- index=0,
- nonspeculative_index=0)).with_value
- windowed_key_values = lambda key, values: [
- globally_window((key, values))]
+  - watermark_pending_inputs (_ProcessingQueueManager.KeyedQueue). This queue
+    contains input data that is not yet ready to be processed, and is blocked
+    on the watermark advancing. It contains tuples of
+    ((stage_name, watermark), inputs), where the watermark is the watermark
+    at which the inputs should be scheduled, and inputs are dictionaries
+    mapping PCollection name to data buffers.
+  """
+ class KeyedQueue(object):
+ def __init__(self):
+ self._q = collections.deque()
+ self._keyed_elements = {}
+
+ def enque(
+ self,
+ elm # type: Tuple[str, Dict[str, Union[_ListBuffer, _GroupingBuffer]]]
+ ):
+ # type: (...) -> None
+ key = elm[0]
+ incoming_inputs = elm[1]
+ if key in self._keyed_elements:
+ existing_inputs = self._keyed_elements[key][1]
+ for pcoll in incoming_inputs:
+ if incoming_inputs[pcoll] and pcoll in existing_inputs:
+ existing_inputs[pcoll].extend(incoming_inputs[pcoll])
+ elif incoming_inputs[pcoll]:
+ existing_inputs[pcoll] = incoming_inputs[pcoll]
else:
- # TODO(pabloem, BEAM-7514): Trigger driver needs access to the clock
-        # note that this only comes through if windowing is default - but what
-        # about having multiple firings on the global window.
- # May need to revise.
- trigger_driver = trigger.create_trigger_driver(self._windowing, True)
- windowed_key_values = trigger_driver.process_entire_key
- coder_impl = self._post_grouped_coder.get_impl()
- key_coder_impl = self._key_coder.get_impl()
- self._grouped_output = [[] for _ in range(n)]
- output_stream_list = []
- for _ in range(n):
- output_stream_list.append(create_OutputStream())
-      for idx, (encoded_key, windowed_values) in enumerate(self._table.items()):
- key = key_coder_impl.decode(encoded_key)
- for wkvs in windowed_key_values(key, windowed_values):
- coder_impl.encode_to_stream(wkvs, output_stream_list[idx % n], True)
- for ix, output_stream in enumerate(output_stream_list):
- self._grouped_output[ix] = [output_stream.get()]
- self._table = None
- return self._grouped_output
-
- def __iter__(self):
- # type: () -> Iterator[bytes]
- """ Since partition() returns a list of lists, add this __iter__ to return
- a list to simplify code when we need to iterate through ALL elements of
- _GroupingBuffer.
- """
- return itertools.chain(*self.partition(1))
+ self._keyed_elements[key] = elm
+      self._q.appendleft(elm)
+
+    def deque(self):
+ elm = self._q.pop()
+ key = elm[0]
+ del self._keyed_elements[key]
+ return elm
-class _WindowGroupingBuffer(object):
- """Used to partition windowed side inputs."""
- def __init__(self,
- access_pattern,
- coder # type: coders.WindowedValueCoder
- ):
- # type: (...) -> None
- # Here's where we would use a different type of partitioning
- # (e.g. also by key) for a different access pattern.
- if access_pattern.urn == common_urns.side_inputs.ITERABLE.urn:
- self._kv_extractor = lambda value: ('', value)
- self._key_coder = coders.SingletonCoder('') # type: coders.Coder
- self._value_coder = coder.wrapped_value_coder
- elif access_pattern.urn == common_urns.side_inputs.MULTIMAP.urn:
- self._kv_extractor = lambda value: value
- self._key_coder = coder.wrapped_value_coder.key_coder()
- self._value_coder = (
- coder.wrapped_value_coder.value_coder())
- else:
- raise ValueError(
- "Unknown access pattern: '%s'" % access_pattern.urn)
- self._windowed_value_coder = coder
- self._window_coder = coder.window_coder
-    self._values_by_window = collections.defaultdict(list) # type: DefaultDict[Tuple[str, BoundedWindow], List[Any]]
-
- def append(self, elements_data):
- # type: (bytes) -> None
- input_stream = create_InputStream(elements_data)
- while input_stream.size() > 0:
- windowed_value = self._windowed_value_coder.get_impl(
- ).decode_from_stream(input_stream, True)
- key, value = self._kv_extractor(windowed_value.value)
- for window in windowed_value.windows:
- self._values_by_window[key, window].append(value)
-
- def encoded_items(self):
- # type: () -> Iterator[Tuple[bytes, bytes, bytes]]
- value_coder_impl = self._value_coder.get_impl()
- key_coder_impl = self._key_coder.get_impl()
- for (key, window), values in self._values_by_window.items():
- encoded_window = self._window_coder.encode(window)
- encoded_key = key_coder_impl.encode_nested(key)
- output_stream = create_OutputStream()
- for value in values:
- value_coder_impl.encode_to_stream(value, output_stream, True)
- yield encoded_key, encoded_window, output_stream.get()
+ def __len__(self):
+ return len(self._q)
+
+ def __repr__(self):
Review comment:
Remove.
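
As context for the comment thread, the enqueue-merge behavior of the
KeyedQueue under review can be illustrated in isolation. The following is a
minimal, self-contained sketch, not the PR's code: plain lists stand in for
the _ListBuffer/_GroupingBuffer data buffers, and the stage and PCollection
names are hypothetical.

import collections

class KeyedQueue(object):
  """Sketch: a FIFO queue that merges enqueued inputs by key."""
  def __init__(self):
    self._q = collections.deque()
    self._keyed_elements = {}

  def enque(self, elm):
    key, incoming_inputs = elm
    if key in self._keyed_elements:
      # Key already queued: merge the incoming per-PCollection buffers
      # into the existing entry instead of appending a duplicate.
      existing_inputs = self._keyed_elements[key][1]
      for pcoll, buf in incoming_inputs.items():
        if buf and pcoll in existing_inputs:
          existing_inputs[pcoll].extend(buf)
        elif buf:
          existing_inputs[pcoll] = buf
    else:
      self._keyed_elements[key] = elm
      self._q.appendleft(elm)

  def deque(self):
    elm = self._q.pop()
    del self._keyed_elements[elm[0]]
    return elm

  def __len__(self):
    return len(self._q)

# Hypothetical stage/PCollection names; plain lists stand in for buffers.
q = KeyedQueue()
q.enque(('stage1', {'pcoll_a': [b'x']}))
q.enque(('stage1', {'pcoll_a': [b'y'], 'pcoll_b': [b'z']}))
assert len(q) == 1  # second enque merged into the first entry
assert q.deque() == ('stage1', {'pcoll_a': [b'x', b'y'], 'pcoll_b': [b'z']})

The point of the merge is that each stage key appears at most once in the
queue: enqueuing more inputs for an already-queued stage extends that entry's
buffers rather than scheduling the stage a second time.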
----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
For queries about this service, please contact Infrastructure at:
[email protected]
Issue Time Tracking
-------------------
Worklog Id: (was: 376022)
Time Spent: 5h 10m (was: 5h)
> Add a watermark manager for the fn_api_runner
> ---------------------------------------------
>
> Key: BEAM-7516
> URL: https://issues.apache.org/jira/browse/BEAM-7516
> Project: Beam
> Issue Type: Sub-task
> Components: sdk-py-core
> Reporter: Pablo Estrada
> Assignee: Pablo Estrada
> Priority: Major
> Time Spent: 5h 10m
> Remaining Estimate: 0h
>
> To track watermarks for each stage
--
This message was sent by Atlassian Jira
(v8.3.4#803005)