dawidwys commented on a change in pull request #6205: [FLINK-9642]Reduce the 
count to deal with state during a CEP process
URL: https://github.com/apache/flink/pull/6205#discussion_r209248515
 
 

 ##########
 File path: 
flink-libraries/flink-cep/src/main/java/org/apache/flink/cep/nfa/sharedbuffer/SharedBuffer.java
 ##########
 @@ -204,227 +133,78 @@ public NodeId put(
         * @throws Exception Thrown if the system cannot access the state.
         */
        public boolean isEmpty() throws Exception {
-               return Iterables.isEmpty(eventsBuffer.keys());
+               return Iterables.isEmpty(eventsBuffer.keys()) && 
Iterables.isEmpty(eventsBufferCache.keySet());
        }
 
        /**
-        * Returns all elements from the previous relation starting at the 
given entry.
-        *
-        * @param nodeId  id of the starting entry
-        * @param version Version of the previous relation which shall be 
extracted
-        * @return Collection of previous relations starting with the given 
value
-        * @throws Exception Thrown if the system cannot access the state.
+        * Puts an event into the cache.
+        * @param eventId id of the event
+        * @param event event body
         */
-       public List<Map<String, List<EventId>>> extractPatterns(
-                       final NodeId nodeId,
-                       final DeweyNumber version) throws Exception {
-
-               List<Map<String, List<EventId>>> result = new ArrayList<>();
-
-               // stack to remember the current extraction states
-               Stack<ExtractionState> extractionStates = new Stack<>();
-
-               // get the starting shared buffer entry for the previous 
relation
-               Lockable<SharedBufferNode> entryLock = entries.get(nodeId);
-
-               if (entryLock != null) {
-                       SharedBufferNode entry = entryLock.getElement();
-                       extractionStates.add(new 
ExtractionState(Tuple2.of(nodeId, entry), version, new Stack<>()));
-
-                       // use a depth first search to reconstruct the previous 
relations
-                       while (!extractionStates.isEmpty()) {
-                               final ExtractionState extractionState = 
extractionStates.pop();
-                               // current path of the depth first search
-                               final Stack<Tuple2<NodeId, SharedBufferNode>> 
currentPath = extractionState.getPath();
-                               final Tuple2<NodeId, SharedBufferNode> 
currentEntry = extractionState.getEntry();
-
-                               // termination criterion
-                               if (currentEntry == null) {
-                                       final Map<String, List<EventId>> 
completePath = new LinkedHashMap<>();
-
-                                       while (!currentPath.isEmpty()) {
-                                               final NodeId currentPathEntry = 
currentPath.pop().f0;
-
-                                               String page = 
currentPathEntry.getPageName();
-                                               List<EventId> values = 
completePath
-                                                       .computeIfAbsent(page, 
k -> new ArrayList<>());
-                                               
values.add(currentPathEntry.getEventId());
-                                       }
-                                       result.add(completePath);
-                               } else {
-
-                                       // append state to the path
-                                       currentPath.push(currentEntry);
-
-                                       boolean firstMatch = true;
-                                       for (SharedBufferEdge edge : 
currentEntry.f1.getEdges()) {
-                                               // we can only proceed if the 
current version is compatible to the version
-                                               // of this previous relation
-                                               final DeweyNumber 
currentVersion = extractionState.getVersion();
-                                               if 
(currentVersion.isCompatibleWith(edge.getDeweyNumber())) {
-                                                       final NodeId target = 
edge.getTarget();
-                                                       Stack<Tuple2<NodeId, 
SharedBufferNode>> newPath;
-
-                                                       if (firstMatch) {
-                                                               // for the 
first match we don't have to copy the current path
-                                                               newPath = 
currentPath;
-                                                               firstMatch = 
false;
-                                                       } else {
-                                                               newPath = new 
Stack<>();
-                                                               
newPath.addAll(currentPath);
-                                                       }
-
-                                                       
extractionStates.push(new ExtractionState(
-                                                               target != null 
? Tuple2.of(target, entries.get(target).getElement()) : null,
-                                                               
edge.getDeweyNumber(),
-                                                               newPath));
-                                               }
-                                       }
-                               }
-
-                       }
-               }
-               return result;
+       void cacheEvent(EventId eventId, Lockable<V> event) {
+               this.eventsBufferCache.put(eventId, event);
        }
 
-       public Map<String, List<V>> materializeMatch(Map<String, List<EventId>> 
match) {
-               return materializeMatch(match, new HashMap<>());
+       /**
+        * Puts a SharedBufferNode into the cache.
+        * @param nodeId id of the node
+        * @param entry SharedBufferNode
+        */
+       void cacheEntry(NodeId nodeId, Lockable<SharedBufferNode> entry) {
+               this.entryCache.put(nodeId, entry);
        }
 
-       public Map<String, List<V>> materializeMatch(Map<String, List<EventId>> 
match, Map<EventId, V> cache) {
-
-               Map<String, List<V>> materializedMatch = new 
LinkedHashMap<>(match.size());
-
-               for (Map.Entry<String, List<EventId>> pattern : 
match.entrySet()) {
-                       List<V> events = new 
ArrayList<>(pattern.getValue().size());
-                       for (EventId eventId : pattern.getValue()) {
-                               V event = cache.computeIfAbsent(eventId, id -> {
-                                       try {
-                                               return 
eventsBuffer.get(id).getElement();
-                                       } catch (Exception ex) {
-                                               throw new 
WrappingRuntimeException(ex);
-                                       }
-                               });
-                               events.add(event);
-                       }
-                       materializedMatch.put(pattern.getKey(), events);
-               }
-
-               return materializedMatch;
+       /**
+        * Removes an event from the cache and state.
+        * @param eventId id of the event
+        */
+       void removeEvent(EventId eventId) throws Exception {
+               this.eventsBufferCache.remove(eventId);
+               this.eventsBuffer.remove(eventId);
        }
 
        /**
-        * Increases the reference counter for the given entry so that it is not
-        * accidentally removed.
-        *
-        * @param node id of the entry
-        * @throws Exception Thrown if the system cannot access the state.
+        * remove a ShareBufferNode from cache and state.
 
 Review comment:
   remove -> Removes
   
   Empty line

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
us...@infra.apache.org


With regards,
Apache Git Services

Reply via email to