This is an automated email from the ASF dual-hosted git repository. akshayrai09 pushed a commit to branch master in repository https://gitbox.apache.org/repos/asf/incubator-pinot.git
The following commit(s) were added to refs/heads/master by this push: new ab6dd9f [TE] Trigger expression based grouping of anomalies - AND, OR and combinations (#4354) ab6dd9f is described below commit ab6dd9fa1b05ad2e395998e5dca3343c78983df3 Author: Akshay Rai <akshayra...@gmail.com> AuthorDate: Tue Jun 25 16:25:52 2019 -0700 [TE] Trigger expression based grouping of anomalies - AND, OR and combinations (#4354) Supports grouping of anomalies across multiple metrics and entities on the following criteria - 'AND' criteria - Entity has anomaly if both sub-entities A and B have anomalies at the same time. - 'OR' criteria - Entity has anomaly if either sub-entity A or B has anomalies. - Other nested combinations are also supported. For example, A && (B || C) - The expression input is currently fed by explicitly defining the hierarchy (parse tree) in the form of a map in the yaml config. String expression parser will be supported in the near future. --- .../pinot/thirdeye/detection/DetectionUtils.java | 57 +++++ .../components/TriggerConditionGrouper.java | 202 ++++++++++++++++++ .../spec/TriggerConditionGrouperSpec.java | 62 ++++++ .../thirdeye/detection/wrapper/GrouperWrapper.java | 19 +- .../components/TriggerConditionGrouperTest.java | 230 +++++++++++++++++++++ .../yaml/translator/pipeline-config-5.yaml | 4 +- 6 files changed, 570 insertions(+), 4 deletions(-) diff --git a/thirdeye/thirdeye-pinot/src/main/java/org/apache/pinot/thirdeye/detection/DetectionUtils.java b/thirdeye/thirdeye-pinot/src/main/java/org/apache/pinot/thirdeye/detection/DetectionUtils.java index 1b5096b..c81d7ce 100644 --- a/thirdeye/thirdeye-pinot/src/main/java/org/apache/pinot/thirdeye/detection/DetectionUtils.java +++ b/thirdeye/thirdeye-pinot/src/main/java/org/apache/pinot/thirdeye/detection/DetectionUtils.java @@ -24,6 +24,7 @@ import java.lang.reflect.ParameterizedType; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; +import java.util.Comparator; 
import java.util.List; import java.util.Map; import java.util.concurrent.TimeUnit; @@ -53,6 +54,13 @@ import static org.apache.pinot.thirdeye.dataframe.util.DataFrameUtils.*; public class DetectionUtils { private static final String PROP_BASELINE_PROVIDER_COMPONENT_NAME = "baselineProviderComponentName"; + private static final Comparator<MergedAnomalyResultDTO> COMPARATOR = new Comparator<MergedAnomalyResultDTO>() { + @Override + public int compare(MergedAnomalyResultDTO o1, MergedAnomalyResultDTO o2) { + return Long.compare(o1.getStartTime(), o2.getStartTime()); + } + }; + // TODO anomaly should support multimap public static DimensionMap toFilterMap(Multimap<String, String> filters) { DimensionMap map = new DimensionMap(); @@ -173,6 +181,55 @@ public class DetectionUtils { return anomaly; } + public static void setEntityChildMapping(MergedAnomalyResultDTO parent, MergedAnomalyResultDTO child1) { + if (child1 != null) { + parent.getChildren().add(child1); + child1.setChild(true); + } + + parent.setChild(false); + } + + public static MergedAnomalyResultDTO makeEntityAnomaly() { + MergedAnomalyResultDTO entityAnomaly = new MergedAnomalyResultDTO(); + // TODO: define anomaly type + //entityAnomaly.setType(); + entityAnomaly.setChild(false); + + return entityAnomaly; + } + + public static MergedAnomalyResultDTO makeEntityCopy(MergedAnomalyResultDTO anomaly) { + MergedAnomalyResultDTO anomalyCopy = makeEntityAnomaly(); + anomalyCopy.setStartTime(anomaly.getStartTime()); + anomalyCopy.setEndTime(anomaly.getEndTime()); + anomalyCopy.setChild(anomaly.isChild()); + anomalyCopy.setChildren(anomaly.getChildren()); + return anomalyCopy; + } + + public static MergedAnomalyResultDTO makeParentEntityAnomaly(MergedAnomalyResultDTO childAnomaly) { + MergedAnomalyResultDTO newEntityAnomaly = makeEntityAnomaly(); + newEntityAnomaly.setStartTime(childAnomaly.getStartTime()); + newEntityAnomaly.setEndTime(childAnomaly.getEndTime()); + setEntityChildMapping(newEntityAnomaly, 
childAnomaly); + return newEntityAnomaly; + } + + public static List<MergedAnomalyResultDTO> mergeAndSortAnomalies(List<MergedAnomalyResultDTO> anomalyListA, List<MergedAnomalyResultDTO> anomalyListB) { + List<MergedAnomalyResultDTO> anomalies = new ArrayList<>(); + if (anomalyListA != null) { + anomalies.addAll(anomalyListA); + } + if (anomalyListB != null) { + anomalies.addAll(anomalyListB); + } + + // Sort by increasing order of anomaly start time + Collections.sort(anomalies, COMPARATOR); + return anomalies; + } + /** * Helper for consolidate last time stamps in all nested detection pipelines * @param nestedLastTimeStamps all nested last time stamps diff --git a/thirdeye/thirdeye-pinot/src/main/java/org/apache/pinot/thirdeye/detection/components/TriggerConditionGrouper.java b/thirdeye/thirdeye-pinot/src/main/java/org/apache/pinot/thirdeye/detection/components/TriggerConditionGrouper.java new file mode 100644 index 0000000..a68ae86 --- /dev/null +++ b/thirdeye/thirdeye-pinot/src/main/java/org/apache/pinot/thirdeye/detection/components/TriggerConditionGrouper.java @@ -0,0 +1,202 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.apache.pinot.thirdeye.detection.components; + +import com.google.common.base.Preconditions; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.stream.Collectors; +import org.apache.commons.collections4.MapUtils; +import org.apache.pinot.thirdeye.datalayer.dto.MergedAnomalyResultDTO; +import org.apache.pinot.thirdeye.detection.ConfigUtils; +import org.apache.pinot.thirdeye.detection.InputDataFetcher; +import org.apache.pinot.thirdeye.detection.annotation.Components; +import org.apache.pinot.thirdeye.detection.annotation.DetectionTag; +import org.apache.pinot.thirdeye.detection.spec.TriggerConditionGrouperSpec; +import org.apache.pinot.thirdeye.detection.spi.components.Grouper; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import static org.apache.pinot.thirdeye.detection.DetectionUtils.*; + + +/** + * Expression based grouper - supports AND, OR and nested combinations of grouping + */ +@Components(title = "TriggerCondition", type = "GROUPER", + tags = {DetectionTag.GROUPER}, description = "An expression based grouper") +public class TriggerConditionGrouper implements Grouper<TriggerConditionGrouperSpec> { + protected static final Logger LOG = LoggerFactory.getLogger(TriggerConditionGrouper.class); + + private String expression; + private String operator; + private Map<String, Object> leftOp; + private Map<String, Object> rightOp; + private InputDataFetcher dataFetcher; + + static final String PROP_DETECTOR_COMPONENT_NAME = "detectorComponentName"; + static final String PROP_AND = "and"; + static final String PROP_OR = "or"; + static final String PROP_OPERATOR = "operator"; + static final String PROP_LEFT_OP = "leftOp"; + static final String PROP_RIGHT_OP = "rightOp"; + + /** + * Group based on 'AND' criteria - Entity has anomaly if both sub-entities A and B have anomalies + * at the same time. 
This means we find anomaly overlapping interval. + * + * Since the anomalies from the respective entities/metrics are merged + * before calling the grouper, we do not have to deal with overlapping + * anomalies within an entity/metric + * + * Sort anomalies and incrementally compare two anomalies for overlap criteria; break when no overlap + */ + private List<MergedAnomalyResultDTO> andGrouping( + List<MergedAnomalyResultDTO> anomalyListA, List<MergedAnomalyResultDTO> anomalyListB) { + Set<MergedAnomalyResultDTO> groupedAnomalies = new HashSet<>(); + List<MergedAnomalyResultDTO> anomalies = mergeAndSortAnomalies(anomalyListA, anomalyListB); + if (anomalies.isEmpty()) { + return anomalies; + } + + for (int i = 0; i < anomalies.size(); i++) { + for (int j = i + 1; j < anomalies.size(); j++) { + // Check for overlap and output it + if (anomalies.get(j).getStartTime() <= anomalies.get(i).getEndTime()) { + MergedAnomalyResultDTO currentAnomaly = makeParentEntityAnomaly(anomalies.get(i)); + currentAnomaly.setEndTime(Math.min(currentAnomaly.getEndTime(), anomalies.get(j). getEndTime())); + currentAnomaly.setStartTime(anomalies.get(j). getStartTime()); + setEntityChildMapping(currentAnomaly, anomalies.get(j)); + + groupedAnomalies.add(currentAnomaly); + } else { + break; + } + } + } + + return new ArrayList<>(groupedAnomalies); + } + + /** + * Group based on 'OR' criteria - Entity has anomaly if either sub-entity A or B have anomalies. + * This means we find the total anomaly coverage. 
+ * + * Since the anomalies from the respective entities/metrics are merged + * before calling the grouper, we do not have to deal with overlapping + * anomalies within an entity/metric + * + * Sort anomalies by start time and incrementally merge anomalies + */ + private List<MergedAnomalyResultDTO> orGrouping( + List<MergedAnomalyResultDTO> anomalyListA, List<MergedAnomalyResultDTO> anomalyListB) { + Set<MergedAnomalyResultDTO> groupedAnomalies = new HashSet<>(); + List<MergedAnomalyResultDTO> anomalies = mergeAndSortAnomalies(anomalyListA, anomalyListB); + if (anomalies.isEmpty()) { + return anomalies; + } + + MergedAnomalyResultDTO currentAnomaly = makeParentEntityAnomaly(anomalies.get(0)); + for (int i = 1; i < anomalies.size(); i++) { + if (anomalies.get(i).getStartTime() <= currentAnomaly.getEndTime()) { + // Partial or full overlap + currentAnomaly.setEndTime(Math.max(anomalies.get(i).getEndTime(), currentAnomaly.getEndTime())); + setEntityChildMapping(currentAnomaly, anomalies.get(i)); + } else { + // No overlap + groupedAnomalies.add(currentAnomaly); + currentAnomaly = makeParentEntityAnomaly(anomalies.get(i)); + } + } + groupedAnomalies.add(currentAnomaly); + + return new ArrayList<>(groupedAnomalies); + } + + /** + * Groups the anomalies based on the parsed operator tree + */ + private List<MergedAnomalyResultDTO> groupAnomaliesByOperator(Map<String, Object> operatorNode, List<MergedAnomalyResultDTO> anomalies) { + Preconditions.checkNotNull(operatorNode); + + // Base condition - If reached leaf node, then return the anomalies corresponding to the entity/metric + String value = MapUtils.getString(operatorNode, "value"); + if (value != null) { + return anomalies.stream().filter(anomaly -> + anomaly.getProperties() != null && anomaly.getProperties().get(PROP_DETECTOR_COMPONENT_NAME) != null + && anomaly.getProperties().get(PROP_DETECTOR_COMPONENT_NAME).startsWith(value) + ).collect(Collectors.toList()); + } + + String operator = 
MapUtils.getString(operatorNode, PROP_OPERATOR); + Preconditions.checkNotNull(operator, "No operator provided!"); + Map<String, Object> leftOp = ConfigUtils.getMap(operatorNode.get(PROP_LEFT_OP)); + Map<String, Object> rightOp = ConfigUtils.getMap(operatorNode.get(PROP_RIGHT_OP)); + + // Post-order traversal - find anomalies from left subtree and right sub-tree and then group them + List<MergedAnomalyResultDTO> leftAnomalies = groupAnomaliesByOperator(leftOp, anomalies); + List<MergedAnomalyResultDTO> rightAnomalies = groupAnomaliesByOperator(rightOp, anomalies); + if (operator.equalsIgnoreCase(PROP_AND)) { + return andGrouping(leftAnomalies, rightAnomalies); + } else if (operator.equalsIgnoreCase(PROP_OR)) { + return orGrouping(leftAnomalies, rightAnomalies); + } else { + throw new RuntimeException("Unsupported operator"); + } + } + + /** + * Groups the anomalies based on the operator string expression + */ + private List<MergedAnomalyResultDTO> groupAnomaliesByExpression(String expression, List<MergedAnomalyResultDTO> anomalies) { + groupAnomaliesByOperator(buildOperatorTree(expression), anomalies); + return anomalies; + } + + // TODO: Build parse tree from string expression and execute + private Map<String, Object> buildOperatorTree(String expression) { + return new HashMap<>(); + } + + @Override + public List<MergedAnomalyResultDTO> group(List<MergedAnomalyResultDTO> anomalies) { + if (operator != null) { + Map<String, Object> operatorTreeRoot = new HashMap<>(); + operatorTreeRoot.put(PROP_OPERATOR, operator); + operatorTreeRoot.put(PROP_LEFT_OP, leftOp); + operatorTreeRoot.put(PROP_RIGHT_OP, rightOp); + return groupAnomaliesByOperator(operatorTreeRoot, anomalies); + } else { + return groupAnomaliesByExpression(expression, anomalies); + } + } + + @Override + public void init(TriggerConditionGrouperSpec spec, InputDataFetcher dataFetcher) { + this.expression = spec.getExpression(); + this.operator = spec.getOperator(); + this.leftOp = spec.getLeftOp(); + 
this.rightOp = spec.getRightOp(); + this.dataFetcher = dataFetcher; + } +} diff --git a/thirdeye/thirdeye-pinot/src/main/java/org/apache/pinot/thirdeye/detection/spec/TriggerConditionGrouperSpec.java b/thirdeye/thirdeye-pinot/src/main/java/org/apache/pinot/thirdeye/detection/spec/TriggerConditionGrouperSpec.java new file mode 100644 index 0000000..543caf0 --- /dev/null +++ b/thirdeye/thirdeye-pinot/src/main/java/org/apache/pinot/thirdeye/detection/spec/TriggerConditionGrouperSpec.java @@ -0,0 +1,62 @@ +/* + * Copyright (C) 2014-2018 LinkedIn Corp. (pinot-c...@linkedin.com) + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.pinot.thirdeye.detection.spec; + +import com.fasterxml.jackson.annotation.JsonIgnoreProperties; +import java.util.Map; + + +@JsonIgnoreProperties(ignoreUnknown = true) +public class TriggerConditionGrouperSpec extends AbstractSpec { + private String expression; + private String operator; + private Map<String, Object> leftOp; + private Map<String, Object> rightOp; + + public String getExpression() { + return expression; + } + + public void setExpression(String expression) { + this.expression = expression; + } + + public String getOperator() { + return operator; + } + + public void setOperator(String operator) { + this.operator = operator; + } + + public Map<String, Object> getLeftOp() { + return leftOp; + } + + public void setLeftOp(Map<String, Object> leftOp) { + this.leftOp = leftOp; + } + + public Map<String, Object> getRightOp() { + return rightOp; + } + + public void setRightOp(Map<String, Object> rightOp) { + this.rightOp = rightOp; + } +} + diff --git a/thirdeye/thirdeye-pinot/src/main/java/org/apache/pinot/thirdeye/detection/wrapper/GrouperWrapper.java b/thirdeye/thirdeye-pinot/src/main/java/org/apache/pinot/thirdeye/detection/wrapper/GrouperWrapper.java index a839b7f..e23e37b 100644 --- a/thirdeye/thirdeye-pinot/src/main/java/org/apache/pinot/thirdeye/detection/wrapper/GrouperWrapper.java +++ b/thirdeye/thirdeye-pinot/src/main/java/org/apache/pinot/thirdeye/detection/wrapper/GrouperWrapper.java @@ -47,11 +47,15 @@ public class GrouperWrapper extends DetectionPipeline { private static final String PROP_NESTED = "nested"; private static final String PROP_CLASS_NAME = "className"; private static final String PROP_GROUPER = "grouper"; + private static final String PROP_DETECTOR = "detector"; + private static final String PROP_DETECTOR_COMPONENT_NAME = "detectorComponentName"; private final List<Map<String, Object>> nestedProperties; private final Grouper grouper; private final String grouperName; + private final String detectorName; + 
private final String entityName; public GrouperWrapper(DataProvider provider, DetectionConfigDTO config, long startTime, long endTime) throws Exception { @@ -64,6 +68,9 @@ public class GrouperWrapper extends DetectionPipeline { this.grouperName = DetectionUtils.getComponentKey(MapUtils.getString(config.getProperties(), PROP_GROUPER)); Preconditions.checkArgument(this.config.getComponents().containsKey(this.grouperName)); this.grouper = (Grouper) this.config.getComponents().get(this.grouperName); + + this.entityName = MapUtils.getString(config.getProperties(), PROP_DETECTOR); + this.detectorName = DetectionUtils.getComponentKey(entityName); } /** @@ -74,7 +81,6 @@ public class GrouperWrapper extends DetectionPipeline { public final DetectionPipelineResult run() throws Exception { List<MergedAnomalyResultDTO> candidates = new ArrayList<>(); Map<String, Object> diagnostics = new HashMap<>(); - List<MergedAnomalyResultDTO> generated = new ArrayList<>(); List<PredictionResult> predictionResults = new ArrayList<>(); List<EvaluationDTO> evaluations = new ArrayList<>(); @@ -94,7 +100,6 @@ public class GrouperWrapper extends DetectionPipeline { DetectionPipelineResult intermediate = pipeline.run(); lastTimeStamps.add(intermediate.getLastTimestamp()); - generated.addAll(intermediate.getAnomalies()); predictionResults.addAll(intermediate.getPredictions()); evaluations.addAll(intermediate.getEvaluations()); diagnostics.putAll(intermediate.getDiagnostics()); @@ -103,6 +108,16 @@ public class GrouperWrapper extends DetectionPipeline { List<MergedAnomalyResultDTO> anomalies = this.grouper.group(candidates); + for (MergedAnomalyResultDTO anomaly : anomalies) { + if (anomaly.isChild()) { + throw new RuntimeException("Child anomalies returned by grouper. It should always return parent anomalies" + + " with child mapping. 
Detection id: " + this.config.getId() + ", detector name: " + this.detectorName); + } + + anomaly.setDetectionConfigId(this.config.getId()); + anomaly.getProperties().put(PROP_DETECTOR_COMPONENT_NAME, this.detectorName); + } + return new DetectionPipelineResult(anomalies, DetectionUtils.consolidateNestedLastTimeStamps(lastTimeStamps), predictionResults, evaluations).setDiagnostics(diagnostics); } diff --git a/thirdeye/thirdeye-pinot/src/test/java/org/apache/pinot/thirdeye/detection/components/TriggerConditionGrouperTest.java b/thirdeye/thirdeye-pinot/src/test/java/org/apache/pinot/thirdeye/detection/components/TriggerConditionGrouperTest.java new file mode 100644 index 0000000..19870f3 --- /dev/null +++ b/thirdeye/thirdeye-pinot/src/test/java/org/apache/pinot/thirdeye/detection/components/TriggerConditionGrouperTest.java @@ -0,0 +1,230 @@ +/* + * Copyright (C) 2014-2018 LinkedIn Corp. (pinot-c...@linkedin.com) + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.pinot.thirdeye.detection.components; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import org.apache.pinot.thirdeye.datalayer.dto.MergedAnomalyResultDTO; +import org.apache.pinot.thirdeye.detection.DetectionTestUtils; +import org.apache.pinot.thirdeye.detection.spec.TriggerConditionGrouperSpec; +import org.testng.Assert; +import org.testng.annotations.Test; + +import static org.apache.pinot.thirdeye.detection.DetectionUtils.*; +import static org.apache.pinot.thirdeye.detection.components.TriggerConditionGrouper.*; + + +public class TriggerConditionGrouperTest { + + private static final String PROP_VALUE = "value"; + + public static MergedAnomalyResultDTO makeAnomaly(long start, long end, String entity) { + MergedAnomalyResultDTO anomaly = DetectionTestUtils.makeAnomaly(1000l, start, end, null, null, Collections.<String, String>emptyMap()); + Map<String, String> props = new HashMap<>(); + props.put(PROP_DETECTOR_COMPONENT_NAME, entity); + anomaly.setProperties(props); + return anomaly; + } + + /** + * + * 0 1000 1500 2000 + * A |-------------| |-----------| + * + * 500 2000 2500 3000 + * B |--------------------------| |---------| + * + * 500 1000 1500 2000 + * A && B |-------| |-----------| + * + */ + @Test + public void testAndGrouping() { + TriggerConditionGrouper grouper = new TriggerConditionGrouper(); + + List<MergedAnomalyResultDTO> anomalies = new ArrayList<>(); + anomalies.add(makeAnomaly(0, 1000, "entityA")); + anomalies.add(makeAnomaly(500, 2000, "entityB")); + anomalies.add(makeAnomaly(1500, 2000, "entityA")); + anomalies.add(makeAnomaly(2500, 3000, "entityB")); + + TriggerConditionGrouperSpec spec = new TriggerConditionGrouperSpec(); + spec.setOperator(PROP_AND); + Map<String, Object> leftOp = new HashMap<>(); + leftOp.put(PROP_VALUE, "entityA"); + spec.setLeftOp(leftOp); + + Map<String, 
Object> rigthOp = new HashMap<>(); + rigthOp.put(PROP_VALUE, "entityB"); + spec.setRightOp(rigthOp); + + grouper.init(spec, null); + List<MergedAnomalyResultDTO> groupedAnomalies = grouper.group(anomalies); + + Assert.assertEquals(groupedAnomalies.size(), 2); + + Set<MergedAnomalyResultDTO> children = new HashSet<>(); + for (MergedAnomalyResultDTO anomaly : groupedAnomalies) { + if (anomaly.getChildren() != null) { + children.addAll(anomaly.getChildren()); + } + } + Assert.assertEquals(children.size(), 3); + + groupedAnomalies = mergeAndSortAnomalies(groupedAnomalies, null); + Assert.assertEquals(groupedAnomalies.get(0).getStartTime(), 500); + Assert.assertEquals(groupedAnomalies.get(0).getEndTime(), 1000); + Assert.assertEquals(groupedAnomalies.get(1).getStartTime(), 1500); + Assert.assertEquals(groupedAnomalies.get(1).getEndTime(), 2000); + } + + /** + * + * 0 1000 1500 2000 + * A |-------------| |-----------| + * + * 500 2000 2500 3000 + * B |--------------------------| |---------| + * + * 0 2000 2500 3000 + * A || B |--------------------------------| |---------| + * + */ + @Test + public void testOrGrouping() { + TriggerConditionGrouper grouper = new TriggerConditionGrouper(); + + List<MergedAnomalyResultDTO> anomalies = new ArrayList<>(); + anomalies.add(makeAnomaly(0, 1000, "entityA")); + anomalies.add(makeAnomaly(500, 2000, "entityB")); + anomalies.add(makeAnomaly(1500, 2000, "entityA")); + anomalies.add(makeAnomaly(2500, 3000, "entityB")); + + TriggerConditionGrouperSpec spec = new TriggerConditionGrouperSpec(); + spec.setOperator(PROP_OR); + Map<String, Object> leftOp = new HashMap<>(); + leftOp.put(PROP_VALUE, "entityA"); + spec.setLeftOp(leftOp); + + Map<String, Object> rigthOp = new HashMap<>(); + rigthOp.put(PROP_VALUE, "entityB"); + spec.setRightOp(rigthOp); + + grouper.init(spec, null); + List<MergedAnomalyResultDTO> groupedAnomalies = grouper.group(anomalies); + + Assert.assertEquals(groupedAnomalies.size(), 2); + + Set<MergedAnomalyResultDTO> 
children = new HashSet<>(); + for (MergedAnomalyResultDTO anomaly : groupedAnomalies) { + if (anomaly.getChildren() != null) { + children.addAll(anomaly.getChildren()); + } + } + Assert.assertEquals(children.size(), 4); + + groupedAnomalies = mergeAndSortAnomalies(groupedAnomalies, null); + Assert.assertEquals(groupedAnomalies.get(0).getStartTime(), 0); + Assert.assertEquals(groupedAnomalies.get(0).getEndTime(), 2000); + Assert.assertEquals(groupedAnomalies.get(1).getStartTime(), 2500); + Assert.assertEquals(groupedAnomalies.get(1).getEndTime(), 3000); + } + + /** + * + * 0 1000 1500 2000 + * A |-------------| |-----------| + * + * 500 2000 2500 3000 + * B |-------------------------| |---------| + * + * 1600 1900 + * C |----| + * + * 500 2000 2500 3000 + * B || C |-------------------------| |---------| + * + * 500 1000 1500 2000 + * A && (B || C) |------| |----------| + * + */ + @Test + public void testAndOrGrouping() { + TriggerConditionGrouper grouper = new TriggerConditionGrouper(); + + List<MergedAnomalyResultDTO> anomalies = new ArrayList<>(); + anomalies.add(makeAnomaly(0, 1000, "entityA")); + anomalies.add(makeAnomaly(1500, 2000, "entityA")); + anomalies.add(makeAnomaly(500, 2000, "entityB")); + anomalies.add(makeAnomaly(2500, 3000, "entityB")); + anomalies.add(makeAnomaly(1600, 1900, "entityC")); + + TriggerConditionGrouperSpec spec = new TriggerConditionGrouperSpec(); + + Map<String, Object> leftOp = new HashMap<>(); + leftOp.put(PROP_VALUE, "entityA"); + Map<String, Object> leftSubOp = new HashMap<>(); + leftSubOp.put(PROP_VALUE, "entityB"); + Map<String, Object> rightSubOp = new HashMap<>(); + rightSubOp.put(PROP_VALUE, "entityC"); + + Map<String, Object> rigthOp = new HashMap<>(); + rigthOp.put(PROP_OPERATOR, PROP_OR); + rigthOp.put(PROP_LEFT_OP, leftSubOp); + rigthOp.put(PROP_RIGHT_OP, rightSubOp); + + spec.setOperator(PROP_AND); + spec.setLeftOp(leftOp); + spec.setRightOp(rigthOp); + + grouper.init(spec, null); + List<MergedAnomalyResultDTO> 
groupedAnomalies = grouper.group(anomalies); + + Assert.assertEquals(groupedAnomalies.size(), 2); + + Set<MergedAnomalyResultDTO> children = new HashSet<>(); + for (MergedAnomalyResultDTO anomaly : groupedAnomalies) { + children.addAll(getAllChildAnomalies(anomaly)); + } + Assert.assertEquals(children.size(), 5); + + groupedAnomalies = mergeAndSortAnomalies(groupedAnomalies, null); + Assert.assertEquals(groupedAnomalies.get(0).getStartTime(), 500); + Assert.assertEquals(groupedAnomalies.get(0).getEndTime(), 1000); + Assert.assertEquals(groupedAnomalies.get(1).getStartTime(), 1500); + Assert.assertEquals(groupedAnomalies.get(1).getEndTime(), 2000); + } + + private List<MergedAnomalyResultDTO> getAllChildAnomalies(MergedAnomalyResultDTO anomaly) { + List<MergedAnomalyResultDTO> childAnomalies = new ArrayList<>(); + if (anomaly == null || anomaly.getChildren() == null) { + return childAnomalies; + } + + for (MergedAnomalyResultDTO childAnomaly : anomaly.getChildren()) { + childAnomalies.add(childAnomaly); + childAnomalies.addAll(getAllChildAnomalies(childAnomaly)); + } + + return childAnomalies; + } +} \ No newline at end of file diff --git a/thirdeye/thirdeye-pinot/src/test/resources/org/apache/pinot/thirdeye/detection/yaml/translator/pipeline-config-5.yaml b/thirdeye/thirdeye-pinot/src/test/resources/org/apache/pinot/thirdeye/detection/yaml/translator/pipeline-config-5.yaml index 11faacb..19484e8 100644 --- a/thirdeye/thirdeye-pinot/src/test/resources/org/apache/pinot/thirdeye/detection/yaml/translator/pipeline-config-5.yaml +++ b/thirdeye/thirdeye-pinot/src/test/resources/org/apache/pinot/thirdeye/detection/yaml/translator/pipeline-config-5.yaml @@ -42,7 +42,7 @@ alerts: name: composite_alert_on_entity alerts: - type: METRIC_ALERT - name: metric alert on test_metric + name: metric_alert_on_test_metric metric: test_metric dataset: test_dataset rules: @@ -60,4 +60,4 @@ alerts: - type: THRESHOLD name: maxThreshold_1 params: - max: 100 + max: 100 \ No newline at end of 
file --------------------------------------------------------------------- To unsubscribe, e-mail: commits-unsubscr...@pinot.apache.org For additional commands, e-mail: commits-h...@pinot.apache.org