This is an automated email from the ASF dual-hosted git repository.

akshayrai09 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/incubator-pinot.git


The following commit(s) were added to refs/heads/master by this push:
     new 983c5d8  [TE] Update TE-Raptor API to support multiple metrics (#4140)
983c5d8 is described below

commit 983c5d879e0cc9ccf02e0428bd48fa2da51d2970
Author: Akshay Rai <akshayra...@gmail.com>
AuthorDate: Fri Apr 19 15:12:11 2019 -0700

    [TE] Update TE-Raptor API to support multiple metrics (#4140)
    
    * Leverage JAX-RS automatic parameter conversion
---
 .../api/user/dashboard/UserDashboardResource.java  | 153 ++++++++++++++-------
 .../user/dashboard}/UserDashboardResourceTest.java |  58 ++++++--
 2 files changed, 149 insertions(+), 62 deletions(-)
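
The "Leverage JAX-RS automatic parameter conversion" note above refers to the JAX-RS rule that a @QueryParam can bind to any type exposing a public static fromString(String) (or valueOf) factory, and that a repeated query parameter binds to a List of that type. That is what lets the new metricAlias parameter arrive as a List<MetricDatasetPair> without manual parsing inside the resource method. A minimal, self-contained sketch of the pattern (the resource class below is illustrative only, not ThirdEye code):

    import java.util.List;
    import javax.ws.rs.GET;
    import javax.ws.rs.Path;
    import javax.ws.rs.Produces;
    import javax.ws.rs.QueryParam;
    import javax.ws.rs.core.MediaType;

    // Because MetricDatasetPair exposes a public static fromString(String),
    // the JAX-RS runtime converts each "metricAlias" query value into an
    // instance and collects the repeated values into a List.
    @Path("/demo")
    public class MetricAliasDemoResource {

      public static class MetricDatasetPair {
        final String dataset;
        final String metric;

        MetricDatasetPair(String dataset, String metric) {
          this.dataset = dataset;
          this.metric = metric;
        }

        // JAX-RS looks for this signature (or valueOf, or a String constructor).
        public static MetricDatasetPair fromString(String value) {
          String[] parts = value.trim().split("::");
          if (parts.length != 2) {
            throw new IllegalArgumentException("Expected dataset::metric, got " + value);
          }
          return new MetricDatasetPair(parts[0], parts[1]);
        }
      }

      // GET /demo?metricAlias=ds1::m1&metricAlias=ds2::m2 binds a two-element list.
      @GET
      @Produces(MediaType.TEXT_PLAIN)
      public String echo(@QueryParam("metricAlias") List<MetricDatasetPair> pairs) {
        return pairs.size() + " pairs";
      }
    }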

diff --git a/thirdeye/thirdeye-pinot/src/main/java/org/apache/pinot/thirdeye/api/user/dashboard/UserDashboardResource.java b/thirdeye/thirdeye-pinot/src/main/java/org/apache/pinot/thirdeye/api/user/dashboard/UserDashboardResource.java
index 48612f2..dab31bb 100644
--- a/thirdeye/thirdeye-pinot/src/main/java/org/apache/pinot/thirdeye/api/user/dashboard/UserDashboardResource.java
+++ b/thirdeye/thirdeye-pinot/src/main/java/org/apache/pinot/thirdeye/api/user/dashboard/UserDashboardResource.java
@@ -25,10 +25,13 @@ import com.wordnik.swagger.annotations.Api;
 import com.wordnik.swagger.annotations.ApiOperation;
 import com.wordnik.swagger.annotations.ApiParam;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.Collection;
 import java.util.Collections;
+import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
+import java.util.Map;
 import java.util.Set;
 import java.util.stream.Collectors;
 import javax.annotation.Nullable;
@@ -38,6 +41,7 @@ import javax.ws.rs.Path;
 import javax.ws.rs.Produces;
 import javax.ws.rs.QueryParam;
 import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
 import org.apache.commons.lang.StringUtils;
 import org.apache.pinot.thirdeye.api.Constants;
 import org.apache.pinot.thirdeye.constant.AnomalyFeedbackType;
@@ -85,6 +89,65 @@ public class UserDashboardResource {
     this.detectionAlertDAO = detectionAlertDAO;
   }
 
+  List<AnomalySummary> queryAnomalies(Long start, Long end, String application, String group, String metric,
+      String dataset, List<MetricDatasetPair> metricDatasetPairs, boolean fetchTrueAnomaly, Integer limit) {
+    if (limit == null) {
+      LOG.warn("No upper limit specified while fetching anomalies. Defaulting to " + ANOMALIES_LIMIT_DEFAULT);
+      limit = ANOMALIES_LIMIT_DEFAULT;
+    }
+    Preconditions.checkNotNull(start, "Please specify the start time of the anomaly retrieval window");
+
+    // TODO: Prefer to have intersection of anomalies rather than union
+    List<MergedAnomalyResultDTO> anomalies = new ArrayList<>();
+    // Fetch anomalies by group
+    anomalies.addAll(fetchAnomaliesBySubsGroup(start, end, group));
+    // Fetch anomalies by application
+    anomalies.addAll(fetchAnomaliesByApplication(start, end, application));
+    // Fetch anomalies by metric and/or dataset
+    anomalies.addAll(fetchAnomaliesByMetricDataset(start, end, metric, dataset));
+    // Fetch anomalies by metric dataset pairs
+    anomalies.addAll(fetchAnomaliesByMetricDatasetPairs(start, end, metricDatasetPairs));
+
+    // sort descending by start time
+    Collections.sort(anomalies, (o1, o2) -> -1 * Long.compare(o1.getStartTime(), o2.getStartTime()));
+
+    if (fetchTrueAnomaly) {
+      // Filter and retain only true anomalies
+      List<MergedAnomalyResultDTO> trueAnomalies = new ArrayList<>();
+      for (MergedAnomalyResultDTO anomaly : anomalies) {
+        if (anomaly.getFeedback() != null && anomaly.getFeedback().getFeedbackType().isAnomaly()) {
+          trueAnomalies.add(anomaly);
+        }
+      }
+      anomalies = trueAnomalies;
+    }
+    // filter child anomalies
+    anomalies = anomalies.stream().filter(anomaly -> !anomaly.isChild()).collect(Collectors.toList());
+    // limit result size
+    anomalies = anomalies.subList(0, Math.min(anomalies.size(), limit));
+
+    return getAnomalyFormattedOutput(anomalies);
+  }
+
+  protected static class MetricDatasetPair {
+    String datasetName;
+    String metricName;
+
+    MetricDatasetPair(String dataset, String metric) {
+      this.datasetName = dataset;
+      this.metricName = metric;
+    }
+
+    public static MetricDatasetPair fromString(String metricDatasetPair){
+      String[] metricDataset = metricDatasetPair.trim().split("::");
+      if (metricDataset.length != 2) {
+        throw new RuntimeException("Unable to parse dataset::metric pair " + metricDatasetPair);
+      }
+
+      return new MetricDatasetPair(metricDataset[0], metricDataset[1]);
+    }
+  }
+
   /**
   * Returns a list of AnomalySummary for a set of query parameters. Anomalies are
    * sorted by start time (descending).
@@ -122,7 +185,7 @@ public class UserDashboardResource {
   @GET
   @Path("/anomalies")
   @ApiOperation(value = "Query anomalies")
-  public List<AnomalySummary> queryAnomalies(
+  public Response fetchAnomalies(
       @ApiParam(value = "start time of anomaly retrieval window")
       @QueryParam("start") Long start,
       @ApiParam(value = "end time of anomaly retrieval window")
@@ -135,57 +198,25 @@ public class UserDashboardResource {
       @QueryParam("metric") String metric,
       @ApiParam(value = "The name of the pinot table to which this metric belongs")
       @QueryParam("dataset") String dataset,
+      @ApiParam(value = "Specify multiple dataset::metric pairs")
+      @QueryParam("metricAlias") List<MetricDatasetPair> metricDatasetPairs,
       @ApiParam(value = "Specify if you want to only fetch true anomalies")
       @QueryParam("fetchTrueAnomaly") @DefaultValue("false") boolean fetchTrueAnomaly,
       @ApiParam(value = "max number of results")
-      @QueryParam("limit") Integer limit) throws Exception {
-    LOG.info("[USER DASHBOARD] Fetching anomalies with filters. Start: " + start + " end: " + end + " metric: "
-        + metric + " dataset: " + dataset + " application: " + application + " group: " + group
-        + " fetchTrueAnomaly: " + fetchTrueAnomaly + " limit: " + limit);
-
-    // Safety conditions
-    if (limit == null) {
-      LOG.warn("No upper limit specified while fetching anomalies. Defaulting to " + ANOMALIES_LIMIT_DEFAULT);
-      limit = ANOMALIES_LIMIT_DEFAULT;
-    }
-    Preconditions.checkNotNull(start, "Please specify the start time of the anomaly retrieval window");
-
-    // TODO support index select on user-reported anomalies
-//    predicates.add(Predicate.OR(
-//        Predicate.EQ("notified", true),
-//        Predicate.EQ("anomalyResultSource", AnomalyResultSource.USER_LABELED_ANOMALY)));
-
-    // TODO: Prefer to have intersection of anomalies rather than union
-    List<MergedAnomalyResultDTO> anomalies = new ArrayList<>();
-    // Fetch anomalies by group
-    anomalies.addAll(fetchAnomaliesBySubsGroup(start, end, group));
-    // Fetch anomalies by application
-    anomalies.addAll(fetchAnomaliesByApplication(start, end, application));
-    // Fetch anomalies by metric and/or dataset
-    anomalies.addAll(fetchAnomaliesByMetricDataset(start, end, metric, dataset));
-
-    // sort descending by start time
-    Collections.sort(anomalies, (o1, o2) -> -1 * Long.compare(o1.getStartTime(), o2.getStartTime()));
-
-    if (fetchTrueAnomaly) {
-      // Filter and retain only true anomalies
-      List<MergedAnomalyResultDTO> trueAnomalies = new ArrayList<>();
-      for (MergedAnomalyResultDTO anomaly : anomalies) {
-        if (anomaly.getFeedback() != null && anomaly.getFeedback().getFeedbackType().isAnomaly()) {
-          trueAnomalies.add(anomaly);
-        }
-      }
-      anomalies = trueAnomalies;
+      @QueryParam("limit") Integer limit) {
+    Map<String, String> responseMessage = new HashMap<>();
+    List<AnomalySummary> output;
+    try {
+      output = queryAnomalies(start, end, application, group, metric, dataset, metricDatasetPairs, fetchTrueAnomaly, limit);
+    } catch (Exception e) {
+      LOG.warn("Error while fetching anomalies.", e.getMessage());
+      responseMessage.put("message", "Failed to fetch all the anomalies.");
+      responseMessage.put("more-info", "Error = " + e.getMessage());
+      return Response.serverError().entity(responseMessage).build();
     }
-    // filter child anomalies
-    anomalies = anomalies.stream().filter(anomaly -> !anomaly.isChild()).collect(Collectors.toList());
-    // limit result size
-    anomalies = anomalies.subList(0, Math.min(anomalies.size(), limit));
-
-    List<AnomalySummary> output = getAnomalyFormattedOutput(anomalies);
 
     LOG.info("Successfully returned " + output.size() + " anomalies.");
-    return output;
+    return Response.ok(output).build();
   }
 
   private List<AnomalySummary> getAnomalyFormattedOutput(List<MergedAnomalyResultDTO> anomalies) {
@@ -238,6 +269,30 @@ public class UserDashboardResource {
     return output;
   }
 
+  private Collection<MergedAnomalyResultDTO> fetchAnomaliesByMetricDatasetPairs(Long start, Long end, List<MetricDatasetPair> metricsRef) {
+    List<MergedAnomalyResultDTO> anomalies = new ArrayList<>();
+
+    if (metricsRef == null) {
+      return Collections.emptyList();
+    }
+
+    List<Predicate> predicates = new ArrayList<>();
+    predicates.add(Predicate.GE("endTime", start));
+    if (end != null) {
+      predicates.add(Predicate.LT("startTime", end));
+    }
+
+    for (MetricDatasetPair metricDatasetPair : metricsRef) {
+      List<Predicate> metricDatasetPred = new ArrayList<>(predicates);
+
+      metricDatasetPred.add(Predicate.EQ("collection", metricDatasetPair.datasetName));
+      metricDatasetPred.add(Predicate.EQ("metric", metricDatasetPair.metricName));
+      anomalies.addAll(this.anomalyDAO.findByPredicate(Predicate.AND(metricDatasetPred.toArray(new Predicate[0]))));
+    }
+
+    return anomalies;
+  }
+
   private Collection<MergedAnomalyResultDTO> fetchAnomaliesByMetricDataset(Long start, Long end, String metric, String dataset) {
     if (StringUtils.isBlank(metric) && StringUtils.isBlank(dataset)) {
       return Collections.emptyList();
@@ -260,7 +315,7 @@ public class UserDashboardResource {
     return this.anomalyDAO.findByPredicate(Predicate.AND(predicates.toArray(new Predicate[predicates.size()])));
   }
 
-  private Collection<MergedAnomalyResultDTO> fetchAnomaliesByApplication(Long start, Long end, String application) throws Exception {
+  private Collection<MergedAnomalyResultDTO> fetchAnomaliesByApplication(Long start, Long end, String application) {
     if (StringUtils.isBlank(application)) {
       return Collections.emptyList();
     }
@@ -276,7 +331,7 @@ public class UserDashboardResource {
     return fetchAnomaliesByConfigIds(start, end, detectionConfigIds);
   }
 
-  private Collection<MergedAnomalyResultDTO> fetchAnomaliesBySubsGroup(Long start, Long end, String group) throws Exception {
+  private Collection<MergedAnomalyResultDTO> fetchAnomaliesBySubsGroup(Long start, Long end, String group) {
     if (StringUtils.isBlank(group)) {
       return Collections.emptyList();
     }
@@ -292,7 +347,7 @@ public class UserDashboardResource {
     return fetchAnomaliesByConfigIds(start, end, detectionConfigIds);
   }
 
-  private Collection<MergedAnomalyResultDTO> fetchAnomaliesByConfigIds(Long start, Long end, Set<Long> detectionConfigIds) throws Exception {
+  private Collection<MergedAnomalyResultDTO> fetchAnomaliesByConfigIds(Long start, Long end, Set<Long> detectionConfigIds) {
     if (detectionConfigIds.isEmpty()) {
       return Collections.emptyList();
     }
diff --git a/thirdeye/thirdeye-pinot/src/test/java/org/apache/pinot/thirdeye/dashboard/resource/v2/UserDashboardResourceTest.java b/thirdeye/thirdeye-pinot/src/test/java/org/apache/pinot/thirdeye/api/user/dashboard/UserDashboardResourceTest.java
similarity index 76%
rename from thirdeye/thirdeye-pinot/src/test/java/org/apache/pinot/thirdeye/dashboard/resource/v2/UserDashboardResourceTest.java
rename to thirdeye/thirdeye-pinot/src/test/java/org/apache/pinot/thirdeye/api/user/dashboard/UserDashboardResourceTest.java
index 40a3efc..1d06469 100644
--- a/thirdeye/thirdeye-pinot/src/test/java/org/apache/pinot/thirdeye/dashboard/resource/v2/UserDashboardResourceTest.java
+++ b/thirdeye/thirdeye-pinot/src/test/java/org/apache/pinot/thirdeye/api/user/dashboard/UserDashboardResourceTest.java
@@ -1,23 +1,38 @@
-package org.apache.pinot.thirdeye.dashboard.resource.v2;
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.pinot.thirdeye.api.user.dashboard;
 
 import java.util.HashMap;
 import java.util.Map;
-import org.apache.pinot.thirdeye.api.user.dashboard.UserDashboardResource;
 import org.apache.pinot.thirdeye.dashboard.resources.v2.pojo.AnomalySummary;
-import org.apache.pinot.thirdeye.datalayer.bao.AlertConfigManager;
 import org.apache.pinot.thirdeye.datalayer.bao.DAOTestBase;
 import org.apache.pinot.thirdeye.datalayer.bao.DatasetConfigManager;
 import org.apache.pinot.thirdeye.datalayer.bao.DetectionAlertConfigManager;
 import org.apache.pinot.thirdeye.datalayer.bao.DetectionConfigManager;
 import org.apache.pinot.thirdeye.datalayer.bao.MergedAnomalyResultManager;
 import org.apache.pinot.thirdeye.datalayer.bao.MetricConfigManager;
-import org.apache.pinot.thirdeye.datalayer.dto.AlertConfigDTO;
 import org.apache.pinot.thirdeye.datalayer.dto.DatasetConfigDTO;
 import org.apache.pinot.thirdeye.datalayer.dto.DetectionAlertConfigDTO;
 import org.apache.pinot.thirdeye.datalayer.dto.DetectionConfigDTO;
 import org.apache.pinot.thirdeye.datalayer.dto.MergedAnomalyResultDTO;
 import org.apache.pinot.thirdeye.datalayer.dto.MetricConfigDTO;
-import org.apache.pinot.thirdeye.datalayer.pojo.AlertConfigBean;
 import org.apache.pinot.thirdeye.datasource.DAORegistry;
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -72,7 +87,6 @@ public class UserDashboardResourceTest {
 
     // anomalies
     this.anomalyDAO = DAORegistry.getInstance().getMergedAnomalyResultDAO();
-    this.anomalyDAO = DAORegistry.getInstance().getMergedAnomalyResultDAO();
     this.anomalyIds = new ArrayList<>();
     this.anomalyIds.add(this.anomalyDAO.save(makeAnomaly(100, 500, this.detectionIds.get(0), "test_metric", "test_dataset"))); // myDetectionA
     this.anomalyIds.add(this.anomalyDAO.save(makeAnomaly(800, 1200, this.detectionIds.get(0), "test_metric", "test_dataset"))); // myDetectionA
@@ -110,47 +124,65 @@ public class UserDashboardResourceTest {
 
   @Test
   public void testAnomaliesByApplication() throws Exception {
-    List<AnomalySummary> anomalies = this.resource.queryAnomalies(1000L, null, "myApplicationA", null, null, null, false, null);
+    List<AnomalySummary> anomalies = this.resource.queryAnomalies(1000L, null, "myApplicationA", null, null, null, null, false, null);
     Assert.assertEquals(anomalies.size(), 2);
     Assert.assertEquals(extractIds(anomalies), makeSet(this.anomalyIds.get(1), this.anomalyIds.get(2)));
   }
 
   @Test
   public void testAnomaliesByApplicationInvalid() throws Exception {
-    List<AnomalySummary> anomalies = this.resource.queryAnomalies(1000L, null, "Invalid", null, null, null, false, null);
+    List<AnomalySummary> anomalies = this.resource.queryAnomalies(1000L, null, "Invalid", null, null, null, null, false, null);
     Assert.assertEquals(anomalies.size(), 0);
   }
 
   @Test
   public void testAnomaliesByGroup() throws Exception {
-    List<AnomalySummary> anomalies = this.resource.queryAnomalies(1000L, null, null, "myAlertB", null, null, false, null);
+    List<AnomalySummary> anomalies = this.resource.queryAnomalies(1000L, null, null, "myAlertB", null, null, null, false, null);
     Assert.assertEquals(anomalies.size(), 2);
     Assert.assertEquals(extractIds(anomalies), makeSet(this.anomalyIds.get(3), this.anomalyIds.get(4)));
   }
 
   @Test
   public void testAnomaliesByGroupInvalid() throws Exception {
-    List<AnomalySummary> anomalies = this.resource.queryAnomalies(1000L, null, null, "Invalid", null, null, false, null);
+    List<AnomalySummary> anomalies = this.resource.queryAnomalies(1000L, null, null, "Invalid", null, null, null, false, null);
     Assert.assertEquals(anomalies.size(), 0);
   }
 
   @Test
   public void testAnomaliesLimit() throws Exception {
-    List<AnomalySummary> anomalies = this.resource.queryAnomalies(1000L, null, "myApplicationA", null, null, null, false, 1);
+    List<AnomalySummary> anomalies = this.resource.queryAnomalies(1000L, null, "myApplicationA", null, null, null, null, false, 1);
     Assert.assertEquals(anomalies.size(), 1);
     Assert.assertEquals(extractIds(anomalies), makeSet(this.anomalyIds.get(1)));
   }
 
   @Test
   public void testAnomaliesByMetric() throws Exception {
-    List<AnomalySummary> anomalies = this.resource.queryAnomalies(1000L, null, null, null, "test_metric", "test_dataset", false, null);
+    List<AnomalySummary> anomalies = this.resource.queryAnomalies(1000L, null, null, null, "test_metric", "test_dataset", null, false, null);
     Assert.assertEquals(anomalies.size(), 3);
     Assert.assertEquals(extractIds(anomalies), makeSet(this.anomalyIds.get(1), this.anomalyIds.get(2), this.anomalyIds.get(3)));
   }
 
   @Test
   public void testAnomaliesByDataset() throws Exception {
-    List<AnomalySummary> anomalies = this.resource.queryAnomalies(1000L, null, null, null, null, "test_dataset", false, null);
+    List<AnomalySummary> anomalies = this.resource.queryAnomalies(1000L, null, null, null, null, "test_dataset", null, false, null);
+    Assert.assertEquals(anomalies.size(), 4);
+    Assert.assertEquals(extractIds(anomalies), makeSet(this.anomalyIds.get(1), this.anomalyIds.get(2), this.anomalyIds.get(3), this.anomalyIds.get(4)));
+  }
+
+  @Test
+  public void testAnomaliesByMetricDatasetPairs() throws Exception {
+    List<AnomalySummary> anomalies = this.resource.queryAnomalies(1000L, null, null, null, null, null,
+        Collections.singletonList(new UserDashboardResource.MetricDatasetPair("test_dataset", "test_metric")), false, null);
+    Assert.assertEquals(anomalies.size(), 3);
+    Assert.assertEquals(extractIds(anomalies), makeSet(this.anomalyIds.get(1), this.anomalyIds.get(2), this.anomalyIds.get(3)));
+  }
+
+  @Test
+  public void testAnomaliesByMultipleMetricDatasetPairs() throws Exception {
+    List<UserDashboardResource.MetricDatasetPair> metricPairs = new ArrayList<>();
+    metricPairs.add(new UserDashboardResource.MetricDatasetPair("test_dataset", "test_metric"));
+    metricPairs.add(new UserDashboardResource.MetricDatasetPair("test_dataset", "test_metric_2"));
+    List<AnomalySummary> anomalies = this.resource.queryAnomalies(1000L, null, null, null, null, null, metricPairs, false, null);
     Assert.assertEquals(anomalies.size(), 4);
     Assert.assertEquals(extractIds(anomalies), makeSet(this.anomalyIds.get(1), this.anomalyIds.get(2), this.anomalyIds.get(3), this.anomalyIds.get(4)));
   }
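
With this patch the anomalies endpoint accepts repeated metricAlias values of the form dataset::metric and returns a JAX-RS Response (HTTP 200 with the list of AnomalySummary on success, HTTP 500 with a small JSON map on failure) instead of a bare list. A hedged client-side sketch using the standard javax.ws.rs client follows; the host, port, and "userdashboard" prefix are placeholders, and only the "anomalies" path segment and the query parameter names come from the diff above:

    import javax.ws.rs.client.Client;
    import javax.ws.rs.client.ClientBuilder;
    import javax.ws.rs.core.MediaType;
    import javax.ws.rs.core.Response;

    public class FetchAnomaliesClientSketch {
      public static void main(String[] args) {
        Client client = ClientBuilder.newClient();
        try {
          // The base URL below is an assumption for illustration; check the
          // deployed ThirdEye dashboard configuration for the real values.
          Response response = client.target("http://localhost:1426/userdashboard")
              .path("anomalies")
              .queryParam("start", 1000L)
              // Repeat metricAlias once per dataset::metric pair; the server converts
              // each value via MetricDatasetPair.fromString and binds them to a List.
              .queryParam("metricAlias", "test_dataset::test_metric", "test_dataset::test_metric_2")
              .queryParam("fetchTrueAnomaly", false)
              .queryParam("limit", 10)
              .request(MediaType.APPLICATION_JSON)
              .get();

          if (response.getStatus() == 200) {
            // Success: the body is the JSON list of AnomalySummary objects.
            System.out.println(response.readEntity(String.class));
          } else {
            // Failure: the resource now returns a map with "message" and "more-info" keys.
            System.out.println("Error " + response.getStatus() + ": " + response.readEntity(String.class));
          }
        } finally {
          client.close();
        }
      }
    }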

