[GitHub] [incubator-pinot] jihaozh commented on a change in pull request #6001: [TE] entity anomaly logging for ad-hoc debugging

2020-09-12 Thread GitBox


jihaozh commented on a change in pull request #6001:
URL: https://github.com/apache/incubator-pinot/pull/6001#discussion_r487263049



##
File path: thirdeye/thirdeye-pinot/src/test/java/org/apache/pinot/thirdeye/tools/RunAdhocDatabaseQueriesTool.java
##
@@ -723,9 +741,65 @@ private void rollbackMigrateSubscriptionWatermarks() {
     }
   }
 
-  public static void main(String[] args) throws Exception {
+  private void printEntityAnomalyDetails(MergedAnomalyResultDTO anomaly, String indent, int index) {
+    LOG.info("");
+    // Prefix each line with the caller-supplied indent so the tree depth is visible in the log.
+    LOG.info(indent + "Exploring Entity Anomaly {} with id {}", index, anomaly.getId());
+    LOG.info(indent + ENTITY_STATS_TEMPLATE, anomaly.getChildren().size(), anomaly.getProperties());
+    LOG.info(indent + ENTITY_TIME_TEMPLATE,
+        new DateTime(anomaly.getCreatedTime(), TIMEZONE),
+        DATE_FORMAT.print(new DateTime(anomaly.getStartTime(), TIMEZONE)),
+        DATE_FORMAT.print(new DateTime(anomaly.getEndTime(), TIMEZONE)));
+  }
 
-    File persistenceFile = new File("/Users/akrai/persistence-linux.yml");
+  /**
+   * Visualizes the entity anomalies by printing them.
+   *
+   * Eg: dq.printEntityAnomalyTrees(158750221, 0, System.currentTimeMillis())
+   *
+   * @param detectionId The detection id whose anomalies need to be printed
+   * @param start The start time of the anomaly slice
+   * @param end The end time of the anomaly slice
+   */
+  private void printEntityAnomalyTrees(long detectionId, long start, long end) {
+    TimeSeriesLoader timeseriesLoader =
+        new DefaultTimeSeriesLoader(metricConfigDAO, datasetConfigDAO,
+            ThirdEyeCacheRegistry.getInstance().getQueryCache(),
+            ThirdEyeCacheRegistry.getInstance().getTimeSeriesCache());
+    AggregationLoader aggregationLoader =
+        new DefaultAggregationLoader(metricConfigDAO, datasetConfigDAO,
+            ThirdEyeCacheRegistry.getInstance().getQueryCache(),
+            ThirdEyeCacheRegistry.getInstance().getDatasetMaxDataTimeCache());
+    DefaultDataProvider provider =
+        new DefaultDataProvider(metricConfigDAO, datasetConfigDAO, eventDAO, mergedResultDAO,
+            DAORegistry.getInstance().getEvaluationManager(), timeseriesLoader, aggregationLoader,
+            new DetectionPipelineLoader(), TimeSeriesCacheBuilder.getInstance(), AnomaliesCacheBuilder.getInstance());
+
+    AnomalySlice anomalySlice = new AnomalySlice();
+    anomalySlice = anomalySlice.withDetectionId(detectionId).withStart(start).withEnd(end);
+    Multimap<AnomalySlice, MergedAnomalyResultDTO> sliceToAnomaliesMap =
+        provider.fetchAnomalies(Collections.singletonList(anomalySlice));
+
+    LOG.info("Total number of entity anomalies = " + sliceToAnomaliesMap.values().size());
+
+    int i = 1;
+    for (MergedAnomalyResultDTO parentAnomaly : sliceToAnomaliesMap.values()) {
+      printEntityAnomalyDetails(parentAnomaly, "", i);
+      int j = 1;
+      for (MergedAnomalyResultDTO child : parentAnomaly.getChildren()) {
+        printEntityAnomalyDetails(child, "\t", j);
+        int k = 1;
+        for (MergedAnomalyResultDTO grandchild : child.getChildren()) {
+          printEntityAnomalyDetails(grandchild, "\t\t", k);
+          k++;
+        }
+        j++;
+      }
+      i++;
+    }
+  }
+
+  public static void main(String[] args) throws Exception {
+    File persistenceFile = new File("/Users/akrai/persistence-local.yml");
Review comment:
   hide the user name here?

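One possible way to address the comment above is a minimal sketch (not part of the PR) of a main() that avoids hardcoding a home-directory path: the persistence config is taken from the first command-line argument, with a fallback under the current user's home directory. The argument handling, the fallback file name "persistence-local.yml", and the constructor call taking the persistence file are assumptions for illustration only.

  public static void main(String[] args) throws Exception {
    // Prefer an explicitly supplied path so no user name is baked into the source.
    // Falling back to <user.home>/persistence-local.yml is an assumption of this sketch.
    File persistenceFile = (args.length > 0)
        ? new File(args[0])
        : new File(System.getProperty("user.home"), "persistence-local.yml");
    if (!persistenceFile.exists()) {
      System.err.println("Persistence config not found: " + persistenceFile.getAbsolutePath());
      System.exit(1);
    }
    // Assumes the tool can be constructed from the persistence file, as in the existing main().
    RunAdhocDatabaseQueriesTool dq = new RunAdhocDatabaseQueriesTool(persistenceFile);
    // Example invocation taken from the method's javadoc.
    dq.printEntityAnomalyTrees(158750221, 0, System.currentTimeMillis());
  }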
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org



-
To unsubscribe, e-mail: commits-unsubscr...@pinot.apache.org
For additional commands, e-mail: commits-h...@pinot.apache.org


