jasonyanwenl commented on a change in pull request #5769:
URL: https://github.com/apache/incubator-pinot/pull/5769#discussion_r467200428



##########
File path: thirdeye/thirdeye-pinot/src/main/java/org/apache/pinot/thirdeye/api/detection/AnomalyDetectionResource.java
##########
@@ -0,0 +1,756 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.pinot.thirdeye.api.detection;
+
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.node.ArrayNode;
+import com.fasterxml.jackson.databind.node.ObjectNode;
+import com.fasterxml.jackson.databind.node.TextNode;
+import io.dropwizard.auth.Auth;
+import io.swagger.annotations.Api;
+import io.swagger.annotations.ApiOperation;
+import io.swagger.annotations.ApiParam;
+import org.apache.pinot.thirdeye.anomaly.task.TaskConstants;
+import org.apache.pinot.thirdeye.api.Constants;
+import org.apache.pinot.thirdeye.api.user.dashboard.UserDashboardResource;
+import org.apache.pinot.thirdeye.auth.ThirdEyePrincipal;
+import org.apache.pinot.thirdeye.common.metric.MetricType;
+import org.apache.pinot.thirdeye.constant.MetricAggFunction;
+import org.apache.pinot.thirdeye.dashboard.resources.v2.pojo.AnomalySummary;
+import org.apache.pinot.thirdeye.datalayer.bao.*;
+import org.apache.pinot.thirdeye.datalayer.dto.DatasetConfigDTO;
+import org.apache.pinot.thirdeye.datalayer.dto.DetectionConfigDTO;
+import org.apache.pinot.thirdeye.datalayer.dto.MetricConfigDTO;
+import org.apache.pinot.thirdeye.datalayer.dto.TaskDTO;
+import org.apache.pinot.thirdeye.datalayer.util.Predicate;
+import org.apache.pinot.thirdeye.datasource.DAORegistry;
+import org.apache.pinot.thirdeye.datasource.ThirdEyeCacheRegistry;
+import org.apache.pinot.thirdeye.datasource.loader.AggregationLoader;
+import org.apache.pinot.thirdeye.datasource.loader.DefaultAggregationLoader;
+import org.apache.pinot.thirdeye.datasource.loader.DefaultTimeSeriesLoader;
+import org.apache.pinot.thirdeye.datasource.loader.TimeSeriesLoader;
+import org.apache.pinot.thirdeye.detection.*;
+import org.apache.pinot.thirdeye.detection.cache.builder.AnomaliesCacheBuilder;
+import org.apache.pinot.thirdeye.detection.cache.builder.TimeSeriesCacheBuilder;
+import org.apache.pinot.thirdeye.detection.validators.DatasetConfigValidator;
+import org.apache.pinot.thirdeye.detection.validators.DetectionConfigValidator;
+import org.apache.pinot.thirdeye.detection.validators.MetricConfigValidator;
+import org.apache.pinot.thirdeye.detection.yaml.DetectionConfigTuner;
+import org.apache.pinot.thirdeye.detection.yaml.translator.DetectionConfigTranslator;
+import org.apache.pinot.thirdeye.util.ThirdEyeUtils;
+import org.jfree.util.Log;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.yaml.snakeyaml.Yaml;
+import javax.ws.rs.*;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+import javax.ws.rs.core.UriBuilder;
+import java.util.*;
+import java.util.concurrent.TimeUnit;
+
+@Path("/anomaly-detection")
+@Api(tags = { Constants.DETECTION_TAG })
+public class AnomalyDetectionResource {
+  protected static final Logger LOG = LoggerFactory.getLogger(AnomalyDetectionResource.class);
+
+  private static final String TEMPLATE_DETECTION_PATH = "detection-config-template.yml";
+
+  /* -------- Detection config fields -------- */
+  private static final String DETECTION_YAML_FIELD = "detectionName";
+  private static final String DEFAULT_DETECTION_NAME = "online_detection";
+
+  /* -------- Metric config fields -------- */
+  private static final String DATASET_YAML_FIELD = "dataset";
+  private static final String DEFAULT_DATASET_NAME = "online_dataset";
+  private static final String DATATYPE_YAML_FIELD = "datatype";
+  private static final MetricType DEFAULT_DATA_TYPE = MetricType.DOUBLE;
+
+  /* -------- Dataset config fields -------- */
+  private static final String METRIC_YAML_FIELD = "metric";
+  private static final String DEFAULT_METRIC_NAME = "online_metric";
+  private static final String DEFAULT_METRIC_COLUMN = "metric";
+  private static final String TIME_COLUMN_YAML_FIELD = "timeColumn";
+  private static final String DEFAULT_TIME_COLUMN = "date";
+  private static final String TIME_UNIT_YAML_FIELD = "timeUnit";
+  private static final TimeUnit DEFAULT_TIME_UNIT = TimeUnit.DAYS;
+  private static final String TIME_DURATION_YAML_FIELD = "timeDuration";
+  private static final String TIME_FORMAT_YAML_FIELD = "timeFormat";
+  private static final String DEFAULT_TIME_FORMAT = "SIMPLE_DATE_FORMAT:yyyyMMdd";
+  private static final String TIME_ZONE_YAML_FIELD = "timezone";
+  private static final String DEFAULT_TIME_ZONE = "US/Pacific";
+  private static final List<String> DEFAULT_DIMENSIONS =
+      Collections.unmodifiableList(new ArrayList<>());
+
+  /* -------- Request/Response field -------- */
+  private static final String DATA_FIELD = "data";
+  private static final String COLUMNS_FIELD = "columns";
+  private static final String ROWS_FIELD = "rows";
+  private static final String DATASET_FIELD = "datasetConfiguration";
+  private static final String METRIC_FIELD = "metricConfiguration";
+  private static final String DETECTION_FIELD = "detectionConfiguration";
+  private static final String ANOMALIES_FIELD = "anomalies";
+
+  /* -------- Others -------- */
+  private static final String ONLINE_DATASOURCE = "OnlineThirdEyeDataSource";
+  private static final String DETECTION_MYSQL_NAME_COLUMN = "name";
+  private static final String TASK_MYSQL_NAME_COLUMN = "name";
+  private static final String ANOMALY_ENDPOINT_URL = "/userdashboard/anomalies";
+  private static final long POLLING_SLEEP_TIME = 5L;
+  private static final int DEFAULT_TIME_DURATION = 1;
+  private static final long MAX_ONLINE_PAYLOAD_SIZE = 10 * 1024 * 1024L;
+
+  private final UserDashboardResource userDashboardResource;
+  private final DetectionConfigManager detectionConfigDAO;
+  private final DataProvider provider;
+  private final MetricConfigManager metricConfigDAO;
+  private final DatasetConfigManager datasetConfigDAO;
+  private final EventManager eventDAO;
+  private final MergedAnomalyResultManager anomalyDAO;
+  private final EvaluationManager evaluationDAO;
+  private final TaskManager taskDAO;
+  private final DetectionPipelineLoader loader;
+  private final DetectionConfigValidator detectionValidator;
+  private final DatasetConfigValidator datasetConfigValidator;
+  private final MetricConfigValidator metricConfigValidator;
+  private final ObjectMapper objectMapper = new ObjectMapper();
+  private final Yaml yaml;
+
+  public AnomalyDetectionResource(UserDashboardResource userDashboardResource) {
+    this.detectionConfigDAO = DAORegistry.getInstance().getDetectionConfigManager();
+    this.metricConfigDAO = DAORegistry.getInstance().getMetricConfigDAO();
+    this.datasetConfigDAO = DAORegistry.getInstance().getDatasetConfigDAO();
+    this.eventDAO = DAORegistry.getInstance().getEventDAO();
+    this.anomalyDAO = DAORegistry.getInstance().getMergedAnomalyResultDAO();
+    this.taskDAO = DAORegistry.getInstance().getTaskDAO();
+    this.evaluationDAO = DAORegistry.getInstance().getEvaluationManager();
+    this.userDashboardResource = userDashboardResource;
+
+    TimeSeriesLoader timeseriesLoader =
+        new DefaultTimeSeriesLoader(metricConfigDAO, datasetConfigDAO,
+            ThirdEyeCacheRegistry.getInstance().getQueryCache(),
+            ThirdEyeCacheRegistry.getInstance().getTimeSeriesCache());
+
+    AggregationLoader aggregationLoader =
+        new DefaultAggregationLoader(metricConfigDAO, datasetConfigDAO,
+            ThirdEyeCacheRegistry.getInstance().getQueryCache(),
+            ThirdEyeCacheRegistry.getInstance().getDatasetMaxDataTimeCache());
+
+    this.loader = new DetectionPipelineLoader();
+
+    this.provider = new DefaultDataProvider(metricConfigDAO, datasetConfigDAO, eventDAO, anomalyDAO,
+        evaluationDAO, timeseriesLoader, aggregationLoader, loader,
+        TimeSeriesCacheBuilder.getInstance(), AnomaliesCacheBuilder.getInstance());
+    this.detectionValidator = new DetectionConfigValidator(this.provider);
+    this.metricConfigValidator = new MetricConfigValidator();
+    this.datasetConfigValidator = new DatasetConfigValidator();
+
+    // Read template from disk
+    this.yaml = new Yaml();
+  }
+
+  /**
+   * Run an online anomaly detection service synchronously. It will run anomaly detection using
+   * default configs for detection, metric, dataset
+   *
+   * @param start     detection window start time
+   * @param end       detection window end time
+   * @param payload   payload in request including online data
+   * @param principal user who sent this request. It's used to separate different config names
+   * @return a message containing the detected anomalies and the detection config used
+   */
+  @POST
+  @Path("/")
+  @Produces(MediaType.APPLICATION_JSON)
+  @Consumes(MediaType.APPLICATION_JSON)
+  @ApiOperation("Request an anomaly detection online task")
+  public Response onlineApi(
+          @QueryParam("start") long start,
+          @QueryParam("end") long end,
+          @ApiParam("jsonPayload") String payload,
+          @Auth ThirdEyePrincipal principal) {
+    DatasetConfigDTO datasetConfigDTO = null;
+    MetricConfigDTO metricConfigDTO = null;
+    DetectionConfigDTO detectionConfigDTO = null;
+    TaskDTO taskDTO = null;
+    List<AnomalySummary> anomalies = null;
+    Response.Status responseStatus;
+    Map<String, String> responseMessage = new HashMap<>();
+    ObjectMapper objectMapper = new ObjectMapper();
+    // Use username to separate different requests. One user can only send one request at a time
+    String nameSuffix = "_" + principal.getName();
+
+    try {
+      if (payload.getBytes().length > MAX_ONLINE_PAYLOAD_SIZE) {
+        responseStatus = Response.Status.BAD_REQUEST;
+        responseMessage.put("message", "Payload too large");
+        return Response.status(responseStatus).entity(responseMessage).build();
+      }
+
+      JsonNode payloadNode = objectMapper.readTree(payload);
+
+      if (!validateOnlineRequestPayload(payloadNode)) {
+        responseStatus = Response.Status.BAD_REQUEST;
+        responseMessage.put("message", "Invalid request payload");
+        return Response.status(responseStatus).entity(responseMessage).build();
+      }
+
+      // Preprocess: remove existing entities generated by the previous interrupted request
+      cleanExistingOnlineTask(nameSuffix);
+
+      // Create & save dataset
+      datasetConfigDTO = generateDatasetConfig(payloadNode, nameSuffix);
+
+      // Create & save metric along with online data
+      metricConfigDTO = generateMetricConfig(payloadNode, nameSuffix);

Review comment:
   I think both approaches have pros and cons, but I personally find a single endpoint easier for users:
   * The main purpose of the online AD endpoint is to provide a convenient way to run AD tasks, so the endpoint should be as simple as possible, and a single endpoint is more user-friendly. A separate CRUD endpoint would allow more flexibility, but it would force users to register their data before they can use the service.
   * Secondly, the online service will not accept very large payloads, so sending the data with every request should not be a bottleneck.
   
   I think we could eventually provide both. This is phase 1 of the feature; in phase 2 we could add the two extra endpoints to support what you suggested.
   
   Regarding the cleanup: in phase 1 this is a single, self-contained request, which is why I described it as stateless. Users will not have a separate endpoint to retrieve the anomalies afterwards, so we can clean them up right away. In phase 2, once the two additional endpoints are provided, that cleanup will no longer be needed for them.
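   
   For illustration, a call to the single endpoint could look like the sketch below. To be clear, this is only a sketch: the payload field names (`data`, `columns`, `rows`) come from the constants in this class, while the host/port, the exact nesting under `data`, and the omission of authentication are assumptions on my side.
   
   ```java
   // Hypothetical client for the single-endpoint flow: all data is sent inline,
   // so no dataset/metric registration is needed beforehand.
   import java.net.URI;
   import java.net.http.HttpClient;
   import java.net.http.HttpRequest;
   import java.net.http.HttpResponse;
   
   public class OnlineDetectionClientSketch {
     public static void main(String[] args) throws Exception {
       // Assumed payload shape: "data" holds the "columns" and "rows" of the series.
       String payload = "{\"data\": {"
           + "\"columns\": [\"date\", \"metric\"],"
           + "\"rows\": [[\"20200801\", 100.0], [\"20200802\", 250.0]]}}";
   
       // start/end are the detection window bounds; host and port are placeholders.
       HttpRequest request = HttpRequest.newBuilder()
           .uri(URI.create("http://localhost:1426/anomaly-detection/?start=1596240000000&end=1596412800000"))
           .header("Content-Type", "application/json")
           .POST(HttpRequest.BodyPublishers.ofString(payload))
           .build();
   
       HttpResponse<String> response = HttpClient.newHttpClient()
           .send(request, HttpResponse.BodyHandlers.ofString());
       // The response body carries the detected anomalies plus the detection config used.
       System.out.println(response.body());
     }
   }
   ```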
   
   Thanks for your suggestions!
   




----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org
