This is an automated email from the ASF dual-hosted git repository. skperez pushed a commit to branch SDAP-493 in repository https://gitbox.apache.org/repos/asf/incubator-sdap-nexus.git
commit b5df944ec6dcc3bb038c02f5849fd300c0a219c6 Author: skorper <[email protected]> AuthorDate: Mon Sep 25 13:37:09 2023 -0700 removed resultSizeLimit param from matchup --- analysis/webservice/algorithms_spark/Matchup.py | 17 +++-------------- analysis/webservice/apidocs/openapi.yml | 13 ------------- 2 files changed, 3 insertions(+), 27 deletions(-) diff --git a/analysis/webservice/algorithms_spark/Matchup.py b/analysis/webservice/algorithms_spark/Matchup.py index a55f61d..77ecc34 100644 --- a/analysis/webservice/algorithms_spark/Matchup.py +++ b/analysis/webservice/algorithms_spark/Matchup.py @@ -137,14 +137,6 @@ class Matchup(NexusCalcSparkTornadoHandler): + "If true, only the nearest point will be returned for each primary point. " + "If false, all points within the tolerances will be returned for each primary point. Default: False" }, - "resultSizeLimit": { - "name": "Result Size Limit", - "type": "int", - "description": "Optional integer value that limits the number of results returned from the matchup. " - "If the number of primary matches is greater than this limit, the service will respond with " - "(HTTP 202: Accepted) and an empty response body. A value of 0 means return all results. " - "Default: 500" - }, "prioritizeDistance": { "name": "Prioritize distance", "type": "boolean", @@ -223,8 +215,6 @@ class Matchup(NexusCalcSparkTornadoHandler): match_once = request.get_boolean_arg("matchOnce", default=False) - result_size_limit = request.get_int_arg("resultSizeLimit", default=500) - start_seconds_from_epoch = int((start_time - EPOCH).total_seconds()) end_seconds_from_epoch = int((end_time - EPOCH).total_seconds()) @@ -234,7 +224,7 @@ class Matchup(NexusCalcSparkTornadoHandler): return bounding_polygon, primary_ds_name, secondary_ds_names, parameter_s, \ start_time, start_seconds_from_epoch, end_time, end_seconds_from_epoch, \ depth_min, depth_max, time_tolerance, radius_tolerance, \ - platforms, match_once, result_size_limit, prioritize_distance + platforms, match_once, prioritize_distance def get_job_pool(self, tile_ids): if len(tile_ids) > LARGE_JOB_THRESHOLD: @@ -244,7 +234,7 @@ class Matchup(NexusCalcSparkTornadoHandler): def async_calc(self, execution_id, tile_ids, bounding_polygon, primary_ds_name, secondary_ds_names, parameter_s, start_time, end_time, depth_min, depth_max, time_tolerance, radius_tolerance, platforms, match_once, - result_size_limit, start, prioritize_distance): + start, prioritize_distance): # Call spark_matchup self.log.debug("Calling Spark Driver") @@ -310,7 +300,7 @@ class Matchup(NexusCalcSparkTornadoHandler): bounding_polygon, primary_ds_name, secondary_ds_names, parameter_s, \ start_time, start_seconds_from_epoch, end_time, end_seconds_from_epoch, \ depth_min, depth_max, time_tolerance, radius_tolerance, \ - platforms, match_once, result_size_limit, prioritize_distance = self.parse_arguments(request) + platforms, match_once, prioritize_distance = self.parse_arguments(request) args = { "primary": primary_ds_name, @@ -380,7 +370,6 @@ class Matchup(NexusCalcSparkTornadoHandler): radius_tolerance=radius_tolerance, platforms=platforms, match_once=match_once, - result_size_limit=result_size_limit, start=start, 
prioritize_distance=prioritize_distance )) diff --git a/analysis/webservice/apidocs/openapi.yml b/analysis/webservice/apidocs/openapi.yml index ea9b16b..dc6fdb4 100644 --- a/analysis/webservice/apidocs/openapi.yml +++ b/analysis/webservice/apidocs/openapi.yml @@ -154,19 +154,6 @@ paths: type: boolean default: false example: false - - in: query - name: resultSizeLimit - description: | - Optional integer value that limits the number of results - returned from the matchup. If the number of primary matches - is greater than this limit, the service will respond with - (HTTP 202 Accepted) and an empty response body. A value of - 0 means return all results. - required: false - schema: - type: integer - default: 500 - example: 500 - in: query name: prioritizeDistance description: |
