This is an automated email from the ASF dual-hosted git repository.
nchung pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/incubator-sdap-nexus.git
The following commit(s) were added to refs/heads/master by this push:
new 9f29fcb Updated matchup to ensure both time and space are accounted for
when 'matchOnce' is true (#176)
9f29fcb is described below
commit 9f29fcb9f68d56bc8efd80a3bf66ec1bcc5800ed
Author: Stepheny Perez <[email protected]>
AuthorDate: Wed Jul 20 11:37:15 2022 -0700
Updated matchup to ensure both time and space are accounted for when
'matchOnce' is true (#176)
---
CHANGELOG.md | 3 +++
analysis/webservice/algorithms_spark/Matchup.py | 25 ++++++++++++++++---------
2 files changed, 19 insertions(+), 9 deletions(-)
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 9d0bb20..94ade82 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -19,4 +19,7 @@ and this project adheres to [Semantic
Versioning](https://semver.org/spec/v2.0.0
- Fix failing test_matchup unit test
- Fixed null value for count in matchup response
- SDAP-371: Fixed DOMS subset endpoint
+- Fixed issue where satellite to satellite matchups failed
+- Fixed issue where null results were returned if more than "resultSizeLimit"
matches are found
+- Fixed issue where satellite to satellite matchups with the same dataset
don't return the expected result
### Security
\ No newline at end of file
diff --git a/analysis/webservice/algorithms_spark/Matchup.py
b/analysis/webservice/algorithms_spark/Matchup.py
index 671a935..c154f1f 100644
--- a/analysis/webservice/algorithms_spark/Matchup.py
+++ b/analysis/webservice/algorithms_spark/Matchup.py
@@ -277,12 +277,13 @@ class Matchup(NexusCalcSparkHandler):
threading.Thread(target=do_result_insert).start()
- if 0 < result_size_limit < len(matches):
- result = DomsQueryResults(results=None, args=args,
details=details, bounds=None, count=None,
- computeOptions=None,
executionId=execution_id, status_code=202)
- else:
- result = DomsQueryResults(results=matches, args=args,
details=details, bounds=None, count=len(matches),
- computeOptions=None,
executionId=execution_id)
+ # Get only the first "result_size_limit" results
+ matches = matches[0:result_size_limit]
+
+ result = DomsQueryResults(results=matches, args=args,
+ details=details, bounds=None,
+ count=len(matches), computeOptions=None,
+ executionId=execution_id)
return result
@@ -582,7 +583,12 @@ def spark_matchup_driver(tile_ids, bounding_wkt,
primary_ds_name, secondary_ds_n
lat1, lon1 = (primary.latitude, primary.longitude)
lat2, lon2 = (matchup.latitude, matchup.longitude)
az12, az21, distance = wgs84_geod.inv(lon1, lat1, lon2, lat2)
- return distance
+ return distance, time_dist(primary, matchup)
+
+ def time_dist(primary, matchup):
+ primary_time = iso_time_to_epoch(primary.time)
+ matchup_time = iso_time_to_epoch(matchup.time)
+ return abs(primary_time - matchup_time)
rdd_filtered = rdd_filtered \
.map(lambda primary_matchup: tuple([primary_matchup[0],
tuple([primary_matchup[1], dist(primary_matchup[0], primary_matchup[1])])])) \
@@ -635,10 +641,11 @@ def tile_to_edge_points(tile):
data = [tile.data[tuple(idx)]]
edge_point = {
- 'point': f'Point({tile.longitudes[idx[2]]}
{tile.latitudes[idx[1]]})',
+ 'latitude': tile.latitudes[idx[1]],
+ 'longitude': tile.longitudes[idx[2]],
'time':
datetime.utcfromtimestamp(tile.times[idx[0]]).strftime('%Y-%m-%dT%H:%M:%SZ'),
'source': tile.dataset,
- 'platform': None,
+ 'platform': 'orbiting satellite',
'device': None,
'fileurl': tile.granule,
'variables': tile.variables,