This is an automated email from the ASF dual-hosted git repository.

nchung pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/incubator-sdap-nexus.git


The following commit(s) were added to refs/heads/master by this push:
     new 6583534  SDAP-366: Fix CSV output for matchup (#153)
6583534 is described below

commit 6583534206a1d9dbfb1eb1bada50af6f3e09396a
Author: Stepheny Perez <[email protected]>
AuthorDate: Thu Mar 10 19:35:30 2022 -0800

    SDAP-366: Fix CSV output for matchup (#153)
    
    * Fix CSV output for matchup
    
    * Update DOMS CSV format to only include values that are included in the 
result
    
    * Use cf variable name if present
    
    * CSV works when insitu results contain varying variables
    
    * Updated CSV format with suggested changes
    
    * Updated CSV based on feedback
---
 .../tests/algorithms/test_base_doms_handler.py     | 113 +++++++++++++++
 .../webservice/algorithms/doms/BaseDomsHandler.py  | 151 +++++++++------------
 .../webservice/algorithms/doms/ResultsStorage.py   |  35 +++--
 analysis/webservice/algorithms/doms/config.py      |   5 +-
 analysis/webservice/algorithms_spark/Matchup.py    |  25 +++-
 5 files changed, 223 insertions(+), 106 deletions(-)

diff --git a/analysis/tests/algorithms/test_base_doms_handler.py 
b/analysis/tests/algorithms/test_base_doms_handler.py
new file mode 100644
index 0000000..dbb579f
--- /dev/null
+++ b/analysis/tests/algorithms/test_base_doms_handler.py
@@ -0,0 +1,113 @@
+import datetime
+import uuid
+import csv
+from webservice.algorithms.doms.BaseDomsHandler import DomsCSVFormatter
+
+
+def test_csv():
+    """
+    Test that CSV is constructed properly given result, params, and
+    details.
+    """
+    test_execution_id = str(uuid.uuid4())
+    results = [
+        {
+            "id": "9c08c026-eff7-30a7-ab1e-413a64f507ff[[0 0 3]]",
+            "x": 173.375,
+            "y": -29.875,
+            "source": "MUR25-JPL-L4-GLOB-v04.2",
+            "device": "radiometers",
+            "platform": "orbiting satellite",
+            "time": datetime.datetime(2018, 9, 27, 9, 0),
+            "analysed_sst": 18.069000244140625,
+            "analysis_error": -272.7799987792969,
+            "sst_anomaly": 1.0,
+            "matches": [
+                {
+                    "id": "PCEWYL",
+                    "x": 173.38,
+                    "y": -29.88,
+                    "source": "icoads",
+                    "device": None,
+                    "platform": "drifting surface float",
+                    "time": datetime.datetime(2018, 10, 18, 20, 0),
+                    "sea_water_temperature": 19.6
+                }
+            ]
+        },
+        {
+            "id": "8ff1b246-16de-34e2-87bb-600c4107a7f8[[ 0  8 15]]",
+            "x": 161.375,
+            "y": -27.875,
+            "source": "MUR25-JPL-L4-GLOB-v04.2",
+            "device": "radiometers",
+            "platform": "orbiting satellite",
+            "time": datetime.datetime(2018, 9, 28, 9, 0, ),
+            "analysed_sst": 19.891998291015625,
+            "analysis_error": -272.7799987792969,
+            "sst_anomaly": 1.0,
+            "matches": [
+                {
+                    "id": "PCY3CI",
+                    "x": 161.38,
+                    "y": -27.88,
+                    "source": "icoads",
+                    "device": None,
+                    "platform": "drifting surface float",
+                    "time": datetime.datetime(2018, 10, 23, 10, 0, ),
+                    "sea_water_temperature": 20.0
+                }
+            ]
+        },
+    ]
+    params = {
+        'primary': 'MUR25-JPL-L4-GLOB-v04.2',
+        'matchup': ['icoads'],
+        'depthMin': 0.0,
+        'depthMax': 5.0,
+        'timeTolerance': 2592000,
+        'radiusTolerance': 1000.0,
+        'startTime': datetime.datetime(2018, 9, 24, 0, 0, ),
+        'endTime': datetime.datetime(2018, 9, 30, 0, 0, ),
+        'platforms': '1,2,3,4,5,6,7,8,9',
+        'bbox': '160,-30,180,-25',
+        'parameter': 'sst'
+    }
+    details = {
+        'numGriddedMatched': 54,
+        'numGriddedChecked': 0,
+        'numInSituMatched': 54,
+        'numInSituChecked': 0,
+        'timeToComplete': 26
+    }
+
+    csv_formatter = DomsCSVFormatter()
+    csv_result = csv_formatter.create(
+        executionId=test_execution_id,
+        results=results,
+        params=params,
+        details=details
+    )
+
+    csv_reader = csv.reader(csv_result.split('\n'), delimiter=',')
+    header = None
+    for row in csv_reader:
+        if not row:
+            continue
+
+        if header:
+            # Expected science vars should all contain data
+            expected_var_names = [
+                'analysed_sst',
+                'analysis_error',
+                'sst_anomaly',
+                'sea_water_temperature'
+            ]
+            for var_name in expected_var_names:
+                assert var_name in header
+                assert len(header) == len(row)
+                index = header.index(var_name)
+                assert row[index] is not None
+
+        if 'id' == row[0]:
+            header = row
diff --git a/analysis/webservice/algorithms/doms/BaseDomsHandler.py 
b/analysis/webservice/algorithms/doms/BaseDomsHandler.py
index dbfc692..b5834f9 100644
--- a/analysis/webservice/algorithms/doms/BaseDomsHandler.py
+++ b/analysis/webservice/algorithms/doms/BaseDomsHandler.py
@@ -19,6 +19,8 @@ import csv
 import json
 from datetime import datetime
 import time
+import itertools
+import importlib_metadata
 from decimal import Decimal
 
 import numpy as np
@@ -114,7 +116,7 @@ class DomsCSVFormatter:
             DomsCSVFormatter.__addDynamicAttrs(csv_mem_file, executionId, 
results, params, details)
             csv.writer(csv_mem_file).writerow([])
 
-            DomsCSVFormatter.__packValues(csv_mem_file, results, params)
+            DomsCSVFormatter.__packValues(csv_mem_file, results)
 
             csv_out = csv_mem_file.getvalue()
         finally:
@@ -123,84 +125,65 @@ class DomsCSVFormatter:
         return csv_out
 
     @staticmethod
-    def __packValues(csv_mem_file, results, params):
+    def __packValues(csv_mem_file, results):
+        primary_headers = list(dict.fromkeys(
+            key for result in results for key in result if key != 'matches'
+        ))
 
-        writer = csv.writer(csv_mem_file)
-
-        headers = [
-            # Primary
-            "id", "source", "lon (degrees_east)", "lat (degrees_north)", 
"time", "platform",
-            "sea_surface_salinity (1e-3)", "sea_surface_temperature 
(degree_C)", "wind_speed (m s-1)", "wind_direction",
-            "wind_u (m s-1)", "wind_v (m s-1)",
-            # Match
-            "id", "source", "lon (degrees_east)", "lat (degrees_north)", 
"time", "platform",
-            "depth (m)", "sea_water_salinity (1e-3)",
-            "sea_water_temperature (degree_C)", "wind_speed (m s-1)",
-            "wind_direction", "wind_u (m s-1)", "wind_v (m s-1)"
-        ]
+        secondary_headers = list(dict.fromkeys(
+            key for result in results for match in result['matches'] for key 
in match
+        ))
 
-        writer.writerow(headers)
-
-        #
-        # Only include the depth variable related to the match-up parameter. 
If the match-up parameter
-        # is not sss or sst then do not include any depth data, just fill 
values.
-        #
-        if params["parameter"] == "sss":
-            depth = "sea_water_salinity_depth"
-        elif params["parameter"] == "sst":
-            depth = "sea_water_temperature_depth"
-        else:
-            depth = "NO_DEPTH"
+        writer = csv.writer(csv_mem_file)
+        writer.writerow(list(itertools.chain(primary_headers, 
secondary_headers)))
 
         for primaryValue in results:
             for matchup in primaryValue["matches"]:
-                row = [
-                    # Primary
-                    primaryValue["id"], primaryValue["source"], 
str(primaryValue["x"]), str(primaryValue["y"]),
-                    primaryValue["time"].strftime(ISO_8601), 
primaryValue["platform"],
-                    primaryValue.get("sea_water_salinity", ""), 
primaryValue.get("sea_water_temperature", ""),
-                    primaryValue.get("wind_speed", ""), 
primaryValue.get("wind_direction", ""),
-                    primaryValue.get("wind_u", ""), primaryValue.get("wind_v", 
""),
-
-                    # Matchup
-                    matchup["id"], matchup["source"], matchup["x"], 
matchup["y"],
-                    matchup["time"].strftime(ISO_8601), matchup["platform"],
-                    matchup.get(depth, ""), matchup.get("sea_water_salinity", 
""),
-                    matchup.get("sea_water_temperature", ""),
-                    matchup.get("wind_speed", ""), 
matchup.get("wind_direction", ""),
-                    matchup.get("wind_u", ""), matchup.get("wind_v", ""),
-                ]
-                writer.writerow(row)
+                # Primary
+                primary_row = [None for _ in range(len(primary_headers))]
+                for key, value in primaryValue.items():
+                    if key == 'matches':
+                        continue
+                    index = primary_headers.index(key)
+                    primary_row[index] = value
+                # Secondary
+                secondary_row = [None for _ in range(len(secondary_headers))]
+                for key, value in matchup.items():
+                    index = secondary_headers.index(key)
+                    secondary_row[index] = value
+                writer.writerow(list(itertools.chain(primary_row, 
secondary_row)))
 
     @staticmethod
     def __addConstants(csvfile):
 
+        version = importlib_metadata.distribution('nexusanalysis').version
+
         global_attrs = [
             {"Global Attribute": "product_version", "Value": "1.0"},
             {"Global Attribute": "Conventions", "Value": "CF-1.6, ACDD-1.3"},
-            {"Global Attribute": "title", "Value": "DOMS satellite-insitu 
machup output file"},
+            {"Global Attribute": "title", "Value": "CDMS satellite-insitu 
machup output file"},
             {"Global Attribute": "history",
-             "Value": "Processing_Version = V1.0, Software_Name = DOMS, 
Software_Version = 1.03"},
-            {"Global Attribute": "institution", "Value": "JPL, FSU, NCAR"},
+             "Value": f"Processing_Version = V1.0, Software_Name = CDMS, 
Software_Version = {version}"},
+            {"Global Attribute": "institution", "Value": "JPL, FSU, NCAR, 
Saildrone"},
             {"Global Attribute": "source", "Value": "doms.jpl.nasa.gov"},
             {"Global Attribute": "standard_name_vocabulary",
              "Value": "CF Standard Name Table v27, BODC controlled 
vocabulary"},
-            {"Global Attribute": "cdm_data_type", "Value": "Point/Profile, 
Swath/Grid"},
+            {"Global Attribute": "cdm_data_type", "Value": "trajectory, 
station, point, swath, grid"},
             {"Global Attribute": "processing_level", "Value": "4"},
-            {"Global Attribute": "project", "Value": "Distributed 
Oceanographic Matchup System (DOMS)"},
+            {"Global Attribute": "project", "Value": "Cloud-based Data Matchup 
Service (CDMS)"},
             {"Global Attribute": "keywords_vocabulary",
              "Value": "NASA Global Change Master Directory (GCMD) Science 
Keywords"},
             # TODO What should the keywords be?
             {"Global Attribute": "keywords", "Value": "SATELLITES, OCEAN 
PLATFORMS, SHIPS, BUOYS, MOORINGS, AUVS, ROV, "
                                                       "NASA/JPL/PODAAC, 
FSU/COAPS, UCAR/NCAR, SALINITY, "
                                                       "SEA SURFACE 
TEMPERATURE, SURFACE WINDS"},
-            {"Global Attribute": "creator_name", "Value": "NASA PO.DAAC"},
-            {"Global Attribute": "creator_email", "Value": 
"[email protected]"},
-            {"Global Attribute": "creator_url", "Value": 
"https://podaac.jpl.nasa.gov/"},
-            {"Global Attribute": "publisher_name", "Value": "NASA PO.DAAC"},
-            {"Global Attribute": "publisher_email", "Value": 
"[email protected]"},
-            {"Global Attribute": "publisher_url", "Value": 
"https://podaac.jpl.nasa.gov"},
-            {"Global Attribute": "acknowledgment", "Value": "DOMS is a 
NASA/AIST-funded project. NRA NNH14ZDA001N."},
+            {"Global Attribute": "creator_name", "Value": "Cloud-Based Data 
Matchup Service (CDMS)"},
+            {"Global Attribute": "creator_email", "Value": 
"[email protected]"},
+            {"Global Attribute": "creator_url", "Value": 
"https://doms.jpl.nasa.gov/"},
+            {"Global Attribute": "publisher_name",  "Value": "CDMS"},
+            {"Global Attribute": "publisher_email", "Value": 
"[email protected]"},
+            {"Global Attribute": "publisher_url", "Value": 
"https://doms.jpl.nasa.gov"},
+            {"Global Attribute": "acknowledgment", "Value": "CDMS is a 
NASA/ACCESS funded project with prior support from NASA/AIST"},
         ]
 
         writer = csv.DictWriter(csvfile, 
sorted(next(iter(global_attrs)).keys()))
@@ -229,45 +212,41 @@ class DomsCSVFormatter:
              "Value": params["startTime"].strftime(ISO_8601)},
             {"Global Attribute": "time_coverage_end",
              "Value": params["endTime"].strftime(ISO_8601)},
-            {"Global Attribute": "time_coverage_resolution", "Value": "point"},
 
             {"Global Attribute": "geospatial_lon_min", "Value": 
params["bbox"].split(',')[0]},
             {"Global Attribute": "geospatial_lat_min", "Value": 
params["bbox"].split(',')[1]},
             {"Global Attribute": "geospatial_lon_max", "Value": 
params["bbox"].split(',')[2]},
             {"Global Attribute": "geospatial_lat_max", "Value": 
params["bbox"].split(',')[3]},
-            {"Global Attribute": "geospatial_lat_resolution", "Value": 
"point"},
-            {"Global Attribute": "geospatial_lon_resolution", "Value": 
"point"},
             {"Global Attribute": "geospatial_lat_units", "Value": 
"degrees_north"},
             {"Global Attribute": "geospatial_lon_units", "Value": 
"degrees_east"},
 
             {"Global Attribute": "geospatial_vertical_min", "Value": 
params["depthMin"]},
             {"Global Attribute": "geospatial_vertical_max", "Value": 
params["depthMax"]},
             {"Global Attribute": "geospatial_vertical_units", "Value": "m"},
-            {"Global Attribute": "geospatial_vertical_resolution", "Value": 
"point"},
             {"Global Attribute": "geospatial_vertical_positive", "Value": 
"down"},
 
-            {"Global Attribute": "DOMS_matchID", "Value": executionId},
-            {"Global Attribute": "DOMS_TimeWindow", "Value": 
params["timeTolerance"] / 60 / 60},
-            {"Global Attribute": "DOMS_TimeWindow_Units", "Value": "hours"},
+            {"Global Attribute": "CDMS_matchID", "Value": executionId},
+            {"Global Attribute": "CDMS_TimeWindow", "Value": 
params["timeTolerance"] / 60 / 60},
+            {"Global Attribute": "CDMS_TimeWindow_Units", "Value": "hours"},
 
-            {"Global Attribute": "DOMS_platforms", "Value": 
params["platforms"]},
-            {"Global Attribute": "DOMS_SearchRadius", "Value": 
params["radiusTolerance"]},
-            {"Global Attribute": "DOMS_SearchRadius_Units", "Value": "m"},
+            {"Global Attribute": "CDMS_platforms", "Value": 
params["platforms"]},
+            {"Global Attribute": "CDMS_SearchRadius", "Value": 
params["radiusTolerance"]},
+            {"Global Attribute": "CDMS_SearchRadius_Units", "Value": "m"},
 
-            {"Global Attribute": "DOMS_DatasetMetadata", "Value": ', 
'.join(insituLinks)},
-            {"Global Attribute": "DOMS_primary", "Value": params["primary"]},
-            {"Global Attribute": "DOMS_match_up", "Value": params["matchup"]},
-            {"Global Attribute": "DOMS_ParameterPrimary", "Value": 
params.get("parameter", "")},
+            {"Global Attribute": "CDMS_DatasetMetadata", "Value": ', 
'.join(insituLinks)},
+            {"Global Attribute": "CDMS_primary", "Value": params["primary"]},
+            {"Global Attribute": "CDMS_secondary", "Value": 
','.join(params['matchup'])},
+            {"Global Attribute": "CDMS_ParameterPrimary", "Value": 
params.get("parameter", "")},
 
-            {"Global Attribute": "DOMS_time_to_complete", "Value": 
details["timeToComplete"]},
-            {"Global Attribute": "DOMS_time_to_complete_units", "Value": 
"seconds"},
-            {"Global Attribute": "DOMS_num_matchup_matched", "Value": 
details["numInSituMatched"]},
-            {"Global Attribute": "DOMS_num_primary_matched", "Value": 
details["numGriddedMatched"]},
+            {"Global Attribute": "CDMS_time_to_complete", "Value": 
details["timeToComplete"]},
+            {"Global Attribute": "CDMS_time_to_complete_units", "Value": 
"seconds"},
+            {"Global Attribute": "CDMS_num_secondary_matched", "Value": 
details["numInSituMatched"]},
+            {"Global Attribute": "CDMS_num_primary_matched", "Value": 
details["numGriddedMatched"]},
 
             {"Global Attribute": "date_modified", "Value": 
datetime.utcnow().replace(tzinfo=UTC).strftime(ISO_8601)},
             {"Global Attribute": "date_created", "Value": 
datetime.utcnow().replace(tzinfo=UTC).strftime(ISO_8601)},
 
-            {"Global Attribute": "URI_Matchup", "Value": 
"http://{webservice}/domsresults?id=" + executionId + "&output=CSV"},
+            {"Global Attribute": "URI_Matchup", "Value": 
"https://doms.jpl.nasa.gov/domsresults?id=" + executionId + "&output=CSV"}, # 
TODO how to replace with actual req URL
         ]
 
         writer = csv.DictWriter(csvfile, 
sorted(next(iter(global_attrs)).keys()))
@@ -300,22 +279,18 @@ class DomsNetCDFFormatter:
         dataset.geospatial_lat_min = bbox.south
         dataset.geospatial_lon_max = bbox.east
         dataset.geospatial_lon_min = bbox.west
-        dataset.geospatial_lat_resolution = "point"
-        dataset.geospatial_lon_resolution = "point"
         dataset.geospatial_lat_units = "degrees_north"
         dataset.geospatial_lon_units = "degrees_east"
         dataset.geospatial_vertical_min = float(params["depthMin"])
         dataset.geospatial_vertical_max = float(params["depthMax"])
         dataset.geospatial_vertical_units = "m"
-        dataset.geospatial_vertical_resolution = "point"
         dataset.geospatial_vertical_positive = "down"
 
         dataset.DOMS_TimeWindow = params["timeTolerance"] / 60 / 60
         dataset.DOMS_TimeWindow_Units = "hours"
         dataset.DOMS_SearchRadius = float(params["radiusTolerance"])
         dataset.DOMS_SearchRadius_Units = "m"
-        # dataset.URI_Subset = "http://webservice subsetting query request"
-        dataset.URI_Matchup = "http://{webservice}/domsresults?id=" + 
executionId + "&output=NETCDF"
+        dataset.URI_Matchup = "https://doms.jpl.nasa.gov/domsresults?id=" + 
executionId + "&output=NETCDF"
         dataset.DOMS_ParameterPrimary = params["parameter"] if "parameter" in 
params else ""
         dataset.DOMS_platforms = params["platforms"]
         dataset.DOMS_primary = params["primary"]
@@ -366,7 +341,7 @@ class DomsNetCDFFormatter:
         dataset.Conventions = "CF-1.6, ACDD-1.3"
         dataset.title = "DOMS satellite-insitu machup output file"
         dataset.history = "Processing_Version = V1.0, Software_Name = DOMS, 
Software_Version = 1.03"
-        dataset.institution = "JPL, FSU, NCAR"
+        dataset.institution = "JPL, FSU, NCAR, Saildrone"
         dataset.source = "doms.jpl.nasa.gov"
         dataset.standard_name_vocabulary = "CF Standard Name Table v27", "BODC 
controlled vocabulary"
         dataset.cdm_data_type = "Point/Profile, Swath/Grid"
@@ -375,13 +350,13 @@ class DomsNetCDFFormatter:
         dataset.keywords_vocabulary = "NASA Global Change Master Directory 
(GCMD) Science Keywords"
         dataset.keywords = "SATELLITES, OCEAN PLATFORMS, SHIPS, BUOYS, 
MOORINGS, AUVS, ROV, NASA/JPL/PODAAC, " \
                            "FSU/COAPS, UCAR/NCAR, SALINITY, SEA SURFACE 
TEMPERATURE, SURFACE WINDS"
-        dataset.creator_name = "NASA PO.DAAC"
-        dataset.creator_email = "[email protected]"
-        dataset.creator_url = "https://podaac.jpl.nasa.gov/"
-        dataset.publisher_name = "NASA PO.DAAC"
-        dataset.publisher_email = "[email protected]"
-        dataset.publisher_url = "https://podaac.jpl.nasa.gov"
-        dataset.acknowledgment = "DOMS is a NASA/AIST-funded project. NRA 
NNH14ZDA001N."
+        dataset.creator_name = "Cloud-Based Data Matchup Service (CDMS)"
+        dataset.creator_email = "[email protected]"
+        dataset.creator_url = "https://doms.jpl.nasa.gov/"
+        dataset.publisher_name = "Cloud-Based Data Matchup Service (CDMS)"
+        dataset.publisher_email = "[email protected]"
+        dataset.publisher_url = "https://doms.jpl.nasa.gov"
+        dataset.acknowledgment = "CDMS is a NASA/ACCESS funded project with 
prior support from NASA/AIST"
 
     @staticmethod
     def __writeResults(results, satelliteWriter, insituWriter):
diff --git a/analysis/webservice/algorithms/doms/ResultsStorage.py 
b/analysis/webservice/algorithms/doms/ResultsStorage.py
index 4a23721..e9005b7 100644
--- a/analysis/webservice/algorithms/doms/ResultsStorage.py
+++ b/analysis/webservice/algorithms/doms/ResultsStorage.py
@@ -18,6 +18,7 @@
 import configparser
 import logging
 import uuid
+import numpy as np
 from datetime import datetime
 
 import pkg_resources
@@ -168,16 +169,21 @@ class ResultsStorage(AbstractResultsContainer):
         self._session.execute(batch)
 
     def __insertResult(self, execution_id, primaryId, result, batch, 
insertStatement):
+        data_dict = {}
+        if 'primary' in result:
+            data_dict = result['primary']
+        elif 'secondary' in result:
+            data_dict = result['secondary']
 
-        dataMap = self.__buildDataMap(result)
+        dataMap = self.__buildDataMap(data_dict)
         result_id = uuid.uuid4()
         batch.add(insertStatement, (
             result_id,
             execution_id,
             result["id"],
             primaryId,
-            result["x"],
-            result["y"],
+            result["lon"],
+            result["lat"],
             result["source"],
             result["time"],
             result["platform"] if "platform" in result else None,
@@ -206,11 +212,16 @@ class ResultsStorage(AbstractResultsContainer):
 
     def __buildDataMap(self, result):
         dataMap = {}
-        for name in result:
-            value = result[name]
-            if name not in ["id", "x", "y", "source", "time", "platform", 
"device", "point", "matches"] and type(
-                    value) in [float, int]:
-                dataMap[name] = value
+        for data_dict in result:
+            name = data_dict.get('cf_variable_name')
+
+            if name is None:
+                name = data_dict['variable_name']
+
+            value = data_dict['variable_value']
+            if isinstance(value, np.generic):
+                value = value.item()
+            dataMap[name] = value
         return dataMap
 
 
@@ -259,16 +270,16 @@ class ResultsRetrieval(AbstractResultsContainer):
     def __rowToDataEntry(self, row, trim_data=False):
         if trim_data:
             entry = {
-                "x": float(row.x),
-                "y": float(row.y),
+                "lon": float(row.x),
+                "lat": float(row.y),
                 "source": row.source_dataset,
                 "time": row.measurement_time.replace(tzinfo=UTC)
             }
         else:
             entry = {
                 "id": row.value_id,
-                "x": float(row.x),
-                "y": float(row.y),
+                "lon": float(row.x),
+                "lat": float(row.y),
                 "source": row.source_dataset,
                 "device": row.device,
                 "platform": row.platform,
diff --git a/analysis/webservice/algorithms/doms/config.py 
b/analysis/webservice/algorithms/doms/config.py
index ff492e8..07c4d16 100644
--- a/analysis/webservice/algorithms/doms/config.py
+++ b/analysis/webservice/algorithms/doms/config.py
@@ -50,8 +50,9 @@ ENDPOINTS = [
 
 METADATA_LINKS = {
    "samos": "http://samos.coaps.fsu.edu/html/nav.php?s=2",
-    "icoads": "https://rda.ucar.edu/datasets/ds548.1/",
-    "spurs": "https://podaac.jpl.nasa.gov/spurs"
+    "icoads": "https://rda.ucar.edu/datasets/ds548.0/",
+    "spurs": "https://podaac.jpl.nasa.gov/spurs",
+    "spurs2": "https://podaac.jpl.nasa.gov/spurs?tab=spurs2-campaign",
 }
 
 import os
diff --git a/analysis/webservice/algorithms_spark/Matchup.py 
b/analysis/webservice/algorithms_spark/Matchup.py
index 66ef2a9..7bc75a6 100644
--- a/analysis/webservice/algorithms_spark/Matchup.py
+++ b/analysis/webservice/algorithms_spark/Matchup.py
@@ -307,8 +307,8 @@ class Matchup(NexusCalcSparkHandler):
         doms_dict = {
             "platform": doms_values.getPlatformById(domspoint.platform),
             "device": doms_values.getDeviceById(domspoint.device),
-            "x": str(domspoint.longitude),
-            "y": str(domspoint.latitude),
+            "lon": str(domspoint.longitude),
+            "lat": str(domspoint.latitude),
             "point": "Point(%s %s)" % (domspoint.longitude, 
domspoint.latitude),
             "time": datetime.strptime(domspoint.time, 
"%Y-%m-%dT%H:%M:%SZ").replace(tzinfo=UTC),
             "fileurl": domspoint.file_url,
@@ -358,6 +358,24 @@ class DomsPoint(object):
         return str(self.__dict__)
 
     @staticmethod
+    def _variables_to_device(variables):
+        """
+        Given a list of science variables, attempt to determine what
+        the correct device is. This method will only be used for
+        satellite measurements, so the only options are 'scatterometers'
+        or 'radiometers'
+
+        :param variables: List of variable names
+        :return: device id integer
+        """
+        for variable in variables:
+            if 'wind' in variable.variable_name.lower():
+                # scatterometers
+                return 6
+        # Assume radiometers
+        return 5
+
+    @staticmethod
     def from_nexus_point(nexus_point, tile=None):
         point = DomsPoint()
 
@@ -406,9 +424,8 @@ class DomsPoint(object):
         point.source = tile.dataset
         point.file_url = tile.granule
 
-        # TODO device should change based on the satellite making the 
observations.
         point.platform = 9
-        point.device = 5
+        point.device = DomsPoint._variables_to_device(tile.variables)
         return point
 
     @staticmethod

Reply via email to