This is an automated email from the ASF dual-hosted git repository.

fgreg pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/incubator-sdap-nexus.git


The following commit(s) were added to refs/heads/master by this push:
     new 301292d  SDAP-116 Fix bug in TimeAvgMapSpark that results in row of zeros for certain job parameters (#18)
301292d is described below

commit 301292d1c890584695e8fac3ab800b0078f88be4
Author: Joseph Jacob <[email protected]>
AuthorDate: Mon Jul 9 15:01:07 2018 -0700

    SDAP-116 Fix bug in TimeAvgMapSpark that results in row of zeros for certain job parameters (#18)
    
    Fixed incorrect rounding of the calculations of latitude/longitude index and number of coordinates.
---
 analysis/webservice/NexusHandler.py                     | 8 ++++++--
 analysis/webservice/algorithms_spark/ClimMapSpark.py    | 8 +++-----
 analysis/webservice/algorithms_spark/CorrMapSpark.py    | 8 +++-----
 analysis/webservice/algorithms_spark/TimeAvgMapSpark.py | 8 +++-----
 4 files changed, 15 insertions(+), 17 deletions(-)

diff --git a/analysis/webservice/NexusHandler.py b/analysis/webservice/NexusHandler.py
index b51c77c..50caef6 100644
--- a/analysis/webservice/NexusHandler.py
+++ b/analysis/webservice/NexusHandler.py
@@ -384,6 +384,10 @@ class SparkHandler(NexusHandler):
                     self._maxLatCent = np.max(lats_agg)
                     self._minLonCent = np.min(lons_agg)
                     self._maxLonCent = np.max(lons_agg)
+                    self._nlats = int((self._maxLatCent - self._minLatCent) /
+                                      self._latRes + 0.5) + 1
+                    self._nlons = int((self._maxLonCent - self._minLonCent) /
+                                      self._lonRes + 0.5) + 1
             t -= t_incr
         return nexus_tiles
 
@@ -506,10 +510,10 @@ class SparkHandler(NexusHandler):
             del nexus_tiles[i]
 
     def _lat2ind(self, lat):
-        return int((lat - self._minLatCent) / self._latRes)
+        return int((lat - self._minLatCent) / self._latRes + 0.5)
 
     def _lon2ind(self, lon):
-        return int((lon - self._minLonCent) / self._lonRes)
+        return int((lon - self._minLonCent) / self._lonRes + 0.5)
 
     def _ind2lat(self, y):
         return self._minLatCent + y * self._latRes
diff --git a/analysis/webservice/algorithms_spark/ClimMapSpark.py b/analysis/webservice/algorithms_spark/ClimMapSpark.py
index 7a744f0..eb567f5 100644
--- a/analysis/webservice/algorithms_spark/ClimMapSpark.py
+++ b/analysis/webservice/algorithms_spark/ClimMapSpark.py
@@ -156,9 +156,7 @@ class ClimMapSparkHandlerImpl(SparkHandler):
         #    print 'lats: ', tile.latitudes.compressed()
         #    print 'lons: ', tile.longitudes.compressed()
         self.log.debug('Using Native resolution: lat_res={0}, lon_res={1}'.format(self._latRes, self._lonRes))
-        nlats = int((self._maxLat - self._minLatCent) / self._latRes) + 1
-        nlons = int((self._maxLon - self._minLonCent) / self._lonRes) + 1
-        self.log.debug('nlats={0}, nlons={1}'.format(nlats, nlons))
+        self.log.debug('nlats={0}, nlons={1}'.format(self._nlats, self._nlons))
         self.log.debug('center lat range = {0} to {1}'.format(self._minLatCent,
                                                               self._maxLatCent))
         self.log.debug('center lon range = {0} to {1}'.format(self._minLonCent,
@@ -222,8 +220,8 @@ class ClimMapSparkHandlerImpl(SparkHandler):
         #
         # The tiles below are NOT Nexus objects.  They are tuples
         # with the time avg map data and lat-lon bounding box.
-        a = np.zeros((nlats, nlons), dtype=np.float64, order='C')
-        n = np.zeros((nlats, nlons), dtype=np.uint32, order='C')
+        a = np.zeros((self._nlats, self._nlons), dtype=np.float64, order='C')
+        n = np.zeros((self._nlats, self._nlons), dtype=np.uint32, order='C')
         for tile in avg_tiles:
             if tile is not None:
                 ((tile_min_lat, tile_max_lat, tile_min_lon, tile_max_lon),
diff --git a/analysis/webservice/algorithms_spark/CorrMapSpark.py b/analysis/webservice/algorithms_spark/CorrMapSpark.py
index e74f835..c6b0c99 100644
--- a/analysis/webservice/algorithms_spark/CorrMapSpark.py
+++ b/analysis/webservice/algorithms_spark/CorrMapSpark.py
@@ -198,9 +198,7 @@ class CorrMapSparkHandlerImpl(SparkHandler):
 
         self.log.debug('Found {0} tiles'.format(len(nexus_tiles)))
         self.log.debug('Using Native resolution: lat_res={0}, lon_res={1}'.format(self._latRes, self._lonRes))
-        nlats = int((self._maxLat - self._minLatCent) / self._latRes) + 1
-        nlons = int((self._maxLon - self._minLonCent) / self._lonRes) + 1
-        self.log.debug('nlats={0}, nlons={1}'.format(nlats, nlons))
+        self.log.debug('nlats={0}, nlons={1}'.format(self._nlats, self._nlons))
 
         # Create array of tuples to pass to Spark map function
         nexus_tiles_spark = [[self._find_tile_bounds(t),
@@ -282,8 +280,8 @@ class CorrMapSparkHandlerImpl(SparkHandler):
                                        mask=~(n.astype(bool))),
                            n)).collect()
 
-        r = np.zeros((nlats, nlons), dtype=np.float64, order='C')
-        n = np.zeros((nlats, nlons), dtype=np.uint32, order='C')
+        r = np.zeros((self._nlats, self._nlons), dtype=np.float64, order='C')
+        n = np.zeros((self._nlats, self._nlons), dtype=np.uint32, order='C')
 
         # The tiles below are NOT Nexus objects.  They are tuples
         # with the following for each correlation map subset:
diff --git a/analysis/webservice/algorithms_spark/TimeAvgMapSpark.py b/analysis/webservice/algorithms_spark/TimeAvgMapSpark.py
index 3e5191b..19de786 100644
--- a/analysis/webservice/algorithms_spark/TimeAvgMapSpark.py
+++ b/analysis/webservice/algorithms_spark/TimeAvgMapSpark.py
@@ -146,9 +146,7 @@ class TimeAvgMapSparkHandlerImpl(SparkHandler):
         self.log.debug('Found {0} tiles'.format(len(nexus_tiles)))
 
         self.log.debug('Using Native resolution: lat_res={0}, lon_res={1}'.format(self._latRes, self._lonRes))
-        nlats = int((self._maxLat - self._minLatCent) / self._latRes) + 1
-        nlons = int((self._maxLon - self._minLonCent) / self._lonRes) + 1
-        self.log.debug('nlats={0}, nlons={1}'.format(nlats, nlons))
+        self.log.debug('nlats={0}, nlons={1}'.format(self._nlats, self._nlons))
         self.log.debug('center lat range = {0} to {1}'.format(self._minLatCent,
                                                               self._maxLatCent))
         self.log.debug('center lon range = {0} to {1}'.format(self._minLonCent,
@@ -213,8 +211,8 @@ class TimeAvgMapSparkHandlerImpl(SparkHandler):
         #
         # The tiles below are NOT Nexus objects.  They are tuples
         # with the time avg map data and lat-lon bounding box.
-        a = np.zeros((nlats, nlons), dtype=np.float64, order='C')
-        n = np.zeros((nlats, nlons), dtype=np.uint32, order='C')
+        a = np.zeros((self._nlats, self._nlons), dtype=np.float64, order='C')
+        n = np.zeros((self._nlats, self._nlons), dtype=np.uint32, order='C')
         for tile in avg_tiles:
             if tile is not None:
                 ((tile_min_lat, tile_max_lat, tile_min_lon, tile_max_lon),

Reply via email to