reformatted code
Project: http://git-wip-us.apache.org/repos/asf/incubator-sdap-nexus/repo Commit: http://git-wip-us.apache.org/repos/asf/incubator-sdap-nexus/commit/f73d912e Tree: http://git-wip-us.apache.org/repos/asf/incubator-sdap-nexus/tree/f73d912e Diff: http://git-wip-us.apache.org/repos/asf/incubator-sdap-nexus/diff/f73d912e Branch: refs/heads/master Commit: f73d912e1416bf1da99276242551f0f2ded5f7ec Parents: 1c6f8db Author: Frank Greguska <[email protected]> Authored: Thu Jan 11 10:27:01 2018 -0800 Committer: Frank Greguska <[email protected]> Committed: Thu Jan 11 10:27:01 2018 -0800 ---------------------------------------------------------------------- analysis/tests/__init__.py | 2 +- .../algorithms/longitudelatitudemap_test.py | 10 +- analysis/tests/algorithms_spark/Matchup_test.py | 6 +- analysis/tests/algorithms_spark/__init__.py | 2 +- analysis/webservice/Filtering.py | 9 +- analysis/webservice/GenerateImageMRF.py | 31 +- analysis/webservice/LayerConfig.py | 57 +- analysis/webservice/NexusHandler.py | 154 +-- analysis/webservice/WorkflowDriver.py | 6 +- .../webservice/algorithms/ColorBarHandler.py | 6 +- .../webservice/algorithms/CorrelationMap.py | 28 +- .../algorithms/DailyDifferenceAverage.py | 2 +- .../webservice/algorithms/DataInBoundsSearch.py | 3 +- .../webservice/algorithms/DataSeriesList.py | 1 + analysis/webservice/algorithms/Heartbeat.py | 3 +- .../algorithms/LongitudeLatitudeMap.py | 26 +- .../webservice/algorithms/MapFetchHandler.py | 23 +- .../webservice/algorithms/TestInitializer.py | 6 +- analysis/webservice/algorithms/TileSearch.py | 2 +- analysis/webservice/algorithms/TimeAvgMap.py | 98 +- analysis/webservice/algorithms/TimeSeries.py | 8 +- .../webservice/algorithms/TimeSeriesSolr.py | 17 +- analysis/webservice/algorithms/__init__.py | 2 +- analysis/webservice/algorithms/colortables.py | 1037 +++++++++--------- .../algorithms/doms/BaseDomsHandler.py | 2 +- .../algorithms/doms/DatasetListQuery.py | 35 +- .../algorithms/doms/DomsInitialization.py | 4 
+- .../webservice/algorithms/doms/MatchupQuery.py | 3 +- .../webservice/algorithms/doms/MetadataQuery.py | 19 +- .../algorithms/doms/ResultsPlotQuery.py | 2 +- .../algorithms/doms/ResultsRetrieval.py | 2 +- .../algorithms/doms/ResultsStorage.py | 1 - .../webservice/algorithms/doms/StatsQuery.py | 16 +- .../webservice/algorithms/doms/ValuesQuery.py | 25 +- analysis/webservice/algorithms/doms/__init__.py | 12 +- .../webservice/algorithms/doms/datafetch.py | 7 +- .../webservice/algorithms/doms/fetchedgeimpl.py | 3 +- analysis/webservice/algorithms/doms/geo.py | 29 +- .../webservice/algorithms/doms/histogramplot.py | 34 +- analysis/webservice/algorithms/doms/mapplot.py | 36 +- .../webservice/algorithms/doms/scatterplot.py | 28 +- .../webservice/algorithms/doms/workerthread.py | 11 +- .../algorithms_spark/HofMoellerSpark.py | 29 +- analysis/webservice/algorithms_spark/Matchup.py | 1 - .../algorithms_spark/TimeSeriesSpark.py | 3 +- .../webservice/algorithms_spark/__init__.py | 2 +- analysis/webservice/matserver.py | 3 +- analysis/webservice/webapp.py | 2 +- analysis/webservice/webmodel.py | 41 +- 49 files changed, 939 insertions(+), 950 deletions(-) ---------------------------------------------------------------------- http://git-wip-us.apache.org/repos/asf/incubator-sdap-nexus/blob/f73d912e/analysis/tests/__init__.py ---------------------------------------------------------------------- diff --git a/analysis/tests/__init__.py b/analysis/tests/__init__.py index bd9282c..76c37c9 100644 --- a/analysis/tests/__init__.py +++ b/analysis/tests/__init__.py @@ -1,4 +1,4 @@ """ Copyright (c) 2016 Jet Propulsion Laboratory, California Institute of Technology. 
All rights reserved -""" \ No newline at end of file +""" http://git-wip-us.apache.org/repos/asf/incubator-sdap-nexus/blob/f73d912e/analysis/tests/algorithms/longitudelatitudemap_test.py ---------------------------------------------------------------------- diff --git a/analysis/tests/algorithms/longitudelatitudemap_test.py b/analysis/tests/algorithms/longitudelatitudemap_test.py index 0728ab2..f6d83b0 100644 --- a/analysis/tests/algorithms/longitudelatitudemap_test.py +++ b/analysis/tests/algorithms/longitudelatitudemap_test.py @@ -2,22 +2,19 @@ Copyright (c) 2016 Jet Propulsion Laboratory, California Institute of Technology. All rights reserved """ -import json import time import unittest -import urllib from multiprocessing.pool import ThreadPool -from unittest import skip +from NexusHandler import AlgorithmModuleWrapper from mock import MagicMock from nexustiles.nexustiles import NexusTileService from shapely.geometry import box -from tornado.testing import AsyncHTTPTestCase, bind_unused_port +from tornado.testing import bind_unused_port from tornado.web import Application - -from NexusHandler import AlgorithmModuleWrapper from webapp import ModularNexusHandlerWrapper from webmodel import NexusRequestObject + from webservice.algorithms import LongitudeLatitudeMap @@ -73,6 +70,7 @@ class HttpIntegrationTest(unittest.TestCase): "endTime": "2016-12-01T00:00:00Z" } return params[args[0]] + request_handler_mock = MagicMock() request_handler_mock.get_argument.side_effect = get_argument request = NexusRequestObject(request_handler_mock) http://git-wip-us.apache.org/repos/asf/incubator-sdap-nexus/blob/f73d912e/analysis/tests/algorithms_spark/Matchup_test.py ---------------------------------------------------------------------- diff --git a/analysis/tests/algorithms_spark/Matchup_test.py b/analysis/tests/algorithms_spark/Matchup_test.py index 48271a1..75062d9 100644 --- a/analysis/tests/algorithms_spark/Matchup_test.py +++ 
b/analysis/tests/algorithms_spark/Matchup_test.py @@ -2,13 +2,11 @@ Copyright (c) 2016 Jet Propulsion Laboratory, California Institute of Technology. All rights reserved """ +import pickle import random import timeit import unittest -import pickle -import json -import numpy as np from webservice.algorithms_spark.Matchup import * @@ -308,4 +306,4 @@ class TestMatchup(unittest.TestCase): "lon: %s, lat: %s, time: %s, wind u,v: %s,%s" % (k.longitude, k.latitude, k.time, k.wind_u, k.wind_v), '\n\t\t'.join( ["lon: %s, lat: %s, time: %s, wind u,v: %s,%s" % ( - i.longitude, i.latitude, i.time, i.wind_u, i.wind_v) for i in v])) + i.longitude, i.latitude, i.time, i.wind_u, i.wind_v) for i in v])) http://git-wip-us.apache.org/repos/asf/incubator-sdap-nexus/blob/f73d912e/analysis/tests/algorithms_spark/__init__.py ---------------------------------------------------------------------- diff --git a/analysis/tests/algorithms_spark/__init__.py b/analysis/tests/algorithms_spark/__init__.py index bd9282c..76c37c9 100644 --- a/analysis/tests/algorithms_spark/__init__.py +++ b/analysis/tests/algorithms_spark/__init__.py @@ -1,4 +1,4 @@ """ Copyright (c) 2016 Jet Propulsion Laboratory, California Institute of Technology. All rights reserved -""" \ No newline at end of file +""" http://git-wip-us.apache.org/repos/asf/incubator-sdap-nexus/blob/f73d912e/analysis/webservice/Filtering.py ---------------------------------------------------------------------- diff --git a/analysis/webservice/Filtering.py b/analysis/webservice/Filtering.py index 08b976e..3fa7351 100644 --- a/analysis/webservice/Filtering.py +++ b/analysis/webservice/Filtering.py @@ -3,18 +3,11 @@ Copyright (c) 2016 Jet Propulsion Laboratory, California Institute of Technology. 
All rights reserved """ -import math - import logging import traceback import numpy as np -from scipy import stats -from scipy.fftpack import fft -from scipy.ndimage.interpolation import zoom -from scipy.interpolate import UnivariateSpline -from scipy.signal import wiener, filtfilt, butter, gaussian, freqz -from scipy.ndimage import filters +from scipy.signal import filtfilt, butter log = logging.getLogger('Filtering') http://git-wip-us.apache.org/repos/asf/incubator-sdap-nexus/blob/f73d912e/analysis/webservice/GenerateImageMRF.py ---------------------------------------------------------------------- diff --git a/analysis/webservice/GenerateImageMRF.py b/analysis/webservice/GenerateImageMRF.py index 7d2060a..2fd41cb 100644 --- a/analysis/webservice/GenerateImageMRF.py +++ b/analysis/webservice/GenerateImageMRF.py @@ -2,8 +2,8 @@ Copyright (c) 2017 Jet Propulsion Laboratory, California Institute of Technology. All rights reserved """ -import os import errno +import os from shutil import copyfile from subprocess import call @@ -28,6 +28,7 @@ ANTARCTIC_PROJECTION = 'EPSG:3031' ARCTIC_TILEMATRIXSET = 'EPSG3413_1km' ARCTIC_PROJECTION = 'EPSG:3413' + def create_geo_mrf_header(shortname): header = """<MRF_META> <Raster> @@ -53,6 +54,7 @@ def create_geo_mrf_header(shortname): filename = path + shortname + '-geo.mrf' write_to_file(filename, header) + def create_geo_xml_config(shortname, prefix): config = """<?xml version="1.0" encoding="UTF-8"?> <LayerConfiguration> @@ -82,6 +84,7 @@ def create_geo_xml_config(shortname, prefix): filename = path + shortname + '-geo.xml' write_to_file(filename, config) + def create_arctic_mrf_header(shortname): header = """<MRF_META> <Raster> @@ -107,6 +110,7 @@ def create_arctic_mrf_header(shortname): filename = path + shortname + '-arctic.mrf' write_to_file(filename, header) + def create_arctic_xml_config(shortname, prefix): config = """<?xml version="1.0" encoding="UTF-8"?> <LayerConfiguration> @@ -136,6 +140,7 @@ def 
create_arctic_xml_config(shortname, prefix): filename = path + shortname + '-arctic.xml' write_to_file(filename, config) + def create_antarctic_mrf_header(shortname): header = """<MRF_META> <Raster> @@ -161,6 +166,7 @@ def create_antarctic_mrf_header(shortname): filename = path + shortname + '-antarctic.mrf' write_to_file(filename, header) + def create_antarctic_xml_config(shortname, prefix): config = """<?xml version="1.0" encoding="UTF-8"?> <LayerConfiguration> @@ -190,6 +196,7 @@ def create_antarctic_xml_config(shortname, prefix): filename = path + shortname + '-antarctic.xml' write_to_file(filename, config) + def geo_to_mrf(intiff, prefix, year, dt, shortname): path = shortname + '/MRF-GEO/' + str(year) create_path(path) @@ -210,6 +217,7 @@ def geo_to_mrf(intiff, prefix, year, dt, shortname): return retcode + def geo_to_arctic_mrf(intiff, prefix, year, dt, shortname): path = shortname + '/MRF-ARCTIC/' + str(year) create_path(path) @@ -227,7 +235,8 @@ def geo_to_arctic_mrf(intiff, prefix, year, dt, shortname): if retcode == 0: print('Reprojecting to Arctic...') retcode = call([gdal_dir + gdalwarp, "-s_srs", geo_wkt_file, "-t_srs", tgt_proj4_north, "-wo", - "SOURCE_EXTRA=125", "-dstnodata", "0", "-of", "GTiff", "-overwrite", subsetnorthtiff, outputnorthtiff]) + "SOURCE_EXTRA=125", "-dstnodata", "0", "-of", "GTiff", "-overwrite", subsetnorthtiff, + outputnorthtiff]) if retcode == 0: print("Creating Arctic MRF...") @@ -235,7 +244,8 @@ def geo_to_arctic_mrf(intiff, prefix, year, dt, shortname): dst = path + '/' + prefix + '_' + str(dt) + "_.ppg" copyfile(src, dst) - retcode = call([gdal_dir + gdal_translate, "-of", "MRF", "-co", "COMPRESS=" + COMPRESSION, "-co", "BLOCKSIZE=512", + retcode = call( + [gdal_dir + gdal_translate, "-of", "MRF", "-co", "COMPRESS=" + COMPRESSION, "-co", "BLOCKSIZE=512", "-outsize", str(OUTWIDTHPOLAR), str(OUTHEIGHTPOLAR), outputnorthtiff, output]) if retcode == 0: @@ -244,6 +254,7 @@ def geo_to_arctic_mrf(intiff, prefix, year, dt, 
shortname): return retcode + def geo_to_antarctic_mrf(intiff, prefix, year, dt, shortname, interp): if (interp == "") or (interp is None): interp = "near" @@ -264,7 +275,8 @@ def geo_to_antarctic_mrf(intiff, prefix, year, dt, shortname, interp): if retcode == 0: print("Reprojecting to Antarctic...") retcode = call([gdal_dir + gdalwarp, "-s_srs", geo_wkt_file, "-t_srs", tgt_proj4_south, "-wo", - "SOURCE_EXTRA=125", "-r", interp, "-dstnodata", "0", "-of", "GTiff", "-overwrite", subsetsouthtiff, + "SOURCE_EXTRA=125", "-r", interp, "-dstnodata", "0", "-of", "GTiff", "-overwrite", + subsetsouthtiff, outputsouthtiff]) if retcode == 0: @@ -273,8 +285,9 @@ def geo_to_antarctic_mrf(intiff, prefix, year, dt, shortname, interp): dst = path + '/' + prefix + '_' + str(dt) + "_.ppg" copyfile(src, dst) - retcode = call([gdal_dir + gdal_translate, "-of", "MRF", "-co", "COMPRESS=" + COMPRESSION, "-co", "BLOCKSIZE=512", - "-r", interp, "-outsize", str(OUTWIDTHPOLAR), str(OUTHEIGHTPOLAR), outputsouthtiff, output]) + retcode = call( + [gdal_dir + gdal_translate, "-of", "MRF", "-co", "COMPRESS=" + COMPRESSION, "-co", "BLOCKSIZE=512", + "-r", interp, "-outsize", str(OUTWIDTHPOLAR), str(OUTHEIGHTPOLAR), outputsouthtiff, output]) if retcode == 0: print("Creating Antarctic Tiles...") @@ -282,6 +295,7 @@ def geo_to_antarctic_mrf(intiff, prefix, year, dt, shortname, interp): return retcode + def write_to_file(filename, data): try: f = open(filename, 'w') @@ -290,6 +304,7 @@ def write_to_file(filename, data): except Exception as e: print("Error creating " + filename + ":\n" + str(e)) + def create_path(path): try: os.makedirs(path) @@ -297,6 +312,7 @@ def create_path(path): if e.errno != errno.EEXIST: raise + def create_all(shortname, prefix): create_geo_mrf_header(shortname) create_geo_xml_config(shortname, prefix) @@ -305,7 +321,8 @@ def create_all(shortname, prefix): create_antarctic_mrf_header(shortname) create_antarctic_xml_config(shortname, prefix) + def png_to_tif(input, output): 
retcode = call([gdal_dir + gdal_translate, "-of", "GTiff", "-ot", "byte", "-a_ullr", "-180", "90", "180", "-90", input, output]) - return retcode \ No newline at end of file + return retcode http://git-wip-us.apache.org/repos/asf/incubator-sdap-nexus/blob/f73d912e/analysis/webservice/LayerConfig.py ---------------------------------------------------------------------- diff --git a/analysis/webservice/LayerConfig.py b/analysis/webservice/LayerConfig.py index e271b72..12f66d5 100644 --- a/analysis/webservice/LayerConfig.py +++ b/analysis/webservice/LayerConfig.py @@ -3,37 +3,35 @@ Copyright (c) 2016 Jet Propulsion Laboratory, California Institute of Technology. All rights reserved """ - ALL_LAYERS_ENABLED = True LAYERS = [] -LAYERS.append({"shortName":"NCDC-L4LRblend-GLOB-AVHRR_OI", "envs": ("ALL",)}) -LAYERS.append({"shortName":"SSH_alti_1deg_1mon", "envs": ("ALL",)}) - - -LAYERS.append({"shortName":"SIacSubl_ECCO_version4_release1", "envs": ("ALL",)}) -LAYERS.append({"shortName":"PHIBOT_ECCO_version4_release1", "envs": ("ALL",)}) -LAYERS.append({"shortName":"SIhsnow_ECCO_version4_release1", "envs": ("ALL",)}) -LAYERS.append({"shortName":"SIheff_ECCO_version4_release1", "envs": ("ALL",)}) -LAYERS.append({"shortName":"oceFWflx_ECCO_version4_release1", "envs": ("ALL",)}) -LAYERS.append({"shortName":"oceQnet_ECCO_version4_release1", "envs": ("ALL",)}) -LAYERS.append({"shortName":"MXLDEPTH_ECCO_version4_release1", "envs": ("ALL",)}) -LAYERS.append({"shortName":"SIatmQnt_ECCO_version4_release1", "envs": ("ALL",)}) -LAYERS.append({"shortName":"oceSPflx_ECCO_version4_release1", "envs": ("ALL",)}) -LAYERS.append({"shortName":"oceSPDep_ECCO_version4_release1", "envs": ("ALL",)}) -LAYERS.append({"shortName":"SIarea_ECCO_version4_release1", "envs": ("ALL",)}) -LAYERS.append({"shortName":"ETAN_ECCO_version4_release1", "envs": ("ALL",)}) -LAYERS.append({"shortName":"sIceLoad_ECCO_version4_release1", "envs": ("ALL",)}) -LAYERS.append({"shortName":"oceQsw_ECCO_version4_release1", 
"envs": ("ALL",)}) -LAYERS.append({"shortName":"SIsnPrcp_ECCO_version4_release1", "envs": ("ALL",)}) -LAYERS.append({"shortName":"DETADT2_ECCO_version4_release1", "envs": ("ALL",)}) -LAYERS.append({"shortName":"TFLUX_ECCO_version4_release1", "envs": ("ALL",)}) -LAYERS.append({"shortName":"SItflux_ECCO_version4_release1", "envs": ("ALL",)}) -LAYERS.append({"shortName":"SFLUX_ECCO_version4_release1", "envs": ("ALL",)}) -LAYERS.append({"shortName":"TELLUS_GRACE_MASCON_GRID_RL05_V1_LAND", "envs": ("ALL",)}) -LAYERS.append({"shortName":"TELLUS_GRACE_MASCON_GRID_RL05_V1_OCEAN", "envs": ("ALL",)}) -LAYERS.append({"shortName":"Sea_Surface_Anomalies", "envs": ("DEV",)}) - -LAYERS.append({"shortName":"JPL-L4_GHRSST-SSTfnd-MUR-GLOB-v02.0-fv04.1", "envs": ("ALL",)}) +LAYERS.append({"shortName": "NCDC-L4LRblend-GLOB-AVHRR_OI", "envs": ("ALL",)}) +LAYERS.append({"shortName": "SSH_alti_1deg_1mon", "envs": ("ALL",)}) + +LAYERS.append({"shortName": "SIacSubl_ECCO_version4_release1", "envs": ("ALL",)}) +LAYERS.append({"shortName": "PHIBOT_ECCO_version4_release1", "envs": ("ALL",)}) +LAYERS.append({"shortName": "SIhsnow_ECCO_version4_release1", "envs": ("ALL",)}) +LAYERS.append({"shortName": "SIheff_ECCO_version4_release1", "envs": ("ALL",)}) +LAYERS.append({"shortName": "oceFWflx_ECCO_version4_release1", "envs": ("ALL",)}) +LAYERS.append({"shortName": "oceQnet_ECCO_version4_release1", "envs": ("ALL",)}) +LAYERS.append({"shortName": "MXLDEPTH_ECCO_version4_release1", "envs": ("ALL",)}) +LAYERS.append({"shortName": "SIatmQnt_ECCO_version4_release1", "envs": ("ALL",)}) +LAYERS.append({"shortName": "oceSPflx_ECCO_version4_release1", "envs": ("ALL",)}) +LAYERS.append({"shortName": "oceSPDep_ECCO_version4_release1", "envs": ("ALL",)}) +LAYERS.append({"shortName": "SIarea_ECCO_version4_release1", "envs": ("ALL",)}) +LAYERS.append({"shortName": "ETAN_ECCO_version4_release1", "envs": ("ALL",)}) +LAYERS.append({"shortName": "sIceLoad_ECCO_version4_release1", "envs": ("ALL",)}) 
+LAYERS.append({"shortName": "oceQsw_ECCO_version4_release1", "envs": ("ALL",)}) +LAYERS.append({"shortName": "SIsnPrcp_ECCO_version4_release1", "envs": ("ALL",)}) +LAYERS.append({"shortName": "DETADT2_ECCO_version4_release1", "envs": ("ALL",)}) +LAYERS.append({"shortName": "TFLUX_ECCO_version4_release1", "envs": ("ALL",)}) +LAYERS.append({"shortName": "SItflux_ECCO_version4_release1", "envs": ("ALL",)}) +LAYERS.append({"shortName": "SFLUX_ECCO_version4_release1", "envs": ("ALL",)}) +LAYERS.append({"shortName": "TELLUS_GRACE_MASCON_GRID_RL05_V1_LAND", "envs": ("ALL",)}) +LAYERS.append({"shortName": "TELLUS_GRACE_MASCON_GRID_RL05_V1_OCEAN", "envs": ("ALL",)}) +LAYERS.append({"shortName": "Sea_Surface_Anomalies", "envs": ("DEV",)}) + +LAYERS.append({"shortName": "JPL-L4_GHRSST-SSTfnd-MUR-GLOB-v02.0-fv04.1", "envs": ("ALL",)}) def isLayerEnabled(shortName, env): @@ -51,7 +49,6 @@ def isLayerEnabled(shortName, env): if __name__ == "__main__": - print isLayerEnabled("NCDC-L4LRblend-GLOB-AVHRR_OI", None) print isLayerEnabled("NCDC-L4LRblend-GLOB-AVHRR_OI", "PROD") print isLayerEnabled("NCDC-L4LRblend-GLOB-AVHRR_OI", "SIT") http://git-wip-us.apache.org/repos/asf/incubator-sdap-nexus/blob/f73d912e/analysis/webservice/NexusHandler.py ---------------------------------------------------------------------- diff --git a/analysis/webservice/NexusHandler.py b/analysis/webservice/NexusHandler.py index 1cad3cf..c51577e 100644 --- a/analysis/webservice/NexusHandler.py +++ b/analysis/webservice/NexusHandler.py @@ -2,14 +2,14 @@ Copyright (c) 2016 Jet Propulsion Laboratory, California Institute of Technology. 
All rights reserved """ -import sys -import numpy as np import logging import time import types -from datetime import datetime + +import numpy as np from netCDF4 import Dataset from nexustiles.nexustiles import NexusTileService + from webservice.webmodel import NexusProcessingException AVAILABLE_HANDLERS = [] @@ -216,7 +216,7 @@ class NexusHandler(CalcHandler): for entry in resultsData: - #frmtdTime = datetime.fromtimestamp(entry["time"] ).strftime("%Y-%m") + # frmtdTime = datetime.fromtimestamp(entry["time"] ).strftime("%Y-%m") frmtdTime = entry["time"] if not frmtdTime in resultsMap: @@ -323,7 +323,7 @@ class SparkHandler(NexusHandler): self._spark_nparts = spark_nparts def _find_global_tile_set(self): - if type(self._ds) in (list,tuple): + if type(self._ds) in (list, tuple): ds = self._ds[0] else: ds = self._ds @@ -332,15 +332,17 @@ class SparkHandler(NexusHandler): # Temporary workaround until we have dataset metadata to indicate # temporal resolution. if "monthly" in ds.lower(): - t_incr = 2592000 # 30 days + t_incr = 2592000 # 30 days else: - t_incr = 86400 # 1 day + t_incr = 86400 # 1 day ################################################################## t = self._endTime self._latRes = None self._lonRes = None while ntiles == 0: - nexus_tiles = self._tile_service.get_tiles_bounded_by_box(self._minLat, self._maxLat, self._minLon, self._maxLon, ds=ds, start_time=t-t_incr, end_time=t) + nexus_tiles = self._tile_service.get_tiles_bounded_by_box(self._minLat, self._maxLat, self._minLon, + self._maxLon, ds=ds, start_time=t - t_incr, + end_time=t) ntiles = len(nexus_tiles) self.log.debug('find_global_tile_set got {0} tiles'.format(ntiles)) if ntiles > 0: @@ -351,13 +353,13 @@ class SparkHandler(NexusHandler): if self._latRes is None: lats = tile.latitudes.data if (len(lats) > 1): - self._latRes = abs(lats[1]-lats[0]) + self._latRes = abs(lats[1] - lats[0]) if self._lonRes is None: lons = tile.longitudes.data if (len(lons) > 1): - self._lonRes = 
abs(lons[1]-lons[0]) - if ((self._latRes is not None) and - (self._lonRes is not None)): + self._lonRes = abs(lons[1] - lons[0]) + if ((self._latRes is not None) and + (self._lonRes is not None)): break if (self._latRes is None) or (self._lonRes is None): ntiles = 0 @@ -393,96 +395,96 @@ class SparkHandler(NexusHandler): self.log.warn('Nothing in this tile!') bounds = None return bounds - + @staticmethod - def query_by_parts(tile_service, min_lat, max_lat, min_lon, max_lon, + def query_by_parts(tile_service, min_lat, max_lat, min_lon, max_lon, dataset, start_time, end_time, part_dim=0): nexus_max_tiles_per_query = 100 - #print 'trying query: ',min_lat, max_lat, min_lon, max_lon, \ + # print 'trying query: ',min_lat, max_lat, min_lon, max_lon, \ # dataset, start_time, end_time try: tiles = \ - tile_service.find_tiles_in_box(min_lat, max_lat, - min_lon, max_lon, - dataset, - start_time=start_time, + tile_service.find_tiles_in_box(min_lat, max_lat, + min_lon, max_lon, + dataset, + start_time=start_time, end_time=end_time, fetch_data=False) - assert(len(tiles) <= nexus_max_tiles_per_query) + assert (len(tiles) <= nexus_max_tiles_per_query) except: - #print 'failed query: ',min_lat, max_lat, min_lon, max_lon, \ + # print 'failed query: ',min_lat, max_lat, min_lon, max_lon, \ # dataset, start_time, end_time - if part_dim == 0: + if part_dim == 0: # Partition by latitude. 
mid_lat = (min_lat + max_lat) / 2 - nexus_tiles = SparkHandler.query_by_parts(tile_service, - min_lat, mid_lat, - min_lon, max_lon, - dataset, + nexus_tiles = SparkHandler.query_by_parts(tile_service, + min_lat, mid_lat, + min_lon, max_lon, + dataset, start_time, end_time, part_dim=part_dim) - nexus_tiles.extend(SparkHandler.query_by_parts(tile_service, - mid_lat, - max_lat, - min_lon, - max_lon, - dataset, - start_time, + nexus_tiles.extend(SparkHandler.query_by_parts(tile_service, + mid_lat, + max_lat, + min_lon, + max_lon, + dataset, + start_time, end_time, part_dim=part_dim)) - elif part_dim == 1: + elif part_dim == 1: # Partition by longitude. mid_lon = (min_lon + max_lon) / 2 - nexus_tiles = SparkHandler.query_by_parts(tile_service, - min_lat, max_lat, - min_lon, mid_lon, - dataset, + nexus_tiles = SparkHandler.query_by_parts(tile_service, + min_lat, max_lat, + min_lon, mid_lon, + dataset, start_time, end_time, part_dim=part_dim) - nexus_tiles.extend(SparkHandler.query_by_parts(tile_service, - min_lat, - max_lat, - mid_lon, - max_lon, - dataset, - start_time, + nexus_tiles.extend(SparkHandler.query_by_parts(tile_service, + min_lat, + max_lat, + mid_lon, + max_lon, + dataset, + start_time, end_time, part_dim=part_dim)) elif part_dim == 2: # Partition by time. mid_time = (start_time + end_time) / 2 - nexus_tiles = SparkHandler.query_by_parts(tile_service, - min_lat, max_lat, - min_lon, max_lon, - dataset, + nexus_tiles = SparkHandler.query_by_parts(tile_service, + min_lat, max_lat, + min_lon, max_lon, + dataset, start_time, mid_time, part_dim=part_dim) - nexus_tiles.extend(SparkHandler.query_by_parts(tile_service, - min_lat, - max_lat, - min_lon, - max_lon, - dataset, - mid_time, + nexus_tiles.extend(SparkHandler.query_by_parts(tile_service, + min_lat, + max_lat, + min_lon, + max_lon, + dataset, + mid_time, end_time, part_dim=part_dim)) else: # No exception, so query Cassandra for the tile data. - #print 'Making NEXUS query to Cassandra for %d tiles...' 
% \ + # print 'Making NEXUS query to Cassandra for %d tiles...' % \ # len(tiles) - #t1 = time.time() - #print 'NEXUS call start at time %f' % t1 - #sys.stdout.flush() + # t1 = time.time() + # print 'NEXUS call start at time %f' % t1 + # sys.stdout.flush() nexus_tiles = list(tile_service.fetch_data_for_tiles(*tiles)) nexus_tiles = list(tile_service.mask_tiles_to_bbox(min_lat, max_lat, min_lon, max_lon, nexus_tiles)) - #t2 = time.time() - #print 'NEXUS call end at time %f' % t2 - #print 'Seconds in NEXUS call: ', t2-t1 - #sys.stdout.flush() + # t2 = time.time() + # print 'NEXUS call end at time %f' % t2 + # print 'Seconds in NEXUS call: ', t2-t1 + # sys.stdout.flush() - #print 'Returning %d tiles' % len(nexus_tiles) + # print 'Returning %d tiles' % len(nexus_tiles) return nexus_tiles @staticmethod @@ -491,17 +493,17 @@ class SparkHandler(NexusHandler): for i in np.flipud(del_ind): del nexus_tiles[i] - def _lat2ind(self,lat): - return int((lat-self._minLatCent)/self._latRes) + def _lat2ind(self, lat): + return int((lat - self._minLatCent) / self._latRes) - def _lon2ind(self,lon): - return int((lon-self._minLonCent)/self._lonRes) + def _lon2ind(self, lon): + return int((lon - self._minLonCent) / self._lonRes) - def _ind2lat(self,y): - return self._minLatCent+y*self._latRes + def _ind2lat(self, y): + return self._minLatCent + y * self._latRes - def _ind2lon(self,x): - return self._minLonCent+x*self._lonRes + def _ind2lon(self, x): + return self._minLonCent + x * self._lonRes def _create_nc_file_time1d(self, a, fname, varname, varunits=None, fill=None): @@ -531,12 +533,12 @@ class SparkHandler(NexusHandler): rootgrp.createDimension("lat", lat_dim) rootgrp.createDimension("lon", lon_dim) vals = rootgrp.createVariable(varname, "f4", - dimensions=("lat","lon",), + dimensions=("lat", "lon",), fill_value=fill) lats = rootgrp.createVariable("lat", "f4", dimensions=("lat",)) lons = rootgrp.createVariable("lon", "f4", dimensions=("lon",)) - vals[:,:] = a - lats[:] = 
np.linspace(self._minLatCent, + vals[:, :] = a + lats[:] = np.linspace(self._minLatCent, self._maxLatCent, lat_dim) lons[:] = np.linspace(self._minLonCent, self._maxLonCent, lon_dim) http://git-wip-us.apache.org/repos/asf/incubator-sdap-nexus/blob/f73d912e/analysis/webservice/WorkflowDriver.py ---------------------------------------------------------------------- diff --git a/analysis/webservice/WorkflowDriver.py b/analysis/webservice/WorkflowDriver.py index bb42927..9aebd2c 100644 --- a/analysis/webservice/WorkflowDriver.py +++ b/analysis/webservice/WorkflowDriver.py @@ -1,6 +1,8 @@ import argparse + from algorithms.MapFetchHandler import MapFetchHandler + def start(args): dataset_shortname = args.ds granule_name = args.g @@ -16,6 +18,7 @@ def start(args): map = MapFetchHandler() map.generate(dataset_shortname, granule_name, prefix, ct, interp, _min, _max, width, height, time_interval) + def parse_args(): parser = argparse.ArgumentParser(description='Automate NEXUS ingestion workflow', formatter_class=argparse.ArgumentDefaultsHelpFormatter) @@ -62,6 +65,7 @@ def parse_args(): return parser.parse_args() + if __name__ == "__main__": the_args = parse_args() - start(the_args) \ No newline at end of file + start(the_args) http://git-wip-us.apache.org/repos/asf/incubator-sdap-nexus/blob/f73d912e/analysis/webservice/algorithms/ColorBarHandler.py ---------------------------------------------------------------------- diff --git a/analysis/webservice/algorithms/ColorBarHandler.py b/analysis/webservice/algorithms/ColorBarHandler.py index 9509f58..71ffcc1 100644 --- a/analysis/webservice/algorithms/ColorBarHandler.py +++ b/analysis/webservice/algorithms/ColorBarHandler.py @@ -3,10 +3,10 @@ import math import time import numpy as np -from webservice.NexusHandler import NexusHandler as BaseHandler -from webservice.NexusHandler import nexus_handler import colortables +from webservice.NexusHandler import NexusHandler as BaseHandler +from webservice.NexusHandler import 
nexus_handler @nexus_handler @@ -46,7 +46,6 @@ class ColorBarHandler(BaseHandler): def __init__(self): BaseHandler.__init__(self) - def __get_dataset_minmax(self, ds, dataTime): dataTimeStart = dataTime - 86400.0 # computeOptions.get_datetime_arg("t", None) dataTimeEnd = dataTime @@ -137,4 +136,3 @@ class ColorBarHandler(BaseHandler): return json.dumps(obj, indent=4) return SimpleResult() - http://git-wip-us.apache.org/repos/asf/incubator-sdap-nexus/blob/f73d912e/analysis/webservice/algorithms/CorrelationMap.py ---------------------------------------------------------------------- diff --git a/analysis/webservice/algorithms/CorrelationMap.py b/analysis/webservice/algorithms/CorrelationMap.py index 5064a7f..e96bbdf 100644 --- a/analysis/webservice/algorithms/CorrelationMap.py +++ b/analysis/webservice/algorithms/CorrelationMap.py @@ -4,13 +4,15 @@ California Institute of Technology. All rights reserved """ import json import math +from itertools import groupby + import numpy as np -from shapely.geometry import box +from nexustiles.model.nexusmodel import get_approximate_value_for_lat_lon from scipy.stats import linregress -from itertools import groupby +from shapely.geometry import box + from webservice.NexusHandler import NexusHandler, nexus_handler, DEFAULT_PARAMETERS_SPEC from webservice.webmodel import NexusProcessingException, NexusResults -from nexustiles.model.nexusmodel import get_approximate_value_for_lat_lon @nexus_handler @@ -53,16 +55,16 @@ class LongitudeLatitudeMapHandlerImpl(NexusHandler): raise NexusProcessingException(reason="Could not find any data temporally co-located") results = [[{ - 'cnt': 0, - 'slope': 0, - 'intercept': 0, - 'r': 0, - 'p': 0, - 'stderr': 0, - 'lat': float(lat), - 'lon': float(lon) - } for lon in np.arange(minLon, maxLon, resolution)] for lat in - np.arange(minLat, maxLat, resolution)] + 'cnt': 0, + 'slope': 0, + 'intercept': 0, + 'r': 0, + 'p': 0, + 'stderr': 0, + 'lat': float(lat), + 'lon': float(lon) + } for lon in 
np.arange(minLon, maxLon, resolution)] for lat in + np.arange(minLat, maxLat, resolution)] for stats in results: for stat in stats: http://git-wip-us.apache.org/repos/asf/incubator-sdap-nexus/blob/f73d912e/analysis/webservice/algorithms/DailyDifferenceAverage.py ---------------------------------------------------------------------- diff --git a/analysis/webservice/algorithms/DailyDifferenceAverage.py b/analysis/webservice/algorithms/DailyDifferenceAverage.py index 1518c9e..67d9b2d 100644 --- a/analysis/webservice/algorithms/DailyDifferenceAverage.py +++ b/analysis/webservice/algorithms/DailyDifferenceAverage.py @@ -6,11 +6,11 @@ import sys import traceback from datetime import datetime, timedelta from multiprocessing.dummy import Pool, Manager -from shapely.geometry import box import numpy as np import pytz from nexustiles.nexustiles import NexusTileService, NexusTileServiceException +from shapely.geometry import box from webservice.NexusHandler import NexusHandler, nexus_handler from webservice.webmodel import NexusResults, NexusProcessingException http://git-wip-us.apache.org/repos/asf/incubator-sdap-nexus/blob/f73d912e/analysis/webservice/algorithms/DataInBoundsSearch.py ---------------------------------------------------------------------- diff --git a/analysis/webservice/algorithms/DataInBoundsSearch.py b/analysis/webservice/algorithms/DataInBoundsSearch.py index f9aa609..5399049 100644 --- a/analysis/webservice/algorithms/DataInBoundsSearch.py +++ b/analysis/webservice/algorithms/DataInBoundsSearch.py @@ -4,9 +4,10 @@ California Institute of Technology. 
All rights reserved """ import logging from datetime import datetime + from pytz import timezone -from webservice.NexusHandler import NexusHandler, nexus_handler, DEFAULT_PARAMETERS_SPEC +from webservice.NexusHandler import NexusHandler, nexus_handler from webservice.webmodel import NexusResults, NexusProcessingException EPOCH = timezone('UTC').localize(datetime(1970, 1, 1)) http://git-wip-us.apache.org/repos/asf/incubator-sdap-nexus/blob/f73d912e/analysis/webservice/algorithms/DataSeriesList.py ---------------------------------------------------------------------- diff --git a/analysis/webservice/algorithms/DataSeriesList.py b/analysis/webservice/algorithms/DataSeriesList.py index 399742a..10ca4a6 100644 --- a/analysis/webservice/algorithms/DataSeriesList.py +++ b/analysis/webservice/algorithms/DataSeriesList.py @@ -3,6 +3,7 @@ Copyright (c) 2016 Jet Propulsion Laboratory, California Institute of Technology. All rights reserved """ import json + from webservice.NexusHandler import NexusHandler from webservice.NexusHandler import nexus_handler from webservice.webmodel import cached http://git-wip-us.apache.org/repos/asf/incubator-sdap-nexus/blob/f73d912e/analysis/webservice/algorithms/Heartbeat.py ---------------------------------------------------------------------- diff --git a/analysis/webservice/algorithms/Heartbeat.py b/analysis/webservice/algorithms/Heartbeat.py index a462739..7156610 100644 --- a/analysis/webservice/algorithms/Heartbeat.py +++ b/analysis/webservice/algorithms/Heartbeat.py @@ -4,8 +4,7 @@ California Institute of Technology. 
All rights reserved """ import json -from webservice.NexusHandler import NexusHandler, nexus_handler, AVAILABLE_HANDLERS -from webservice.webmodel import NexusResults +from webservice.NexusHandler import NexusHandler, nexus_handler @nexus_handler http://git-wip-us.apache.org/repos/asf/incubator-sdap-nexus/blob/f73d912e/analysis/webservice/algorithms/LongitudeLatitudeMap.py ---------------------------------------------------------------------- diff --git a/analysis/webservice/algorithms/LongitudeLatitudeMap.py b/analysis/webservice/algorithms/LongitudeLatitudeMap.py index be9123e..6a2ceeb 100644 --- a/analysis/webservice/algorithms/LongitudeLatitudeMap.py +++ b/analysis/webservice/algorithms/LongitudeLatitudeMap.py @@ -127,19 +127,19 @@ class LongitudeLatitudeMapHandlerImpl(NexusHandler): # ((lon, lat), (slope, intercept, r_value, p_value, std_err, mean, pmax, pmin, pstd, pcnt)) return [{ - 'lon': result[0][0], - 'lat': result[0][1], - 'slope': result[1][0] if not math.isnan(result[1][0]) else 'NaN', - 'intercept': result[1][1] if not math.isnan(result[1][1]) else 'NaN', - 'r': result[1][2], - 'p': result[1][3], - 'stderr': result[1][4] if not math.isinf(result[1][4]) else 'Inf', - 'avg': result[1][5], - 'max': result[1][6], - 'min': result[1][7], - 'std': result[1][8], - 'cnt': result[1][9], - } for result in results] + 'lon': result[0][0], + 'lat': result[0][1], + 'slope': result[1][0] if not math.isnan(result[1][0]) else 'NaN', + 'intercept': result[1][1] if not math.isnan(result[1][1]) else 'NaN', + 'r': result[1][2], + 'p': result[1][3], + 'stderr': result[1][4] if not math.isinf(result[1][4]) else 'Inf', + 'avg': result[1][5], + 'max': result[1][6], + 'min': result[1][7], + 'std': result[1][8], + 'cnt': result[1][9], + } for result in results] def pool_initializer(): http://git-wip-us.apache.org/repos/asf/incubator-sdap-nexus/blob/f73d912e/analysis/webservice/algorithms/MapFetchHandler.py ---------------------------------------------------------------------- 
diff --git a/analysis/webservice/algorithms/MapFetchHandler.py b/analysis/webservice/algorithms/MapFetchHandler.py index 1c42532..5c90389 100644 --- a/analysis/webservice/algorithms/MapFetchHandler.py +++ b/analysis/webservice/algorithms/MapFetchHandler.py @@ -1,23 +1,25 @@ +import calendar +import errno import io import json import math +import os import time -import errno -import calendar from subprocess import call -import webservice.GenerateImageMRF as MRF + +import boto3 import numpy as np from PIL import Image from PIL import ImageDraw from PIL import ImageFont from dateutil.relativedelta import * -import os -import boto3 -import colortables +import colortables +import webservice.GenerateImageMRF as MRF from webservice.NexusHandler import NexusHandler as BaseHandler from webservice.NexusHandler import nexus_handler + @nexus_handler class MapFetchHandler(BaseHandler): name = "MapFetchHandler" @@ -88,7 +90,6 @@ class MapFetchHandler(BaseHandler): for x in range(0, width): value = d[y][x] if not np.isnan(value) and value != 0: - lat = tile.latitudes[y] lon = tile.longitudes[x] @@ -126,7 +127,8 @@ class MapFetchHandler(BaseHandler): return x_res, y_res @staticmethod - def __create_global(nexus_tiles, stats, width=2048, height=1024, force_min=np.nan, force_max=np.nan, table=colortables.grayscale, interpolation="nearest"): + def __create_global(nexus_tiles, stats, width=2048, height=1024, force_min=np.nan, force_max=np.nan, + table=colortables.grayscale, interpolation="nearest"): data_min = stats["minValue"] if np.isnan(force_min) else force_min data_max = stats["maxValue"] if np.isnan(force_max) else force_max @@ -289,7 +291,8 @@ class MapFetchHandler(BaseHandler): while start_time <= end_time: one_interval_later = start_time + time_interval temp_end_time = one_interval_later - relativedelta(minutes=+1) # prevent getting tiles for 2 intervals - ds1_nexus_tiles = self._tile_service.find_tiles_in_box(-90.0, 90.0, -180.0, 180.0, ds, start_time, temp_end_time) + 
ds1_nexus_tiles = self._tile_service.find_tiles_in_box(-90.0, 90.0, -180.0, 180.0, ds, start_time, + temp_end_time) if ds1_nexus_tiles is not None: img = self.__create_global(ds1_nexus_tiles, stats, width, height, force_min, force_max, color_table, @@ -342,4 +345,4 @@ class MapFetchHandler(BaseHandler): except Exception as e: print("Unable to add tar.gz to S3: \n" + str(e)) - call(["rm", "-rf", tar_file]) # Delete the tar.gz from local storage \ No newline at end of file + call(["rm", "-rf", tar_file]) # Delete the tar.gz from local storage http://git-wip-us.apache.org/repos/asf/incubator-sdap-nexus/blob/f73d912e/analysis/webservice/algorithms/TestInitializer.py ---------------------------------------------------------------------- diff --git a/analysis/webservice/algorithms/TestInitializer.py b/analysis/webservice/algorithms/TestInitializer.py index 3f47c6a..e6a8ba1 100644 --- a/analysis/webservice/algorithms/TestInitializer.py +++ b/analysis/webservice/algorithms/TestInitializer.py @@ -3,8 +3,8 @@ Copyright (c) 2016 Jet Propulsion Laboratory, California Institute of Technology. 
All rights reserved """ -from webservice.NexusHandler import NexusHandler, nexus_initializer -from nexustiles.nexustiles import NexusTileService +from webservice.NexusHandler import nexus_initializer + @nexus_initializer class TestInitializer: @@ -13,4 +13,4 @@ class TestInitializer: pass def init(self, config): - print "*** TEST INITIALIZATION ***" \ No newline at end of file + print "*** TEST INITIALIZATION ***" http://git-wip-us.apache.org/repos/asf/incubator-sdap-nexus/blob/f73d912e/analysis/webservice/algorithms/TileSearch.py ---------------------------------------------------------------------- diff --git a/analysis/webservice/algorithms/TileSearch.py b/analysis/webservice/algorithms/TileSearch.py index d1fade8..4550803 100644 --- a/analysis/webservice/algorithms/TileSearch.py +++ b/analysis/webservice/algorithms/TileSearch.py @@ -2,7 +2,7 @@ Copyright (c) 2016 Jet Propulsion Laboratory, California Institute of Technology. All rights reserved """ -from webservice.NexusHandler import NexusHandler, nexus_handler +from webservice.NexusHandler import NexusHandler from webservice.webmodel import NexusResults http://git-wip-us.apache.org/repos/asf/incubator-sdap-nexus/blob/f73d912e/analysis/webservice/algorithms/TimeAvgMap.py ---------------------------------------------------------------------- diff --git a/analysis/webservice/algorithms/TimeAvgMap.py b/analysis/webservice/algorithms/TimeAvgMap.py index 5c76604..76a3dd2 100644 --- a/analysis/webservice/algorithms/TimeAvgMap.py +++ b/analysis/webservice/algorithms/TimeAvgMap.py @@ -10,16 +10,16 @@ pyximport.install() import sys import numpy as np from time import time -from webservice.NexusHandler import NexusHandler, nexus_handler, DEFAULT_PARAMETERS_SPEC +from webservice.NexusHandler import NexusHandler, DEFAULT_PARAMETERS_SPEC from webservice.webmodel import NexusResults, NoDataException from netCDF4 import Dataset -#from mpl_toolkits.basemap import Basemap + +# from mpl_toolkits.basemap import Basemap # 
@nexus_handler class TimeAvgMapHandlerImpl(NexusHandler): - name = "Time Average Map" path = "/timeAvgMap" description = "Computes a Latitude/Longitude Time Average plot given an arbitrary geographical area and time range" @@ -31,13 +31,15 @@ class TimeAvgMapHandlerImpl(NexusHandler): def _find_native_resolution(self): # Get a quick set of tiles (1 degree at center of box) at 1 time stamp - midLat = (self._minLat+self._maxLat)/2 - midLon = (self._minLon+self._maxLon)/2 + midLat = (self._minLat + self._maxLat) / 2 + midLon = (self._minLon + self._maxLon) / 2 ntiles = 0 t = self._endTime t_incr = 86400 while ntiles == 0: - nexus_tiles = self._tile_service.get_tiles_bounded_by_box(midLat-0.5, midLat+0.5, midLon-0.5, midLon+0.5, ds=self._ds, start_time=t-t_incr, end_time=t) + nexus_tiles = self._tile_service.get_tiles_bounded_by_box(midLat - 0.5, midLat + 0.5, midLon - 0.5, + midLon + 0.5, ds=self._ds, start_time=t - t_incr, + end_time=t) ntiles = len(nexus_tiles) print 'find_native_res: got %d tiles' % len(nexus_tiles) sys.stdout.flush() @@ -66,7 +68,9 @@ class TimeAvgMapHandlerImpl(NexusHandler): t = self._endTime t_incr = 86400 while ntiles == 0: - nexus_tiles = self._tile_service.get_tiles_bounded_by_box(self._minLat, self._maxLat, self._minLon, self._maxLon, ds=self._ds, start_time=t-t_incr, end_time=t) + nexus_tiles = self._tile_service.get_tiles_bounded_by_box(self._minLat, self._maxLat, self._minLon, + self._maxLon, ds=self._ds, start_time=t - t_incr, + end_time=t) ntiles = len(nexus_tiles) print 'find_global_tile_set got %d tiles' % ntiles sys.stdout.flush() @@ -78,8 +82,8 @@ class TimeAvgMapHandlerImpl(NexusHandler): for i in np.flipud(del_ind): del nexus_tiles[i] - #@staticmethod - #def _map(tile_in): + # @staticmethod + # def _map(tile_in): def _map(self, tile_in): print 'Started tile %s' % tile_in.section_spec print 'tile lats = ', tile_in.latitudes @@ -99,7 +103,7 @@ class TimeAvgMapHandlerImpl(NexusHandler): max_y = np.max(good_inds_lat) min_x = 
np.min(good_inds_lon) max_x = np.max(good_inds_lon) - tile_inbounds_shape = (max_y-min_y+1, max_x-min_x+1) + tile_inbounds_shape = (max_y - min_y + 1, max_x - min_x + 1) days_at_a_time = 90 t_incr = 86400 * days_at_a_time avg_tile = np.ma.array(np.zeros(tile_inbounds_shape, @@ -108,27 +112,31 @@ class TimeAvgMapHandlerImpl(NexusHandler): dtype=np.uint32)) t_start = self._startTime while t_start <= self._endTime: - t_end = min(t_start+t_incr,self._endTime) + t_end = min(t_start + t_incr, self._endTime) t1 = time() print 'nexus call start at time %f' % t1 sys.stdout.flush() - nexus_tiles = self._tile_service.get_tiles_bounded_by_box(min_lat-self._latRes/2, max_lat+self._latRes/2, min_lon-self._lonRes/2, max_lon+self._lonRes/2, ds=self._ds, start_time=t_start, end_time=t_end) + nexus_tiles = self._tile_service.get_tiles_bounded_by_box(min_lat - self._latRes / 2, + max_lat + self._latRes / 2, + min_lon - self._lonRes / 2, + max_lon + self._lonRes / 2, ds=self._ds, + start_time=t_start, end_time=t_end) t2 = time() print 'nexus call end at time %f' % t2 - print 'secs in nexus call: ', t2-t1 + print 'secs in nexus call: ', t2 - t1 sys.stdout.flush() self._prune_tiles(nexus_tiles) - print 't %d to %d - Got %d tiles' % (t_start, t_end, + print 't %d to %d - Got %d tiles' % (t_start, t_end, len(nexus_tiles)) sys.stdout.flush() for tile in nexus_tiles: - tile.data.data[:,:] = np.nan_to_num(tile.data.data) - avg_tile.data[:,:] += tile.data[0, - min_y:max_y+1, - min_x:max_x+1] - cnt_tile.data[:,:] += (~tile.data.mask[0, - min_y:max_y+1, - min_x:max_x+1]).astype(np.uint8) + tile.data.data[:, :] = np.nan_to_num(tile.data.data) + avg_tile.data[:, :] += tile.data[0, + min_y:max_y + 1, + min_x:max_x + 1] + cnt_tile.data[:, :] += (~tile.data.mask[0, + min_y:max_y + 1, + min_x:max_x + 1]).astype(np.uint8) t_start = t_end + 1 print 'cnt_tile = ', cnt_tile @@ -146,16 +154,16 @@ class TimeAvgMapHandlerImpl(NexusHandler): max_lon = None print 'Tile %s outside of bounding box' % 
tile_in.section_spec sys.stdout.flush() - return (avg_tile,min_lat,max_lat,min_lon,max_lon) + return (avg_tile, min_lat, max_lat, min_lon, max_lon) - def _lat2ind(self,lat): - return int((lat-self._minLatCent)/self._latRes) + def _lat2ind(self, lat): + return int((lat - self._minLatCent) / self._latRes) - def _lon2ind(self,lon): - return int((lon-self._minLonCent)/self._lonRes) + def _lon2ind(self, lon): + return int((lon - self._minLonCent) / self._lonRes) def _create_nc_file(self, a): - print 'a=',a + print 'a=', a print 'shape a = ', a.shape sys.stdout.flush() lat_dim, lon_dim = a.shape @@ -163,11 +171,11 @@ class TimeAvgMapHandlerImpl(NexusHandler): rootgrp.createDimension("lat", lat_dim) rootgrp.createDimension("lon", lon_dim) rootgrp.createVariable("TRMM_3B42_daily_precipitation_V7", "f4", - dimensions=("lat","lon",)) + dimensions=("lat", "lon",)) rootgrp.createVariable("lat", "f4", dimensions=("lat",)) rootgrp.createVariable("lon", "f4", dimensions=("lon",)) - rootgrp.variables["TRMM_3B42_daily_precipitation_V7"][:,:] = a - rootgrp.variables["lat"][:] = np.linspace(self._minLatCent, + rootgrp.variables["TRMM_3B42_daily_precipitation_V7"][:, :] = a + rootgrp.variables["lat"][:] = np.linspace(self._minLatCent, self._maxLatCent, lat_dim) rootgrp.variables["lon"][:] = np.linspace(self._minLonCent, self._maxLonCent, lon_dim) @@ -193,17 +201,17 @@ class TimeAvgMapHandlerImpl(NexusHandler): print 'Using Native resolution: lat_res=%f, lon_res=%f' % (self._latRes, self._lonRes) self._minLatCent = self._minLat + self._latRes / 2 self._minLonCent = self._minLon + self._lonRes / 2 - nlats = int((self._maxLat-self._minLatCent)/self._latRes)+1 - nlons = int((self._maxLon-self._minLonCent)/self._lonRes)+1 - self._maxLatCent = self._minLatCent + (nlats-1) * self._latRes - self._maxLonCent = self._minLonCent + (nlons-1) * self._lonRes - print 'nlats=',nlats,'nlons=',nlons - print 'center lat range = %f to %f' % (self._minLatCent, + nlats = int((self._maxLat - 
self._minLatCent) / self._latRes) + 1 + nlons = int((self._maxLon - self._minLonCent) / self._lonRes) + 1 + self._maxLatCent = self._minLatCent + (nlats - 1) * self._latRes + self._maxLonCent = self._minLonCent + (nlons - 1) * self._lonRes + print 'nlats=', nlats, 'nlons=', nlons + print 'center lat range = %f to %f' % (self._minLatCent, self._maxLatCent) - print 'center lon range = %f to %f' % (self._minLonCent, + print 'center lon range = %f to %f' % (self._minLonCent, self._maxLonCent) sys.stdout.flush() - a = np.zeros((nlats, nlons),dtype=np.float64,order='C') + a = np.zeros((nlats, nlons), dtype=np.float64, order='C') nexus_tiles = self._find_global_tile_set() # print 'tiles:' @@ -222,7 +230,7 @@ class TimeAvgMapHandlerImpl(NexusHandler): self._prune_tiles(nexus_tiles) print 'Pruned to %d tiles' % len(nexus_tiles) sys.stdout.flush() - #for tile in nexus_tiles: + # for tile in nexus_tiles: # print 'lats: ', tile.latitudes.compressed() # print 'lons: ', tile.longitudes.compressed() @@ -233,7 +241,7 @@ class TimeAvgMapHandlerImpl(NexusHandler): # with the time avg map data and lat-lon bounding box. 
for tile in avg_tiles: if tile is not None: - (tile_data, tile_min_lat, tile_max_lat, + (tile_data, tile_min_lat, tile_max_lat, tile_min_lon, tile_max_lon) = tile print 'shape tile_data = ', tile_data.shape print 'tile data mask = ', tile_data.mask @@ -244,14 +252,14 @@ class TimeAvgMapHandlerImpl(NexusHandler): x0 = self._lon2ind(tile_min_lon) x1 = self._lon2ind(tile_max_lon) print 'writing tile lat %f-%f, lon %f-%f, map y %d-%d, map x %d-%d' % \ - (tile_min_lat, tile_max_lat, - tile_min_lon, tile_max_lon, y0, y1, x0, x1) + (tile_min_lat, tile_max_lat, + tile_min_lon, tile_max_lon, y0, y1, x0, x1) sys.stdout.flush() - a[y0:y1+1,x0:x1+1] = tile_data + a[y0:y1 + 1, x0:x1 + 1] = tile_data else: print 'All pixels masked in tile lat %f-%f, lon %f-%f, map y %d-%d, map x %d-%d' % \ - (tile_min_lat, tile_max_lat, - tile_min_lon, tile_max_lon, y0, y1, x0, x1) + (tile_min_lat, tile_max_lat, + tile_min_lon, tile_max_lon, y0, y1, x0, x1) sys.stdout.flush() self._create_nc_file(a) http://git-wip-us.apache.org/repos/asf/incubator-sdap-nexus/blob/f73d912e/analysis/webservice/algorithms/TimeSeries.py ---------------------------------------------------------------------- diff --git a/analysis/webservice/algorithms/TimeSeries.py b/analysis/webservice/algorithms/TimeSeries.py index 858fea4..c7eba5c 100644 --- a/analysis/webservice/algorithms/TimeSeries.py +++ b/analysis/webservice/algorithms/TimeSeries.py @@ -270,11 +270,11 @@ class TimeSeriesHandlerImpl(NexusHandler): if apply_seasonal_cycle_filter and apply_low_pass_filter: try: filtering.applyFiltersOnField(results, 'meanSeasonal', applySeasonal=False, applyLowPass=True, - append="LowPass") + append="LowPass") filtering.applyFiltersOnField(results, 'minSeasonal', applySeasonal=False, applyLowPass=True, - append="LowPass") + append="LowPass") filtering.applyFiltersOnField(results, 'maxSeasonal', applySeasonal=False, applyLowPass=True, - append="LowPass") + append="LowPass") except Exception as e: # If it doesn't work log the error 
but ignore it tb = traceback.format_exc() @@ -325,7 +325,7 @@ class TimeSeriesHandlerImpl(NexusHandler): # Border tiles need have the data loaded, masked, and stats recalculated border_tiles = list(self._tile_service.fetch_data_for_tiles(*border_tiles)) - border_tiles = self._tile_service.mask_tiles_to_polygon(bounding_polygon, border_tiles) + border_tiles = self._tile_service.mask_tiles_to_polygon(bounding_polygon, border_tiles) for tile in border_tiles: tile.update_stats() tile_means.append(tile.tile_stats.mean) http://git-wip-us.apache.org/repos/asf/incubator-sdap-nexus/blob/f73d912e/analysis/webservice/algorithms/TimeSeriesSolr.py ---------------------------------------------------------------------- diff --git a/analysis/webservice/algorithms/TimeSeriesSolr.py b/analysis/webservice/algorithms/TimeSeriesSolr.py index 1da55b2..f69200d 100644 --- a/analysis/webservice/algorithms/TimeSeriesSolr.py +++ b/analysis/webservice/algorithms/TimeSeriesSolr.py @@ -2,9 +2,8 @@ Copyright (c) 2016 Jet Propulsion Laboratory, California Institute of Technology. 
All rights reserved """ -import sys -import traceback import logging +import traceback from cStringIO import StringIO from datetime import datetime from multiprocessing.dummy import Pool, Manager @@ -12,11 +11,11 @@ from multiprocessing.dummy import Pool, Manager import matplotlib.dates as mdates import matplotlib.pyplot as plt import numpy as np -from webservice.NexusHandler import NexusHandler, nexus_handler, DEFAULT_PARAMETERS_SPEC from nexustiles.nexustiles import NexusTileService from scipy import stats from webservice import Filtering as filt +from webservice.NexusHandler import NexusHandler, nexus_handler, DEFAULT_PARAMETERS_SPEC from webservice.webmodel import NexusResults, NexusProcessingException, NoDataException SENTINEL = 'STOP' @@ -288,8 +287,8 @@ class TimeSeriesCalculator(object): def calc_average_on_day(self, min_lat, max_lat, min_lon, max_lon, dataset, timeinseconds): # Get stats using solr only ds1_nexus_tiles_stats = self.__tile_service.get_stats_within_box_at_time(min_lat, max_lat, min_lon, max_lon, - dataset, - timeinseconds) + dataset, + timeinseconds) data_min_within = min([tile["tile_min_val_d"] for tile in ds1_nexus_tiles_stats]) data_max_within = max([tile["tile_max_val_d"] for tile in ds1_nexus_tiles_stats]) @@ -298,16 +297,16 @@ class TimeSeriesCalculator(object): # Get boundary tiles and calculate stats ds1_nexus_tiles = self.__tile_service.get_boundary_tiles_at_time(min_lat, max_lat, min_lon, max_lon, - dataset, - timeinseconds) + dataset, + timeinseconds) tile_data_agg = np.ma.array([tile.data for tile in ds1_nexus_tiles]) data_min_boundary = np.ma.min(tile_data_agg) data_max_boundary = np.ma.max(tile_data_agg) - #daily_mean = np.ma.mean(tile_data_agg).item() + # daily_mean = np.ma.mean(tile_data_agg).item() data_sum_boundary = np.ma.sum(tile_data_agg) data_count_boundary = np.ma.count(tile_data_agg).item() - #data_std = np.ma.std(tile_data_agg) + # data_std = np.ma.std(tile_data_agg) # Combine stats data_min = min(data_min_within, 
data_min_boundary) http://git-wip-us.apache.org/repos/asf/incubator-sdap-nexus/blob/f73d912e/analysis/webservice/algorithms/__init__.py ---------------------------------------------------------------------- diff --git a/analysis/webservice/algorithms/__init__.py b/analysis/webservice/algorithms/__init__.py index fb5ecc8..23cc335 100644 --- a/analysis/webservice/algorithms/__init__.py +++ b/analysis/webservice/algorithms/__init__.py @@ -12,9 +12,9 @@ import ErrorTosserTest import Heartbeat import HofMoeller import LongitudeLatitudeMap +import StandardDeviationSearch import TestInitializer import TileSearch import TimeAvgMap import TimeSeries import TimeSeriesSolr -import StandardDeviationSearch \ No newline at end of file
