Making the code Python 3 compliant and fixing indentation

Project: http://git-wip-us.apache.org/repos/asf/climate/repo
Commit: http://git-wip-us.apache.org/repos/asf/climate/commit/21a78abd
Tree: http://git-wip-us.apache.org/repos/asf/climate/tree/21a78abd
Diff: http://git-wip-us.apache.org/repos/asf/climate/diff/21a78abd

Branch: refs/heads/master
Commit: 21a78abdae41a81c55b418b885371c0100d459d5
Parents: 0caf4b7
Author: Omkar20895 <omkarreddy2...@gmail.com>
Authored: Fri Aug 19 23:40:38 2016 +0530
Committer: Omkar20895 <omkarreddy2...@gmail.com>
Committed: Fri Aug 19 23:40:38 2016 +0530

----------------------------------------------------------------------
 examples/podaac_integration_example.py | 14 ++++----
 ocw/data_source/podaac.py              | 56 ++++++++++++++---------------
 ocw/tests/test_podaac.py               |  4 +--
 3 files changed, 36 insertions(+), 38 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/climate/blob/21a78abd/examples/podaac_integration_example.py
----------------------------------------------------------------------
diff --git a/examples/podaac_integration_example.py 
b/examples/podaac_integration_example.py
index 990ee56..d68f747 100644
--- a/examples/podaac_integration_example.py
+++ b/examples/podaac_integration_example.py
@@ -29,7 +29,7 @@ OUTPUT_PLOT = "cmc_temporal_std"
 print("Loading %s dataset into an OCW dataset objects" % datasetShortName)
 cmc_dataset = podaac.load_dataset(
     variable=variable, datasetId=datasetId, datasetShortName=datasetShortName, 
name=name)
-print "CMC_Dataset.values shape: (times, lats, lons) - %s \n" % 
(cmc_dataset.values.shape,)
+print("CMC_Dataset.values shape: (times, lats, lons) - %s \n" % 
(cmc_dataset.values.shape,))
 
 # Acessing latittudes and longitudes of netCDF file
 lats = cmc_dataset.lats
@@ -37,7 +37,7 @@ lons = cmc_dataset.lons
 
 """ Step 2:  Build a Metric to use for Evaluation - Temporal STD for this 
example """
 # You can build your own metrics, but OCW also ships with some common metrics
-print "Setting up a Temporal STD metric to use for evaluation"
+print("Setting up a Temporal STD metric to use for evaluation")
 std = metrics.TemporalStdDev()
 
 """ Step 3: Create an Evaluation Object using Datasets and our Metric """
@@ -45,11 +45,11 @@ std = metrics.TemporalStdDev()
 # Evaluation(reference, targets, metrics, subregions=None)
 # Evaluation can take in multiple targets and metrics, so we need to convert
 # our examples into Python lists.  Evaluation will iterate over the lists
-print "Making the Evaluation definition"
+print("Making the Evaluation definition")
 # Temporal STD Metric gets one target dataset then reference dataset
 # should be None
 std_evaluation = evaluation.Evaluation(None, [cmc_dataset], [std])
-print "Executing the Evaluation using the object's run() method"
+print("Executing the Evaluation using the object's run() method")
 std_evaluation.run()
 
 """ Step 4: Make a Plot from the Evaluation.results """
@@ -60,10 +60,10 @@ std_evaluation.run()
 # The shape of results is (num_metrics, num_target_datasets) if no subregion
 # Accessing the actual results when we have used 1 metric and 1 dataset is
 # done this way:
-print "Accessing the Results of the Evaluation run"
+print("Accessing the Results of the Evaluation run")
 results = std_evaluation.unary_results[0][0]
-print "The results are of type: %s" % type(results)
-print "Generating a contour map using ocw.plotter.draw_contour_map()"
+print("The results are of type: %s" % type(results))
+print("Generating a contour map using ocw.plotter.draw_contour_map()")
 
 fname = OUTPUT_PLOT
 gridshape = (4, 5)  # 20 Years worth of plots. 20 rows in 1 column

http://git-wip-us.apache.org/repos/asf/climate/blob/21a78abd/ocw/data_source/podaac.py
----------------------------------------------------------------------
diff --git a/ocw/data_source/podaac.py b/ocw/data_source/podaac.py
index 7c233b3..b4085e8 100644
--- a/ocw/data_source/podaac.py
+++ b/ocw/data_source/podaac.py
@@ -21,8 +21,6 @@ from ocw.dataset import Dataset
 from netCDF4 import Dataset as netcdf_dataset
 from netcdftime import utime
 import os
-import urllib
-import xml.etree.ElementTree as ET
 
 
 def convert_times_to_datetime(time):
@@ -69,45 +67,45 @@ def load_dataset(variable, datasetId='', 
datasetShortName='', name=''):
     :raises: ServerError
     '''
     # Downloading the dataset using podaac toolkit
-        podaac = Podaac()
-        path = os.path.dirname(os.path.abspath(__file__))
-        granuleName = podaac.extract_l4_granule(
-            datasetId=datasetId, shortName=datasetShortName, path=path)
-        path = path + '/' + granuleName
-        d = netcdf_dataset(path, mode='r')
-        dataset = d.variables[variable]
+    podaac = Podaac()
+    path = os.path.dirname(os.path.abspath(__file__))
+    granuleName = podaac.extract_l4_granule(
+        dataset_id=datasetId, short_name=datasetShortName, path=path)
+    path = path + '/' + granuleName
+    d = netcdf_dataset(path, mode='r')
+    dataset = d.variables[variable]
 
     # By convention, but not by standard, if the dimensions exist, they will 
be in the order:
     # time (t), altitude (z), latitude (y), longitude (x)
     # but conventions aren't always followed and all dimensions aren't always 
present so
     # see if we can make some educated deductions before defaulting to just 
pulling the first three
     # columns.
-        temp_dimensions = map(lambda x: x.lower(), dataset.dimensions)
-        dataset_dimensions = dataset.dimensions
-        time = dataset_dimensions[temp_dimensions.index(
-            'time') if 'time' in temp_dimensions else 0]
-        lat = dataset_dimensions[temp_dimensions.index(
-            'lat') if 'lat' in temp_dimensions else 1]
-        lon = dataset_dimensions[temp_dimensions.index(
-            'lon') if 'lon' in temp_dimensions else 2]
+    temp_dimensions = map(lambda x: x.lower(), dataset.dimensions)
+    dataset_dimensions = dataset.dimensions
+    time = dataset_dimensions[temp_dimensions.index(
+        'time') if 'time' in temp_dimensions else 0]
+    lat = dataset_dimensions[temp_dimensions.index(
+        'lat') if 'lat' in temp_dimensions else 1]
+    lon = dataset_dimensions[temp_dimensions.index(
+        'lon') if 'lon' in temp_dimensions else 2]
 
     # Time is given to us in some units since an epoch. We need to convert
     # these values to datetime objects. Note that we use the main object's
     # time object and not the dataset specific reference to it. We need to
     # grab the 'units' from it and it fails on the dataset specific object.
-        times = np.array(convert_times_to_datetime(d[time]))
-        lats = np.array(d.variables[lat][:])
-        lons = np.array(d.variables[lon][:])
-        values = np.array(dataset[:])
-        origin = {
-            'source': 'PO.DAAC',
-            'url': 'podaac.jpl.nasa.gov/ws'
-        }
+    times = np.array(convert_times_to_datetime(d[time]))
+    lats = np.array(d.variables[lat][:])
+    lons = np.array(d.variables[lon][:])
+    values = np.array(dataset[:])
+    origin = {
+        'source': 'PO.DAAC',
+        'url': 'podaac.jpl.nasa.gov/ws'
+    }
 
     # Removing the downloaded temporary granule before creating the OCW
     # dataset.
-        d.close()
-        path = os.path.join(os.path.dirname(__file__), granuleName)
-        os.remove(path)
+    d.close()
+    path = os.path.join(os.path.dirname(__file__), granuleName)
+    os.remove(path)
 
-        return Dataset(lats, lons, times, values, variable, name=name, 
origin=origin)
+    return Dataset(lats, lons, times, values, variable, name=name, 
origin=origin)

http://git-wip-us.apache.org/repos/asf/climate/blob/21a78abd/ocw/tests/test_podaac.py
----------------------------------------------------------------------
diff --git a/ocw/tests/test_podaac.py b/ocw/tests/test_podaac.py
index f696d02..04712b8 100644
--- a/ocw/tests/test_podaac.py
+++ b/ocw/tests/test_podaac.py
@@ -51,8 +51,8 @@ class TestPodaacDataSource(unittest.TestCase):
         self.assertEquals(len(self.dataset.values), 1)
 
     def test_valid_date_conversion(self):
-    start = dt.datetime(2006, 6, 7, 12)
-    self.assertTrue(start == self.dataset.times[0])
+        start = dt.datetime(2006, 6, 7, 12)
+        self.assertTrue(start == self.dataset.times[0])
 
     def test_dataset_origin(self):
         self.assertEquals(self.dataset.origin['source'], 'PO.DAAC')

Reply via email to