CLIMATE-596 - Fix dataset processor parameter propagation

- Fix dataset parameter propagation issues that were discovered with the
  fixes in CLIMATE-592. All kwargs are now passed as such instead of
  defaulting to positional arguments.
- Update a broken dataset.py test that didn't properly assign kwargs due
  to changes in previous commits.


Project: http://git-wip-us.apache.org/repos/asf/climate/repo
Commit: http://git-wip-us.apache.org/repos/asf/climate/commit/01f3272b
Tree: http://git-wip-us.apache.org/repos/asf/climate/tree/01f3272b
Diff: http://git-wip-us.apache.org/repos/asf/climate/diff/01f3272b

Branch: refs/heads/master
Commit: 01f3272bad9324ab3583aafb7a0ac62e0e7750ef
Parents: d4cefc5
Author: Michael Joyce <[email protected]>
Authored: Thu Mar 12 09:16:07 2015 -0700
Committer: Michael Joyce <[email protected]>
Committed: Thu Mar 12 09:16:07 2015 -0700

----------------------------------------------------------------------
 ocw/dataset_processor.py  | 30 +++++++++++++++++-------------
 ocw/tests/test_dataset.py | 10 +++++++---
 2 files changed, 24 insertions(+), 16 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/climate/blob/01f3272b/ocw/dataset_processor.py
----------------------------------------------------------------------
diff --git a/ocw/dataset_processor.py b/ocw/dataset_processor.py
index 1f9edf0..f00ab36 100644
--- a/ocw/dataset_processor.py
+++ b/ocw/dataset_processor.py
@@ -61,9 +61,10 @@ def temporal_rebin(target_dataset, temporal_resolution):
                              target_dataset.lons, 
                              binned_dates, 
                              binned_values,
-                             target_dataset.variable,
-                             target_dataset.units,
-                             target_dataset.name)
+                             variable=target_dataset.variable,
+                             units=target_dataset.units,
+                             name=target_dataset.name,
+                             origin=target_dataset.origin)
     
     return new_dataset
 
@@ -117,9 +118,10 @@ def spatial_regrid(target_dataset, new_latitudes, 
new_longitudes):
                                    new_longitudes, 
                                    target_dataset.times, 
                                    new_values,
-                                   target_dataset.variable,
-                                   target_dataset.units,
-                                   target_dataset.name)
+                                   variable=target_dataset.variable,
+                                   units=target_dataset.units,
+                                   name=target_dataset.name,
+                                   origin=target_dataset.origin)
     return regridded_dataset
 
 def ensemble(datasets):
@@ -145,7 +147,7 @@ def ensemble(datasets):
                                   datasets[0].lons, 
                                   datasets[0].times,
                                   ensemble_values,
-                                  datasets[0].units,
+                                  units=datasets[0].units,
                                   name="Dataset Ensemble")
     
     return ensemble_dataset
@@ -187,9 +189,10 @@ def subset(subregion, target_dataset):
             dataset_slices["time_start"]:dataset_slices["time_end"] + 1,
             dataset_slices["lat_start"]:dataset_slices["lat_end"] + 1,
             dataset_slices["lon_start"]:dataset_slices["lon_end"] + 1],
-        target_dataset.variable,
-        target_dataset.units,
-        target_dataset.name
+        variable=target_dataset.variable,
+        units=target_dataset.units,
+        name=target_dataset.name,
+        origin=target_dataset.origin
     )
 
 def safe_subset(subregion, target_dataset):
@@ -254,9 +257,10 @@ def normalize_dataset_datetimes(dataset, timestep):
         dataset.lons,
         np.array(new_times),
         dataset.values,
-        dataset.variable,
-        dataset.units,
-        dataset.name
+        variable=dataset.variable,
+        units=dataset.units,
+        name=dataset.name,
+        origin=dataset.origin
     )
 
 def write_netcdf(dataset, path, compress=True):

http://git-wip-us.apache.org/repos/asf/climate/blob/01f3272b/ocw/tests/test_dataset.py
----------------------------------------------------------------------
diff --git a/ocw/tests/test_dataset.py b/ocw/tests/test_dataset.py
index 3edbe83..bd9dbd6 100644
--- a/ocw/tests/test_dataset.py
+++ b/ocw/tests/test_dataset.py
@@ -32,9 +32,13 @@ class TestDatasetAttributes(unittest.TestCase):
         self.variable = 'prec'
         self.name = 'foo'
         self.origin = {'path': '/a/fake/file/path'}
-        self.test_dataset = Dataset(self.lat, self.lon, self.time, 
-                                    self.value, self.variable,
-                                    self.name, self.origin)
+        self.test_dataset = Dataset(self.lat,
+                                    self.lon,
+                                    self.time,
+                                    self.value,
+                                    variable=self.variable,
+                                    name=self.name,
+                                    origin=self.origin)
 
     def test_lats(self):
         self.assertItemsEqual(self.test_dataset.lats, self.lat)

Reply via email to